summaryrefslogtreecommitdiff
path: root/android/media
diff options
context:
space:
mode:
authorJustin Klaassen <justinklaassen@google.com>2017-09-15 17:58:39 -0400
committerJustin Klaassen <justinklaassen@google.com>2017-09-15 17:58:39 -0400
commit10d07c88d69cc64f73a069163e7ea5ba2519a099 (patch)
tree8dbd149eb350320a29c3d10e7ad3201de1c5cbee /android/media
parent677516fb6b6f207d373984757d3d9450474b6b00 (diff)
downloadandroid-28-10d07c88d69cc64f73a069163e7ea5ba2519a099.tar.gz
Import Android SDK Platform PI [4335822]
/google/data/ro/projects/android/fetch_artifact \
    --bid 4335822 \
    --target sdk_phone_armv7-win_sdk \
    sdk-repo-linux-sources-4335822.zip

AndroidVersion.ApiLevel has been modified to appear as 28

Change-Id: Ic8f04be005a71c2b9abeaac754d8da8d6f9a2c32
Diffstat (limited to 'android/media')
-rw-r--r--android/media/AmrInputStream.java195
-rw-r--r--android/media/AsyncPlayer.java274
-rw-r--r--android/media/AudioAttributes.java1048
-rw-r--r--android/media/AudioDeviceCallback.java40
-rw-r--r--android/media/AudioDeviceInfo.java342
-rw-r--r--android/media/AudioDevicePort.java109
-rw-r--r--android/media/AudioDevicePortConfig.java46
-rw-r--r--android/media/AudioFocusInfo.java196
-rw-r--r--android/media/AudioFocusRequest.java549
-rw-r--r--android/media/AudioFormat.java1033
-rw-r--r--android/media/AudioGain.java159
-rw-r--r--android/media/AudioGainConfig.java84
-rw-r--r--android/media/AudioHandle.java54
-rw-r--r--android/media/AudioManager.java4535
-rw-r--r--android/media/AudioManagerInternal.java62
-rw-r--r--android/media/AudioMixPort.java70
-rw-r--r--android/media/AudioMixPortConfig.java41
-rw-r--r--android/media/AudioPatch.java83
-rw-r--r--android/media/AudioPlaybackConfiguration.java552
-rw-r--r--android/media/AudioPort.java226
-rw-r--r--android/media/AudioPortConfig.java103
-rw-r--r--android/media/AudioPortEventHandler.java176
-rw-r--r--android/media/AudioRecord.java1795
-rw-r--r--android/media/AudioRecordRoutingProxy.java32
-rw-r--r--android/media/AudioRecordingConfiguration.java284
-rw-r--r--android/media/AudioRoutesInfo.java89
-rw-r--r--android/media/AudioRouting.java78
-rw-r--r--android/media/AudioSystem.java925
-rw-r--r--android/media/AudioTimestamp.java89
-rw-r--r--android/media/AudioTrack.java3149
-rw-r--r--android/media/AudioTrackRoutingProxy.java32
-rw-r--r--android/media/BufferingParams.java460
-rw-r--r--android/media/CamcorderProfile.java503
-rw-r--r--android/media/CameraProfile.java114
-rw-r--r--android/media/Cea708CaptionRenderer.java2151
-rw-r--r--android/media/ClosedCaptionRenderer.java1510
-rw-r--r--android/media/DecoderCapabilities.java84
-rw-r--r--android/media/DeniedByServerException.java27
-rw-r--r--android/media/DrmInitData.java89
-rw-r--r--android/media/EncoderCapabilities.java163
-rw-r--r--android/media/ExifInterface.java4014
-rw-r--r--android/media/ExternalRingtonesCursorWrapper.java46
-rw-r--r--android/media/FaceDetector.java202
-rw-r--r--android/media/Image.java396
-rw-r--r--android/media/ImageReader.java1049
-rw-r--r--android/media/ImageUtils.java276
-rw-r--r--android/media/ImageWriter.java877
-rw-r--r--android/media/JetPlayer.java590
-rw-r--r--android/media/MediaActionSound.java289
-rw-r--r--android/media/MediaCas.java606
-rw-r--r--android/media/MediaCasException.java88
-rw-r--r--android/media/MediaCasStateException.java108
-rw-r--r--android/media/MediaCodec.java3748
-rw-r--r--android/media/MediaCodecInfo.java3116
-rw-r--r--android/media/MediaCodecList.java258
-rw-r--r--android/media/MediaCrypto.java108
-rw-r--r--android/media/MediaCryptoException.java29
-rw-r--r--android/media/MediaDataSource.java61
-rw-r--r--android/media/MediaDescrambler.java217
-rw-r--r--android/media/MediaDescription.java382
-rw-r--r--android/media/MediaDrm.java1330
-rw-r--r--android/media/MediaDrmException.java26
-rw-r--r--android/media/MediaDrmResetException.java28
-rw-r--r--android/media/MediaExtractor.java755
-rw-r--r--android/media/MediaFile.java378
-rw-r--r--android/media/MediaFormat.java1006
-rw-r--r--android/media/MediaHTTPConnection.java418
-rw-r--r--android/media/MediaHTTPService.java101
-rw-r--r--android/media/MediaInserter.java95
-rw-r--r--android/media/MediaMetadata.java846
-rw-r--r--android/media/MediaMetadataEditor.java470
-rw-r--r--android/media/MediaMetadataRetriever.java576
-rw-r--r--android/media/MediaMuxer.java696
-rw-r--r--android/media/MediaPlayer.java5642
-rw-r--r--android/media/MediaRecorder.java1466
-rw-r--r--android/media/MediaRouter.java3016
-rw-r--r--android/media/MediaRouterClientState.java196
-rw-r--r--android/media/MediaScanner.java1970
-rw-r--r--android/media/MediaScannerClient.java36
-rw-r--r--android/media/MediaScannerConnection.java272
-rw-r--r--android/media/MediaSync.java643
-rw-r--r--android/media/MediaSyncEvent.java122
-rw-r--r--android/media/MediaTimeProvider.java90
-rw-r--r--android/media/MediaTimestamp.java85
-rw-r--r--android/media/Metadata.java553
-rw-r--r--android/media/MiniThumbFile.java273
-rw-r--r--android/media/NotProvisionedException.java29
-rw-r--r--android/media/PlaybackParams.java250
-rw-r--r--android/media/PlayerBase.java588
-rw-r--r--android/media/PlayerProxy.java153
-rw-r--r--android/media/Rating.java308
-rw-r--r--android/media/RemoteControlClient.java1025
-rw-r--r--android/media/RemoteController.java695
-rw-r--r--android/media/RemoteDisplay.java167
-rw-r--r--android/media/RemoteDisplayState.java189
-rw-r--r--android/media/ResampleInputStream.java150
-rw-r--r--android/media/ResourceBusyException.java27
-rw-r--r--android/media/Ringtone.java480
-rw-r--r--android/media/RingtoneManager.java1189
-rw-r--r--android/media/SRTRenderer.java202
-rw-r--r--android/media/SoundPool.java615
-rw-r--r--android/media/SubtitleController.java507
-rw-r--r--android/media/SubtitleData.java87
-rw-r--r--android/media/SubtitleTrack.java726
-rw-r--r--android/media/SyncParams.java288
-rw-r--r--android/media/ThumbnailUtils.java522
-rw-r--r--android/media/TimedMetaData.java78
-rw-r--r--android/media/TimedText.java734
-rw-r--r--android/media/ToneGenerator.java897
-rw-r--r--android/media/TtmlRenderer.java746
-rw-r--r--android/media/UnsupportedSchemeException.java27
-rw-r--r--android/media/Utils.java381
-rw-r--r--android/media/VolumeAutomation.java40
-rw-r--r--android/media/VolumePolicy.java113
-rw-r--r--android/media/VolumeProvider.java161
-rw-r--r--android/media/VolumeShaper.java1420
-rw-r--r--android/media/WebVttRenderer.java1866
-rw-r--r--android/media/audiofx/AcousticEchoCanceler.java96
-rw-r--r--android/media/audiofx/AudioEffect.java1361
-rw-r--r--android/media/audiofx/AutomaticGainControl.java96
-rw-r--r--android/media/audiofx/BassBoost.java287
-rw-r--r--android/media/audiofx/EnvironmentalReverb.java661
-rw-r--r--android/media/audiofx/Equalizer.java559
-rw-r--r--android/media/audiofx/LoudnessEnhancer.java290
-rw-r--r--android/media/audiofx/NoiseSuppressor.java98
-rw-r--r--android/media/audiofx/PresetReverb.java303
-rw-r--r--android/media/audiofx/Virtualizer.java629
-rw-r--r--android/media/audiofx/Visualizer.java772
-rw-r--r--android/media/audiopolicy/AudioMix.java375
-rw-r--r--android/media/audiopolicy/AudioMixingRule.java482
-rw-r--r--android/media/audiopolicy/AudioPolicy.java624
-rw-r--r--android/media/audiopolicy/AudioPolicyConfig.java230
-rw-r--r--android/media/browse/MediaBrowser.java1171
-rw-r--r--android/media/browse/MediaBrowserUtils.java72
-rw-r--r--android/media/effect/Effect.java111
-rw-r--r--android/media/effect/EffectContext.java128
-rw-r--r--android/media/effect/EffectFactory.java516
-rw-r--r--android/media/effect/EffectUpdateListener.java36
-rw-r--r--android/media/effect/FilterEffect.java98
-rw-r--r--android/media/effect/FilterGraphEffect.java116
-rw-r--r--android/media/effect/SingleFilterEffect.java95
-rw-r--r--android/media/effect/SizeChangeEffect.java59
-rw-r--r--android/media/effect/effects/AutoFixEffect.java31
-rw-r--r--android/media/effect/effects/BackDropperEffect.java105
-rw-r--r--android/media/effect/effects/BitmapOverlayEffect.java32
-rw-r--r--android/media/effect/effects/BlackWhiteEffect.java31
-rw-r--r--android/media/effect/effects/BrightnessEffect.java32
-rw-r--r--android/media/effect/effects/ColorTemperatureEffect.java31
-rw-r--r--android/media/effect/effects/ContrastEffect.java32
-rw-r--r--android/media/effect/effects/CropEffect.java32
-rw-r--r--android/media/effect/effects/CrossProcessEffect.java31
-rw-r--r--android/media/effect/effects/DocumentaryEffect.java30
-rw-r--r--android/media/effect/effects/DuotoneEffect.java31
-rw-r--r--android/media/effect/effects/FillLightEffect.java31
-rw-r--r--android/media/effect/effects/FisheyeEffect.java32
-rw-r--r--android/media/effect/effects/FlipEffect.java31
-rw-r--r--android/media/effect/effects/GrainEffect.java31
-rw-r--r--android/media/effect/effects/GrayscaleEffect.java31
-rw-r--r--android/media/effect/effects/IdentityEffect.java58
-rw-r--r--android/media/effect/effects/LomoishEffect.java30
-rw-r--r--android/media/effect/effects/NegativeEffect.java31
-rw-r--r--android/media/effect/effects/PosterizeEffect.java31
-rw-r--r--android/media/effect/effects/RedEyeEffect.java32
-rw-r--r--android/media/effect/effects/RotateEffect.java31
-rw-r--r--android/media/effect/effects/SaturateEffect.java31
-rw-r--r--android/media/effect/effects/SepiaEffect.java31
-rw-r--r--android/media/effect/effects/SharpenEffect.java31
-rw-r--r--android/media/effect/effects/StraightenEffect.java31
-rw-r--r--android/media/effect/effects/TintEffect.java31
-rw-r--r--android/media/effect/effects/VignetteEffect.java31
-rw-r--r--android/media/midi/MidiDevice.java308
-rw-r--r--android/media/midi/MidiDeviceInfo.java390
-rw-r--r--android/media/midi/MidiDeviceServer.java452
-rw-r--r--android/media/midi/MidiDeviceService.java145
-rw-r--r--android/media/midi/MidiDeviceStatus.java138
-rw-r--r--android/media/midi/MidiInputPort.java173
-rw-r--r--android/media/midi/MidiManager.java327
-rw-r--r--android/media/midi/MidiOutputPort.java159
-rw-r--r--android/media/midi/MidiPortImpl.java134
-rw-r--r--android/media/midi/MidiReceiver.java133
-rw-r--r--android/media/midi/MidiSender.java62
-rw-r--r--android/media/projection/MediaProjection.java212
-rw-r--r--android/media/projection/MediaProjectionInfo.java93
-rw-r--r--android/media/projection/MediaProjectionManager.java200
-rw-r--r--android/media/session/MediaController.java1116
-rw-r--r--android/media/session/MediaSession.java1468
-rw-r--r--android/media/session/MediaSessionLegacyHelper.java512
-rw-r--r--android/media/session/MediaSessionManager.java690
-rw-r--r--android/media/session/ParcelableVolumeInfo.java80
-rw-r--r--android/media/session/PlaybackState.java1078
-rw-r--r--android/media/soundtrigger/SoundTriggerDetector.java393
-rw-r--r--android/media/soundtrigger/SoundTriggerManager.java327
-rw-r--r--android/media/tv/DvbDeviceInfo.java93
-rw-r--r--android/media/tv/ITvInputSessionWrapper.java383
-rw-r--r--android/media/tv/TvContentRating.java983
-rw-r--r--android/media/tv/TvContentRatingSystemInfo.java111
-rw-r--r--android/media/tv/TvContract.java3141
-rw-r--r--android/media/tv/TvInputHardwareInfo.java255
-rw-r--r--android/media/tv/TvInputInfo.java1115
-rw-r--r--android/media/tv/TvInputManager.java2611
-rw-r--r--android/media/tv/TvInputService.java2141
-rw-r--r--android/media/tv/TvRecordingClient.java405
-rw-r--r--android/media/tv/TvStreamConfig.java177
-rw-r--r--android/media/tv/TvTrackInfo.java498
-rw-r--r--android/media/tv/TvView.java1330
205 files changed, 105831 insertions, 0 deletions
diff --git a/android/media/AmrInputStream.java b/android/media/AmrInputStream.java
new file mode 100644
index 00000000..fb91bbbb
--- /dev/null
+++ b/android/media/AmrInputStream.java
@@ -0,0 +1,195 @@
+/*
+ * Copyright (C) 2008 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import java.io.InputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import android.media.MediaCodec.BufferInfo;
+import android.util.Log;
+
+
/**
 * AmrInputStream
 *
 * <p>Wraps an {@link InputStream} of 16 bit PCM audio (configured for mono at
 * 8000 Hz — see the constructor's MediaFormat) and exposes the AMR-NB encoded
 * result as an InputStream, using a {@link MediaCodec} encoder internally.
 * @hide
 */
public final class AmrInputStream extends InputStream {
    private final static String TAG = "AmrInputStream";

    // frame is 20 msec at 8.000 khz
    private final static int SAMPLES_PER_FRAME = 8000 * 20 / 1000;

    // AMR-NB encoder; left null if codec creation/configuration fails,
    // in which case read() throws IllegalStateException("not open").
    MediaCodec mCodec;
    // Reused for every dequeueOutputBuffer() call.
    BufferInfo mInfo;
    // Set once the encoder has emitted a buffer flagged END_OF_STREAM.
    boolean mSawOutputEOS;
    // Set once the underlying PCM stream has returned -1 (end of input).
    boolean mSawInputEOS;

    // pcm input stream
    private InputStream mInputStream;

    // result amr stream
    // mBuf doubles as the staging area for raw PCM pushed into the encoder and
    // for encoded AMR bytes read back out; [mBufOut, mBufIn) is the unread range.
    private final byte[] mBuf = new byte[SAMPLES_PER_FRAME * 2];
    private int mBufIn = 0;
    private int mBufOut = 0;

    // helper for bytewise read()
    private byte[] mOneByte = new byte[1];

    /**
     * Create a new AmrInputStream, which converts 16 bit PCM to AMR
     * @param inputStream InputStream containing 16 bit PCM.
     */
    public AmrInputStream(InputStream inputStream) {
        mInputStream = inputStream;

        // Request an AMR-NB encoder: 8 kHz, mono, 12.2 kbps output.
        MediaFormat format = new MediaFormat();
        format.setString(MediaFormat.KEY_MIME, MediaFormat.MIMETYPE_AUDIO_AMR_NB);
        format.setInteger(MediaFormat.KEY_SAMPLE_RATE, 8000);
        format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
        format.setInteger(MediaFormat.KEY_BIT_RATE, 12200);

        MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        String name = mcl.findEncoderForFormat(format);
        if (name != null) {
            try {
                mCodec = MediaCodec.createByCodecName(name);
                mCodec.configure(format,
                        null /* surface */,
                        null /* crypto */,
                        MediaCodec.CONFIGURE_FLAG_ENCODE);
                mCodec.start();
            } catch (IOException e) {
                // Codec could not be created or started; leave mCodec null so
                // later read() calls report the stream as not open.
                if (mCodec != null) {
                    mCodec.release();
                }
                mCodec = null;
            }
        }
        mInfo = new BufferInfo();
    }

    @Override
    public int read() throws IOException {
        // Delegate to the array variant via a one-byte scratch buffer.
        int rtn = read(mOneByte, 0, 1);
        return rtn == 1 ? (0xff & mOneByte[0]) : -1;
    }

    @Override
    public int read(byte[] b) throws IOException {
        return read(b, 0, b.length);
    }

    /**
     * Reads up to {@code length} bytes of encoded AMR data into {@code b}.
     *
     * @return the number of bytes copied, 0 if no encoded data is currently
     *         available (caller should try again), or -1 at end of stream.
     * @throws IllegalStateException if the encoder could not be opened.
     */
    @Override
    public int read(byte[] b, int offset, int length) throws IOException {
        if (mCodec == null) {
            throw new IllegalStateException("not open");
        }

        if (mBufOut >= mBufIn && !mSawOutputEOS) {
            // no data left in buffer, refill it
            mBufOut = 0;
            mBufIn = 0;

            // first push as much data into the encoder as possible
            while (!mSawInputEOS) {
                // Non-blocking dequeue: stop feeding once the encoder has no
                // free input buffers.
                int index = mCodec.dequeueInputBuffer(0);
                if (index < 0) {
                    // no input buffer currently available
                    break;
                } else {
                    int numRead;
                    // Accumulate one full frame of PCM (or whatever remains
                    // before EOF) from the source stream.
                    for (numRead = 0; numRead < SAMPLES_PER_FRAME * 2; ) {
                        int n = mInputStream.read(mBuf, numRead, SAMPLES_PER_FRAME * 2 - numRead);
                        if (n == -1) {
                            mSawInputEOS = true;
                            break;
                        }
                        numRead += n;
                    }
                    ByteBuffer buf = mCodec.getInputBuffer(index);
                    buf.put(mBuf, 0, numRead);
                    mCodec.queueInputBuffer(index,
                            0 /* offset */,
                            numRead,
                            0 /* presentationTimeUs */,
                            mSawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0 /* flags */);
                }
            }

            // now read encoded data from the encoder (blocking, since we just filled up the
            // encoder's input with data it should be able to output at least one buffer)
            while (true) {
                int index = mCodec.dequeueOutputBuffer(mInfo, -1);
                if (index >= 0) {
                    mBufIn = mInfo.size;
                    ByteBuffer out = mCodec.getOutputBuffer(index);
                    out.get(mBuf, 0 /* offset */, mBufIn /* length */);
                    mCodec.releaseOutputBuffer(index, false /* render */);
                    if ((mInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        mSawOutputEOS = true;
                    }
                    break;
                }
                // Negative return values (e.g. format/buffers changed) are
                // skipped; keep looping until a real output buffer arrives.
            }
        }

        if (mBufOut < mBufIn) {
            // there is data in the buffer
            if (length > mBufIn - mBufOut) {
                length = mBufIn - mBufOut;
            }
            System.arraycopy(mBuf, mBufOut, b, offset, length);
            mBufOut += length;
            return length;
        }

        if (mSawInputEOS && mSawOutputEOS) {
            // no more data available in buffer, codec or input stream
            return -1;
        }

        // caller should try again
        return 0;
    }

    /**
     * Closes the underlying PCM stream and releases the encoder.
     * Safe to call more than once.
     */
    @Override
    public void close() throws IOException {
        try {
            if (mInputStream != null) {
                mInputStream.close();
            }
        } finally {
            // Release the codec even if closing the input stream threw.
            mInputStream = null;
            try {
                if (mCodec != null) {
                    mCodec.release();
                }
            } finally {
                mCodec = null;
            }
        }
    }

    @Override
    protected void finalize() throws Throwable {
        // Last-resort cleanup; callers are expected to close() explicitly.
        if (mCodec != null) {
            Log.w(TAG, "AmrInputStream wasn't closed");
            mCodec.release();
        }
    }
}
diff --git a/android/media/AsyncPlayer.java b/android/media/AsyncPlayer.java
new file mode 100644
index 00000000..c1a178a2
--- /dev/null
+++ b/android/media/AsyncPlayer.java
@@ -0,0 +1,274 @@
+/*
+ * Copyright (C) 2008 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.NonNull;
+import android.content.Context;
+import android.media.PlayerBase;
+import android.net.Uri;
+import android.os.PowerManager;
+import android.os.SystemClock;
+import android.util.Log;
+
+import java.util.LinkedList;
+
+/**
+ * Plays a series of audio URIs, but does all the hard work on another thread
+ * so that any slowness with preparing or loading doesn't block the calling thread.
+ */
+public class AsyncPlayer {
+ private static final int PLAY = 1;
+ private static final int STOP = 2;
+ private static final boolean mDebug = false;
+
+ private static final class Command {
+ int code;
+ Context context;
+ Uri uri;
+ boolean looping;
+ AudioAttributes attributes;
+ long requestTime;
+
+ public String toString() {
+ return "{ code=" + code + " looping=" + looping + " attr=" + attributes
+ + " uri=" + uri + " }";
+ }
+ }
+
+ private final LinkedList<Command> mCmdQueue = new LinkedList();
+
+ private void startSound(Command cmd) {
+ // Preparing can be slow, so if there is something else
+ // is playing, let it continue until we're done, so there
+ // is less of a glitch.
+ try {
+ if (mDebug) Log.d(mTag, "Starting playback");
+ MediaPlayer player = new MediaPlayer();
+ player.setAudioAttributes(cmd.attributes);
+ player.setDataSource(cmd.context, cmd.uri);
+ player.setLooping(cmd.looping);
+ player.prepare();
+ player.start();
+ if (mPlayer != null) {
+ mPlayer.release();
+ }
+ mPlayer = player;
+ long delay = SystemClock.uptimeMillis() - cmd.requestTime;
+ if (delay > 1000) {
+ Log.w(mTag, "Notification sound delayed by " + delay + "msecs");
+ }
+ }
+ catch (Exception e) {
+ Log.w(mTag, "error loading sound for " + cmd.uri, e);
+ }
+ }
+
+ private final class Thread extends java.lang.Thread {
+ Thread() {
+ super("AsyncPlayer-" + mTag);
+ }
+
+ public void run() {
+ while (true) {
+ Command cmd = null;
+
+ synchronized (mCmdQueue) {
+ if (mDebug) Log.d(mTag, "RemoveFirst");
+ cmd = mCmdQueue.removeFirst();
+ }
+
+ switch (cmd.code) {
+ case PLAY:
+ if (mDebug) Log.d(mTag, "PLAY");
+ startSound(cmd);
+ break;
+ case STOP:
+ if (mDebug) Log.d(mTag, "STOP");
+ if (mPlayer != null) {
+ long delay = SystemClock.uptimeMillis() - cmd.requestTime;
+ if (delay > 1000) {
+ Log.w(mTag, "Notification stop delayed by " + delay + "msecs");
+ }
+ mPlayer.stop();
+ mPlayer.release();
+ mPlayer = null;
+ } else {
+ Log.w(mTag, "STOP command without a player");
+ }
+ break;
+ }
+
+ synchronized (mCmdQueue) {
+ if (mCmdQueue.size() == 0) {
+ // nothing left to do, quit
+ // doing this check after we're done prevents the case where they
+ // added it during the operation from spawning two threads and
+ // trying to do them in parallel.
+ mThread = null;
+ releaseWakeLock();
+ return;
+ }
+ }
+ }
+ }
+ }
+
+ private String mTag;
+ private Thread mThread;
+ private MediaPlayer mPlayer;
+ private PowerManager.WakeLock mWakeLock;
+
+ // The current state according to the caller. Reality lags behind
+ // because of the asynchronous nature of this class.
+ private int mState = STOP;
+
+ /**
+ * Construct an AsyncPlayer object.
+ *
+ * @param tag a string to use for debugging
+ */
+ public AsyncPlayer(String tag) {
+ if (tag != null) {
+ mTag = tag;
+ } else {
+ mTag = "AsyncPlayer";
+ }
+ }
+
+ /**
+ * Start playing the sound. It will actually start playing at some
+ * point in the future. There are no guarantees about latency here.
+ * Calling this before another audio file is done playing will stop
+ * that one and start the new one.
+ *
+ * @param context Your application's context.
+ * @param uri The URI to play. (see {@link MediaPlayer#setDataSource(Context, Uri)})
+ * @param looping Whether the audio should loop forever.
+ * (see {@link MediaPlayer#setLooping(boolean)})
+ * @param stream the AudioStream to use.
+ * (see {@link MediaPlayer#setAudioStreamType(int)})
+ * @deprecated use {@link #play(Context, Uri, boolean, AudioAttributes)} instead
+ */
+ public void play(Context context, Uri uri, boolean looping, int stream) {
+ PlayerBase.deprecateStreamTypeForPlayback(stream, "AsyncPlayer", "play()");
+ if (context == null || uri == null) {
+ return;
+ }
+ try {
+ play(context, uri, looping,
+ new AudioAttributes.Builder().setInternalLegacyStreamType(stream).build());
+ } catch (IllegalArgumentException e) {
+ Log.e(mTag, "Call to deprecated AsyncPlayer.play() method caused:", e);
+ }
+ }
+
+ /**
+ * Start playing the sound. It will actually start playing at some
+ * point in the future. There are no guarantees about latency here.
+ * Calling this before another audio file is done playing will stop
+ * that one and start the new one.
+ *
+ * @param context the non-null application's context.
+ * @param uri the non-null URI to play. (see {@link MediaPlayer#setDataSource(Context, Uri)})
+ * @param looping whether the audio should loop forever.
+ * (see {@link MediaPlayer#setLooping(boolean)})
+ * @param attributes the non-null {@link AudioAttributes} to use.
+ * (see {@link MediaPlayer#setAudioAttributes(AudioAttributes)})
+ * @throws IllegalArgumentException
+ */
+ public void play(@NonNull Context context, @NonNull Uri uri, boolean looping,
+ @NonNull AudioAttributes attributes) throws IllegalArgumentException {
+ if (context == null || uri == null || attributes == null) {
+ throw new IllegalArgumentException("Illegal null AsyncPlayer.play() argument");
+ }
+ Command cmd = new Command();
+ cmd.requestTime = SystemClock.uptimeMillis();
+ cmd.code = PLAY;
+ cmd.context = context;
+ cmd.uri = uri;
+ cmd.looping = looping;
+ cmd.attributes = attributes;
+ synchronized (mCmdQueue) {
+ enqueueLocked(cmd);
+ mState = PLAY;
+ }
+ }
+
+ /**
+ * Stop a previously played sound. It can't be played again or unpaused
+ * at this point. Calling this multiple times has no ill effects.
+ */
+ public void stop() {
+ synchronized (mCmdQueue) {
+ // This check allows stop to be called multiple times without starting
+ // a thread that ends up doing nothing.
+ if (mState != STOP) {
+ Command cmd = new Command();
+ cmd.requestTime = SystemClock.uptimeMillis();
+ cmd.code = STOP;
+ enqueueLocked(cmd);
+ mState = STOP;
+ }
+ }
+ }
+
+ private void enqueueLocked(Command cmd) {
+ mCmdQueue.add(cmd);
+ if (mThread == null) {
+ acquireWakeLock();
+ mThread = new Thread();
+ mThread.start();
+ }
+ }
+
+ /**
+ * We want to hold a wake lock while we do the prepare and play. The stop probably is
+ * optional, but it won't hurt to have it too. The problem is that if you start a sound
+ * while you're holding a wake lock (e.g. an alarm starting a notification), you want the
+ * sound to play, but if the CPU turns off before mThread gets to work, it won't. The
+ * simplest way to deal with this is to make it so there is a wake lock held while the
+ * thread is starting or running. You're going to need the WAKE_LOCK permission if you're
+ * going to call this.
+ *
+ * This must be called before the first time play is called.
+ *
+ * @hide
+ */
+ public void setUsesWakeLock(Context context) {
+ if (mWakeLock != null || mThread != null) {
+ // if either of these has happened, we've already played something.
+ // and our releases will be out of sync.
+ throw new RuntimeException("assertion failed mWakeLock=" + mWakeLock
+ + " mThread=" + mThread);
+ }
+ PowerManager pm = (PowerManager)context.getSystemService(Context.POWER_SERVICE);
+ mWakeLock = pm.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, mTag);
+ }
+
+ private void acquireWakeLock() {
+ if (mWakeLock != null) {
+ mWakeLock.acquire();
+ }
+ }
+
+ private void releaseWakeLock() {
+ if (mWakeLock != null) {
+ mWakeLock.release();
+ }
+ }
+}
+
diff --git a/android/media/AudioAttributes.java b/android/media/AudioAttributes.java
new file mode 100644
index 00000000..3b9a5de0
--- /dev/null
+++ b/android/media/AudioAttributes.java
@@ -0,0 +1,1048 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.SystemApi;
+import android.os.Bundle;
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.text.TextUtils;
+import android.util.Log;
+import android.util.SparseIntArray;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Objects;
+import java.util.Set;
+
+/**
+ * A class to encapsulate a collection of attributes describing information about an audio
+ * stream.
+ * <p><code>AudioAttributes</code> supersede the notion of stream types (see for instance
+ * {@link AudioManager#STREAM_MUSIC} or {@link AudioManager#STREAM_ALARM}) for defining the
+ * behavior of audio playback. Attributes allow an application to specify more information than is
+ * conveyed in a stream type by allowing the application to define:
+ * <ul>
+ * <li>usage: "why" you are playing a sound, what is this sound used for. This is achieved with
+ * the "usage" information. Examples of usage are {@link #USAGE_MEDIA} and {@link #USAGE_ALARM}.
+ * These two examples are the closest to stream types, but more detailed use cases are
+ * available. Usage information is more expressive than a stream type, and allows certain
+ * platforms or routing policies to use this information for more refined volume or routing
+ * decisions. Usage is the most important information to supply in <code>AudioAttributes</code>
+ * and it is recommended to build any instance with this information supplied, see
+ * {@link AudioAttributes.Builder} for exceptions.</li>
+ * <li>content type: "what" you are playing. The content type expresses the general category of
+ * the content. This information is optional. But in case it is known (for instance
+ * {@link #CONTENT_TYPE_MOVIE} for a movie streaming service or {@link #CONTENT_TYPE_MUSIC} for
+ * a music playback application) this information might be used by the audio framework to
+ * selectively configure some audio post-processing blocks.</li>
+ * <li>flags: "how" is playback to be affected, see the flag definitions for the specific playback
+ * behaviors they control. </li>
+ * </ul>
+ * <p><code>AudioAttributes</code> are used for example in one of the {@link AudioTrack}
+ * constructors (see {@link AudioTrack#AudioTrack(AudioAttributes, AudioFormat, int, int, int)}),
+ * to configure a {@link MediaPlayer}
+ * (see {@link MediaPlayer#setAudioAttributes(AudioAttributes)}) or a
+ * {@link android.app.Notification} (see {@link android.app.Notification#audioAttributes}). An
+ * <code>AudioAttributes</code> instance is built through its builder,
+ * {@link AudioAttributes.Builder}.
+ */
+public final class AudioAttributes implements Parcelable {
+ private final static String TAG = "AudioAttributes";
+
+ /**
+ * Content type value to use when the content type is unknown, or other than the ones defined.
+ */
+ public final static int CONTENT_TYPE_UNKNOWN = 0;
+ /**
+ * Content type value to use when the content type is speech.
+ */
+ public final static int CONTENT_TYPE_SPEECH = 1;
+ /**
+ * Content type value to use when the content type is music.
+ */
+ public final static int CONTENT_TYPE_MUSIC = 2;
+ /**
+ * Content type value to use when the content type is a soundtrack, typically accompanying
+ * a movie or TV program.
+ */
+ public final static int CONTENT_TYPE_MOVIE = 3;
+ /**
+ * Content type value to use when the content type is a sound used to accompany a user
+ * action, such as a beep or sound effect expressing a key click, or event, such as the
+ * type of a sound for a bonus being received in a game. These sounds are mostly synthesized
+ * or short Foley sounds.
+ */
+ public final static int CONTENT_TYPE_SONIFICATION = 4;
+
+ /**
+ * Usage value to use when the usage is unknown.
+ */
+ public final static int USAGE_UNKNOWN = 0;
+ /**
+ * Usage value to use when the usage is media, such as music, or movie
+ * soundtracks.
+ */
+ public final static int USAGE_MEDIA = 1;
+ /**
+ * Usage value to use when the usage is voice communications, such as telephony
+ * or VoIP.
+ */
+ public final static int USAGE_VOICE_COMMUNICATION = 2;
+ /**
+ * Usage value to use when the usage is in-call signalling, such as with
+ * a "busy" beep, or DTMF tones.
+ */
+ public final static int USAGE_VOICE_COMMUNICATION_SIGNALLING = 3;
+ /**
+ * Usage value to use when the usage is an alarm (e.g. wake-up alarm).
+ */
+ public final static int USAGE_ALARM = 4;
+ /**
+ * Usage value to use when the usage is notification. See other
+ * notification usages for more specialized uses.
+ */
+ public final static int USAGE_NOTIFICATION = 5;
+ /**
+ * Usage value to use when the usage is telephony ringtone.
+ */
+ public final static int USAGE_NOTIFICATION_RINGTONE = 6;
+ /**
+ * Usage value to use when the usage is a request to enter/end a
+ * communication, such as a VoIP communication or video-conference.
+ */
+ public final static int USAGE_NOTIFICATION_COMMUNICATION_REQUEST = 7;
+ /**
+ * Usage value to use when the usage is notification for an "instant"
+ * communication such as a chat, or SMS.
+ */
+ public final static int USAGE_NOTIFICATION_COMMUNICATION_INSTANT = 8;
+ /**
+ * Usage value to use when the usage is notification for a
+ * non-immediate type of communication such as e-mail.
+ */
+ public final static int USAGE_NOTIFICATION_COMMUNICATION_DELAYED = 9;
+ /**
+ * Usage value to use when the usage is to attract the user's attention,
+ * such as a reminder or low battery warning.
+ */
+ public final static int USAGE_NOTIFICATION_EVENT = 10;
+ /**
+ * Usage value to use when the usage is for accessibility, such as with
+ * a screen reader.
+ */
+ public final static int USAGE_ASSISTANCE_ACCESSIBILITY = 11;
+ /**
+ * Usage value to use when the usage is driving or navigation directions.
+ */
+ public final static int USAGE_ASSISTANCE_NAVIGATION_GUIDANCE = 12;
+ /**
+ * Usage value to use when the usage is sonification, such as with user
+ * interface sounds.
+ */
+ public final static int USAGE_ASSISTANCE_SONIFICATION = 13;
+ /**
+ * Usage value to use when the usage is for game audio.
+ */
+ public final static int USAGE_GAME = 14;
+ /**
+ * @hide
+ * Usage value to use when feeding audio to the platform and replacing "traditional" audio
+ * source, such as audio capture devices.
+ */
+ public final static int USAGE_VIRTUAL_SOURCE = 15;
+ /**
+ * Usage value to use for audio responses to user queries, audio instructions or help
+ * utterances.
+ */
+ public final static int USAGE_ASSISTANT = 16;
+
+ /**
+ * IMPORTANT: when adding new usage types, add them to SDK_USAGES and update SUPPRESSIBLE_USAGES
+ * if applicable.
+ */
+
+ /**
+ * @hide
+ * Denotes a usage for notifications that do not expect immediate intervention from the user,
+ * will be muted when the Zen mode disables notifications
+ * @see #SUPPRESSIBLE_USAGES
+ */
+ public final static int SUPPRESSIBLE_NOTIFICATION = 1;
+ /**
+ * @hide
+ * Denotes a usage for notifications that do expect immediate intervention from the user,
+ * will be muted when the Zen mode disables calls
+ * @see #SUPPRESSIBLE_USAGES
+ */
+ public final static int SUPPRESSIBLE_CALL = 2;
+ /**
+ * @hide
+ * Denotes a usage that is never going to be muted, even in Total Silence.
+ * @see #SUPPRESSIBLE_USAGES
+ */
+ public final static int SUPPRESSIBLE_NEVER = 3;
+
+ /**
+ * @hide
+ * Array of all usage types for calls and notifications to assign the suppression behavior,
+ * used by the Zen mode restrictions.
+ * @see com.android.server.notification.ZenModeHelper
+ */
+ public static final SparseIntArray SUPPRESSIBLE_USAGES;
+
+ static {
+ SUPPRESSIBLE_USAGES = new SparseIntArray();
+ SUPPRESSIBLE_USAGES.put(USAGE_NOTIFICATION, SUPPRESSIBLE_NOTIFICATION);
+ SUPPRESSIBLE_USAGES.put(USAGE_NOTIFICATION_RINGTONE, SUPPRESSIBLE_CALL);
+ SUPPRESSIBLE_USAGES.put(USAGE_NOTIFICATION_COMMUNICATION_REQUEST,SUPPRESSIBLE_CALL);
+ SUPPRESSIBLE_USAGES.put(USAGE_NOTIFICATION_COMMUNICATION_INSTANT,SUPPRESSIBLE_NOTIFICATION);
+ SUPPRESSIBLE_USAGES.put(USAGE_NOTIFICATION_COMMUNICATION_DELAYED,SUPPRESSIBLE_NOTIFICATION);
+ SUPPRESSIBLE_USAGES.put(USAGE_NOTIFICATION_EVENT, SUPPRESSIBLE_NOTIFICATION);
+ SUPPRESSIBLE_USAGES.put(USAGE_ASSISTANCE_ACCESSIBILITY, SUPPRESSIBLE_NEVER);
+ SUPPRESSIBLE_USAGES.put(USAGE_VOICE_COMMUNICATION, SUPPRESSIBLE_NEVER);
+ }
+
+ /**
+ * @hide
+ * Array of all usage types exposed in the SDK that applications can use.
+ */
+ public final static int[] SDK_USAGES = {
+ USAGE_UNKNOWN,
+ USAGE_MEDIA,
+ USAGE_VOICE_COMMUNICATION,
+ USAGE_VOICE_COMMUNICATION_SIGNALLING,
+ USAGE_ALARM,
+ USAGE_NOTIFICATION,
+ USAGE_NOTIFICATION_RINGTONE,
+ USAGE_NOTIFICATION_COMMUNICATION_REQUEST,
+ USAGE_NOTIFICATION_COMMUNICATION_INSTANT,
+ USAGE_NOTIFICATION_COMMUNICATION_DELAYED,
+ USAGE_NOTIFICATION_EVENT,
+ USAGE_ASSISTANCE_ACCESSIBILITY,
+ USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+ USAGE_ASSISTANCE_SONIFICATION,
+ USAGE_GAME,
+ USAGE_ASSISTANT,
+ };
+
+ /**
+ * Flag defining a behavior where the audibility of the sound will be ensured by the system.
+ */
+ public final static int FLAG_AUDIBILITY_ENFORCED = 0x1 << 0;
+ /**
+ * @hide
+ * Flag defining a behavior where the playback of the sound is ensured without
+ * degradation only when going to a secure sink.
+ */
+ // FIXME not guaranteed yet
+ // TODO add in FLAG_ALL_PUBLIC when supported and in public API
+ public final static int FLAG_SECURE = 0x1 << 1;
+ /**
+ * @hide
+ * Flag to enable when the stream is associated with SCO usage.
+ * Internal use only for dealing with legacy STREAM_BLUETOOTH_SCO
+ */
+ public final static int FLAG_SCO = 0x1 << 2;
+ /**
+ * @hide
+ * Flag defining a behavior where the system ensures that the playback of the sound will
+ * be compatible with its use as a broadcast for surrounding people and/or devices.
+ * Ensures audibility with no or minimal post-processing applied.
+ */
+ @SystemApi
+ public final static int FLAG_BEACON = 0x1 << 3;
+
+ /**
+ * Flag requesting the use of an output stream supporting hardware A/V synchronization.
+ */
+ public final static int FLAG_HW_AV_SYNC = 0x1 << 4;
+
+ /**
+ * @hide
+ * Flag requesting capture from the source used for hardware hotword detection.
+ * To be used with capture preset MediaRecorder.AudioSource.HOTWORD or
+ * MediaRecorder.AudioSource.VOICE_RECOGNITION.
+ */
+ @SystemApi
+ public final static int FLAG_HW_HOTWORD = 0x1 << 5;
+
+ /**
+ * @hide
+ * Flag requesting audible playback even under limited interruptions.
+ */
+ @SystemApi
+ public final static int FLAG_BYPASS_INTERRUPTION_POLICY = 0x1 << 6;
+
+ /**
+ * @hide
+ * Flag requesting audible playback even when the underlying stream is muted.
+ */
+ @SystemApi
+ public final static int FLAG_BYPASS_MUTE = 0x1 << 7;
+
+ /**
+ * Flag requesting a low latency path when creating an AudioTrack.
+ * When using this flag, the sample rate must match the native sample rate
+ * of the device. Effects processing is also unavailable.
+ *
+ * Note that if this flag is used without specifying a bufferSizeInBytes then the
+ * AudioTrack's actual buffer size may be too small. It is recommended that a fairly
+ * large buffer should be specified when the AudioTrack is created.
+ * Then the actual size can be reduced by calling
+ * {@link AudioTrack#setBufferSizeInFrames(int)}. The buffer size can be optimized
+ * by lowering it after each write() call until the audio glitches, which is detected by calling
+ * {@link AudioTrack#getUnderrunCount()}. Then the buffer size can be increased
+ * until there are no glitches.
+ * This tuning step should be done while playing silence.
+ * This technique provides a compromise between latency and glitch rate.
+ *
+ * @deprecated Use {@link AudioTrack.Builder#setPerformanceMode(int)} with
+ * {@link AudioTrack#PERFORMANCE_MODE_LOW_LATENCY} to control performance.
+ */
+ public final static int FLAG_LOW_LATENCY = 0x1 << 8;
+
+ /**
+ * @hide
+ * Flag requesting a deep buffer path when creating an {@code AudioTrack}.
+ *
+ * A deep buffer path, if available, may consume less power and is
+ * suitable for media playback where latency is not a concern.
+ * Use {@link AudioTrack.Builder#setPerformanceMode(int)} with
+ * {@link AudioTrack#PERFORMANCE_MODE_POWER_SAVING} to enable.
+ */
+ public final static int FLAG_DEEP_BUFFER = 0x1 << 9;
+
+ private final static int FLAG_ALL = FLAG_AUDIBILITY_ENFORCED | FLAG_SECURE | FLAG_SCO |
+ FLAG_BEACON | FLAG_HW_AV_SYNC | FLAG_HW_HOTWORD | FLAG_BYPASS_INTERRUPTION_POLICY |
+ FLAG_BYPASS_MUTE | FLAG_LOW_LATENCY | FLAG_DEEP_BUFFER;
+ private final static int FLAG_ALL_PUBLIC = FLAG_AUDIBILITY_ENFORCED |
+ FLAG_HW_AV_SYNC | FLAG_LOW_LATENCY;
+
+ private int mUsage = USAGE_UNKNOWN;
+ private int mContentType = CONTENT_TYPE_UNKNOWN;
+ private int mSource = MediaRecorder.AudioSource.AUDIO_SOURCE_INVALID;
+ private int mFlags = 0x0;
+ private HashSet<String> mTags;
+ private String mFormattedTags;
+ private Bundle mBundle; // lazy-initialized, may be null
+
    /** Instances are created via {@link Builder} or by unparcelling; fields keep their defaults. */
    private AudioAttributes() {
    }
+
+ /**
+ * Return the content type.
+ * @return one of the values that can be set in {@link Builder#setContentType(int)}
+ */
+ public int getContentType() {
+ return mContentType;
+ }
+
+ /**
+ * Return the usage.
+ * @return one of the values that can be set in {@link Builder#setUsage(int)}
+ */
+ public int getUsage() {
+ return mUsage;
+ }
+
+ /**
+ * @hide
+ * Return the capture preset.
+ * @return one of the values that can be set in {@link Builder#setCapturePreset(int)} or a
+ * negative value if none has been set.
+ */
+ @SystemApi
+ public int getCapturePreset() {
+ return mSource;
+ }
+
+ /**
+ * Return the flags.
+ * @return a combined mask of all flags
+ */
+ public int getFlags() {
+ // only return the flags that are public
+ return (mFlags & (FLAG_ALL_PUBLIC));
+ }
+
+ /**
+ * @hide
+ * Return all the flags, even the non-public ones.
+ * Internal use only
+ * @return a combined mask of all flags
+ */
+ @SystemApi
+ public int getAllFlags() {
+ return (mFlags & FLAG_ALL);
+ }
+
+ /**
+ * @hide
+ * Return the Bundle of data.
+ * @return a copy of the Bundle for this instance, may be null.
+ */
+ @SystemApi
+ public Bundle getBundle() {
+ if (mBundle == null) {
+ return mBundle;
+ } else {
+ return new Bundle(mBundle);
+ }
+ }
+
+ /**
+ * @hide
+ * Return the set of tags.
+ * @return a read-only set of all tags stored as strings.
+ */
+ public Set<String> getTags() {
+ return Collections.unmodifiableSet(mTags);
+ }
+
+ /**
+ * Builder class for {@link AudioAttributes} objects.
+ * <p> Here is an example where <code>Builder</code> is used to define the
+ * {@link AudioAttributes} to be used by a new <code>AudioTrack</code> instance:
+ *
+ * <pre class="prettyprint">
+ * AudioTrack myTrack = new AudioTrack(
+ * new AudioAttributes.Builder()
+ * .setUsage(AudioAttributes.USAGE_MEDIA)
+ * .setContentType(AudioAttributes.CONTENT_TYPE_MUSIC)
+ * .build(),
+ * myFormat, myBuffSize, AudioTrack.MODE_STREAM, mySession);
+ * </pre>
+ *
+ * <p>By default all types of information (usage, content type, flags) conveyed by an
+ * <code>AudioAttributes</code> instance are set to "unknown". Unknown information will be
+ * interpreted as a default value that is dependent on the context of use, for instance a
+ * {@link MediaPlayer} will use a default usage of {@link AudioAttributes#USAGE_MEDIA}.
+ */
+ public static class Builder {
+ private int mUsage = USAGE_UNKNOWN;
+ private int mContentType = CONTENT_TYPE_UNKNOWN;
+ private int mSource = MediaRecorder.AudioSource.AUDIO_SOURCE_INVALID;
+ private int mFlags = 0x0;
+ private HashSet<String> mTags = new HashSet<String>();
+ private Bundle mBundle;
+
+ /**
+ * Constructs a new Builder with the defaults.
+ * By default, usage and content type are respectively {@link AudioAttributes#USAGE_UNKNOWN}
+ * and {@link AudioAttributes#CONTENT_TYPE_UNKNOWN}, and flags are 0. It is recommended to
+ * configure the usage (with {@link #setUsage(int)}) or deriving attributes from a legacy
+ * stream type (with {@link #setLegacyStreamType(int)}) before calling {@link #build()}
+ * to override any default playback behavior in terms of routing and volume management.
+ */
+ public Builder() {
+ }
+
+ /**
+ * Constructs a new Builder from a given AudioAttributes
+ * @param aa the AudioAttributes object whose data will be reused in the new Builder.
+ */
+ @SuppressWarnings("unchecked") // for cloning of mTags
+ public Builder(AudioAttributes aa) {
+ mUsage = aa.mUsage;
+ mContentType = aa.mContentType;
+ mFlags = aa.mFlags;
+ mTags = (HashSet<String>) aa.mTags.clone();
+ }
+
+ /**
+ * Combines all of the attributes that have been set and return a new
+ * {@link AudioAttributes} object.
+ * @return a new {@link AudioAttributes} object
+ */
+ @SuppressWarnings("unchecked") // for cloning of mTags
+ public AudioAttributes build() {
+ AudioAttributes aa = new AudioAttributes();
+ aa.mContentType = mContentType;
+ aa.mUsage = mUsage;
+ aa.mSource = mSource;
+ aa.mFlags = mFlags;
+ aa.mTags = (HashSet<String>) mTags.clone();
+ aa.mFormattedTags = TextUtils.join(";", mTags);
+ if (mBundle != null) {
+ aa.mBundle = new Bundle(mBundle);
+ }
+ return aa;
+ }
+
+ /**
+ * Sets the attribute describing what is the intended use of the the audio signal,
+ * such as alarm or ringtone.
+ * @param usage one of {@link AudioAttributes#USAGE_UNKNOWN},
+ * {@link AudioAttributes#USAGE_MEDIA},
+ * {@link AudioAttributes#USAGE_VOICE_COMMUNICATION},
+ * {@link AudioAttributes#USAGE_VOICE_COMMUNICATION_SIGNALLING},
+ * {@link AudioAttributes#USAGE_ALARM}, {@link AudioAttributes#USAGE_NOTIFICATION},
+ * {@link AudioAttributes#USAGE_NOTIFICATION_RINGTONE},
+ * {@link AudioAttributes#USAGE_NOTIFICATION_COMMUNICATION_REQUEST},
+ * {@link AudioAttributes#USAGE_NOTIFICATION_COMMUNICATION_INSTANT},
+ * {@link AudioAttributes#USAGE_NOTIFICATION_COMMUNICATION_DELAYED},
+ * {@link AudioAttributes#USAGE_NOTIFICATION_EVENT},
+ * {@link AudioAttributes#USAGE_ASSISTANT},
+ * {@link AudioAttributes#USAGE_ASSISTANCE_ACCESSIBILITY},
+ * {@link AudioAttributes#USAGE_ASSISTANCE_NAVIGATION_GUIDANCE},
+ * {@link AudioAttributes#USAGE_ASSISTANCE_SONIFICATION},
+ * {@link AudioAttributes#USAGE_GAME}.
+ * @return the same Builder instance.
+ */
+ public Builder setUsage(@AttributeUsage int usage) {
+ switch (usage) {
+ case USAGE_UNKNOWN:
+ case USAGE_MEDIA:
+ case USAGE_VOICE_COMMUNICATION:
+ case USAGE_VOICE_COMMUNICATION_SIGNALLING:
+ case USAGE_ALARM:
+ case USAGE_NOTIFICATION:
+ case USAGE_NOTIFICATION_RINGTONE:
+ case USAGE_NOTIFICATION_COMMUNICATION_REQUEST:
+ case USAGE_NOTIFICATION_COMMUNICATION_INSTANT:
+ case USAGE_NOTIFICATION_COMMUNICATION_DELAYED:
+ case USAGE_NOTIFICATION_EVENT:
+ case USAGE_ASSISTANCE_ACCESSIBILITY:
+ case USAGE_ASSISTANCE_NAVIGATION_GUIDANCE:
+ case USAGE_ASSISTANCE_SONIFICATION:
+ case USAGE_GAME:
+ case USAGE_VIRTUAL_SOURCE:
+ case USAGE_ASSISTANT:
+ mUsage = usage;
+ break;
+ default:
+ mUsage = USAGE_UNKNOWN;
+ }
+ return this;
+ }
+
+ /**
+ * Sets the attribute describing the content type of the audio signal, such as speech,
+ * or music.
+ * @param contentType the content type values, one of
+ * {@link AudioAttributes#CONTENT_TYPE_MOVIE},
+ * {@link AudioAttributes#CONTENT_TYPE_MUSIC},
+ * {@link AudioAttributes#CONTENT_TYPE_SONIFICATION},
+ * {@link AudioAttributes#CONTENT_TYPE_SPEECH},
+ * {@link AudioAttributes#CONTENT_TYPE_UNKNOWN}.
+ * @return the same Builder instance.
+ */
+ public Builder setContentType(@AttributeContentType int contentType) {
+ switch (contentType) {
+ case CONTENT_TYPE_UNKNOWN:
+ case CONTENT_TYPE_MOVIE:
+ case CONTENT_TYPE_MUSIC:
+ case CONTENT_TYPE_SONIFICATION:
+ case CONTENT_TYPE_SPEECH:
+ mContentType = contentType;
+ break;
+ default:
+ mUsage = CONTENT_TYPE_UNKNOWN;
+ }
+ return this;
+ }
+
+ /**
+ * Sets the combination of flags.
+ *
+ * This is a bitwise OR with the existing flags.
+ * @param flags a combination of {@link AudioAttributes#FLAG_AUDIBILITY_ENFORCED},
+ * {@link AudioAttributes#FLAG_HW_AV_SYNC}.
+ * @return the same Builder instance.
+ */
+ public Builder setFlags(int flags) {
+ flags &= AudioAttributes.FLAG_ALL;
+ mFlags |= flags;
+ return this;
+ }
+
+ /**
+ * @hide
+ * Replaces flags.
+ * @param flags any combination of {@link AudioAttributes#FLAG_ALL}.
+ * @return the same Builder instance.
+ */
+ public Builder replaceFlags(int flags) {
+ mFlags = flags & AudioAttributes.FLAG_ALL;
+ return this;
+ }
+
+ /**
+ * @hide
+ * Adds a Bundle of data
+ * @param bundle a non-null Bundle
+ * @return the same builder instance
+ */
+ @SystemApi
+ public Builder addBundle(@NonNull Bundle bundle) {
+ if (bundle == null) {
+ throw new IllegalArgumentException("Illegal null bundle");
+ }
+ if (mBundle == null) {
+ mBundle = new Bundle(bundle);
+ } else {
+ mBundle.putAll(bundle);
+ }
+ return this;
+ }
+
+ /**
+ * @hide
+ * Add a custom tag stored as a string
+ * @param tag
+ * @return the same Builder instance.
+ */
+ public Builder addTag(String tag) {
+ mTags.add(tag);
+ return this;
+ }
+
+ /**
+ * Sets attributes as inferred from the legacy stream types.
+ * Use this method when building an {@link AudioAttributes} instance to initialize some of
+ * the attributes by information derived from a legacy stream type.
+ * @param streamType one of {@link AudioManager#STREAM_VOICE_CALL},
+ * {@link AudioManager#STREAM_SYSTEM}, {@link AudioManager#STREAM_RING},
+ * {@link AudioManager#STREAM_MUSIC}, {@link AudioManager#STREAM_ALARM},
+ * or {@link AudioManager#STREAM_NOTIFICATION}.
+ * @return the same Builder instance.
+ */
+ public Builder setLegacyStreamType(int streamType) {
+ if (streamType == AudioManager.STREAM_ACCESSIBILITY) {
+ throw new IllegalArgumentException("STREAM_ACCESSIBILITY is not a legacy stream "
+ + "type that was used for audio playback");
+ }
+ return setInternalLegacyStreamType(streamType);
+ }
+
+ /**
+ * @hide
+ * For internal framework use only, enables building from hidden stream types.
+ * @param streamType
+ * @return the same Builder instance.
+ */
+ public Builder setInternalLegacyStreamType(int streamType) {
+ switch(streamType) {
+ case AudioSystem.STREAM_VOICE_CALL:
+ mContentType = CONTENT_TYPE_SPEECH;
+ break;
+ case AudioSystem.STREAM_SYSTEM_ENFORCED:
+ mFlags |= FLAG_AUDIBILITY_ENFORCED;
+ // intended fall through, attributes in common with STREAM_SYSTEM
+ case AudioSystem.STREAM_SYSTEM:
+ mContentType = CONTENT_TYPE_SONIFICATION;
+ break;
+ case AudioSystem.STREAM_RING:
+ mContentType = CONTENT_TYPE_SONIFICATION;
+ break;
+ case AudioSystem.STREAM_MUSIC:
+ mContentType = CONTENT_TYPE_MUSIC;
+ break;
+ case AudioSystem.STREAM_ALARM:
+ mContentType = CONTENT_TYPE_SONIFICATION;
+ break;
+ case AudioSystem.STREAM_NOTIFICATION:
+ mContentType = CONTENT_TYPE_SONIFICATION;
+ break;
+ case AudioSystem.STREAM_BLUETOOTH_SCO:
+ mContentType = CONTENT_TYPE_SPEECH;
+ mFlags |= FLAG_SCO;
+ break;
+ case AudioSystem.STREAM_DTMF:
+ mContentType = CONTENT_TYPE_SONIFICATION;
+ break;
+ case AudioSystem.STREAM_TTS:
+ mContentType = CONTENT_TYPE_SONIFICATION;
+ break;
+ case AudioSystem.STREAM_ACCESSIBILITY:
+ mContentType = CONTENT_TYPE_SPEECH;
+ break;
+ default:
+ Log.e(TAG, "Invalid stream type " + streamType + " for AudioAttributes");
+ }
+ mUsage = usageForStreamType(streamType);
+ return this;
+ }
+
+ /**
+ * @hide
+ * Sets the capture preset.
+ * Use this audio attributes configuration method when building an {@link AudioRecord}
+ * instance with {@link AudioRecord#AudioRecord(AudioAttributes, AudioFormat, int)}.
+ * @param preset one of {@link MediaRecorder.AudioSource#DEFAULT},
+ * {@link MediaRecorder.AudioSource#MIC}, {@link MediaRecorder.AudioSource#CAMCORDER},
+ * {@link MediaRecorder.AudioSource#VOICE_RECOGNITION},
+ * {@link MediaRecorder.AudioSource#VOICE_COMMUNICATION} or
+ * {@link MediaRecorder.AudioSource#UNPROCESSED}
+ * @return the same Builder instance.
+ */
+ @SystemApi
+ public Builder setCapturePreset(int preset) {
+ switch (preset) {
+ case MediaRecorder.AudioSource.DEFAULT:
+ case MediaRecorder.AudioSource.MIC:
+ case MediaRecorder.AudioSource.CAMCORDER:
+ case MediaRecorder.AudioSource.VOICE_RECOGNITION:
+ case MediaRecorder.AudioSource.VOICE_COMMUNICATION:
+ case MediaRecorder.AudioSource.UNPROCESSED:
+ mSource = preset;
+ break;
+ default:
+ Log.e(TAG, "Invalid capture preset " + preset + " for AudioAttributes");
+ }
+ return this;
+ }
+
+ /**
+ * @hide
+ * Same as {@link #setCapturePreset(int)} but authorizes the use of HOTWORD,
+ * REMOTE_SUBMIX and RADIO_TUNER.
+ * @param preset
+ * @return the same Builder instance.
+ */
+ @SystemApi
+ public Builder setInternalCapturePreset(int preset) {
+ if ((preset == MediaRecorder.AudioSource.HOTWORD)
+ || (preset == MediaRecorder.AudioSource.REMOTE_SUBMIX)
+ || (preset == MediaRecorder.AudioSource.RADIO_TUNER)) {
+ mSource = preset;
+ } else {
+ setCapturePreset(preset);
+ }
+ return this;
+ }
+ };
+
    /** The flattened representation contains no special objects (e.g. file descriptors). */
    @Override
    public int describeContents() {
        return 0;
    }
+
+ /**
+ * @hide
+ * Used to indicate that when parcelling, the tags should be parcelled through the flattened
+ * formatted string, not through the array of strings.
+ * Keep in sync with frameworks/av/media/libmediaplayerservice/MediaPlayerService.cpp
+ * see definition of kAudioAttributesMarshallTagFlattenTags
+ */
+ public final static int FLATTEN_TAGS = 0x1;
+
+ private final static int ATTR_PARCEL_IS_NULL_BUNDLE = -1977;
+ private final static int ATTR_PARCEL_IS_VALID_BUNDLE = 1980;
+
+ /**
+ * When adding tags for writeToParcel(Parcel, int), add them in the list of flags (| NEW_FLAG)
+ */
+ private final static int ALL_PARCEL_FLAGS = FLATTEN_TAGS;
+ @Override
+ public void writeToParcel(Parcel dest, int flags) {
+ dest.writeInt(mUsage);
+ dest.writeInt(mContentType);
+ dest.writeInt(mSource);
+ dest.writeInt(mFlags);
+ dest.writeInt(flags & ALL_PARCEL_FLAGS);
+ if ((flags & FLATTEN_TAGS) == 0) {
+ String[] tagsArray = new String[mTags.size()];
+ mTags.toArray(tagsArray);
+ dest.writeStringArray(tagsArray);
+ } else if ((flags & FLATTEN_TAGS) == FLATTEN_TAGS) {
+ dest.writeString(mFormattedTags);
+ }
+ if (mBundle == null) {
+ dest.writeInt(ATTR_PARCEL_IS_NULL_BUNDLE);
+ } else {
+ dest.writeInt(ATTR_PARCEL_IS_VALID_BUNDLE);
+ dest.writeBundle(mBundle);
+ }
+ }
+
    /**
     * Rebuilds an instance from the representation produced by
     * {@link #writeToParcel(Parcel, int)}; the read order must mirror the write order.
     */
    private AudioAttributes(Parcel in) {
        mUsage = in.readInt();
        mContentType = in.readInt();
        mSource = in.readInt();
        mFlags = in.readInt();
        // The parcelling flags were written after the attribute flags.
        boolean hasFlattenedTags = ((in.readInt() & FLATTEN_TAGS) == FLATTEN_TAGS);
        mTags = new HashSet<String>();
        if (hasFlattenedTags) {
            // NOTE(review): new String(...) is a redundant copy and would NPE if the
            // parcelled string were null — presumably never null in practice; confirm.
            mFormattedTags = new String(in.readString());
            mTags.add(mFormattedTags);
        } else {
            String[] tagsArray = in.readStringArray();
            for (int i = tagsArray.length - 1 ; i >= 0 ; i--) {
                mTags.add(tagsArray[i]);
            }
            mFormattedTags = TextUtils.join(";", mTags);
        }
        switch (in.readInt()) {
            case ATTR_PARCEL_IS_NULL_BUNDLE:
                mBundle = null;
                break;
            case ATTR_PARCEL_IS_VALID_BUNDLE:
                mBundle = new Bundle(in.readBundle());
                break;
            default:
                Log.e(TAG, "Illegal value unmarshalling AudioAttributes, can't initialize bundle");
        }
    }
+
+ public static final Parcelable.Creator<AudioAttributes> CREATOR
+ = new Parcelable.Creator<AudioAttributes>() {
+ /**
+ * Rebuilds an AudioAttributes previously stored with writeToParcel().
+ * @param p Parcel object to read the AudioAttributes from
+ * @return a new AudioAttributes created from the data in the parcel
+ */
+ public AudioAttributes createFromParcel(Parcel p) {
+ return new AudioAttributes(p);
+ }
+ public AudioAttributes[] newArray(int size) {
+ return new AudioAttributes[size];
+ }
+ };
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ AudioAttributes that = (AudioAttributes) o;
+
+ return ((mContentType == that.mContentType)
+ && (mFlags == that.mFlags)
+ && (mSource == that.mSource)
+ && (mUsage == that.mUsage)
+ //mFormattedTags is never null due to assignment in Builder or unmarshalling
+ && (mFormattedTags.equals(that.mFormattedTags)));
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(mContentType, mFlags, mSource, mUsage, mFormattedTags, mBundle);
+ }
+
+ @Override
+ public String toString () {
+ return new String("AudioAttributes:"
+ + " usage=" + usageToString()
+ + " content=" + contentTypeToString()
+ + " flags=0x" + Integer.toHexString(mFlags).toUpperCase()
+ + " tags=" + mFormattedTags
+ + " bundle=" + (mBundle == null ? "null" : mBundle.toString()));
+ }
+
    /** @hide Human-readable name of this instance's usage attribute, for logging. */
    public String usageToString() {
        return usageToString(mUsage);
    }
+
+ /** @hide */
+ public static String usageToString(int usage) {
+ switch(usage) {
+ case USAGE_UNKNOWN:
+ return new String("USAGE_UNKNOWN");
+ case USAGE_MEDIA:
+ return new String("USAGE_MEDIA");
+ case USAGE_VOICE_COMMUNICATION:
+ return new String("USAGE_VOICE_COMMUNICATION");
+ case USAGE_VOICE_COMMUNICATION_SIGNALLING:
+ return new String("USAGE_VOICE_COMMUNICATION_SIGNALLING");
+ case USAGE_ALARM:
+ return new String("USAGE_ALARM");
+ case USAGE_NOTIFICATION:
+ return new String("USAGE_NOTIFICATION");
+ case USAGE_NOTIFICATION_RINGTONE:
+ return new String("USAGE_NOTIFICATION_RINGTONE");
+ case USAGE_NOTIFICATION_COMMUNICATION_REQUEST:
+ return new String("USAGE_NOTIFICATION_COMMUNICATION_REQUEST");
+ case USAGE_NOTIFICATION_COMMUNICATION_INSTANT:
+ return new String("USAGE_NOTIFICATION_COMMUNICATION_INSTANT");
+ case USAGE_NOTIFICATION_COMMUNICATION_DELAYED:
+ return new String("USAGE_NOTIFICATION_COMMUNICATION_DELAYED");
+ case USAGE_NOTIFICATION_EVENT:
+ return new String("USAGE_NOTIFICATION_EVENT");
+ case USAGE_ASSISTANCE_ACCESSIBILITY:
+ return new String("USAGE_ASSISTANCE_ACCESSIBILITY");
+ case USAGE_ASSISTANCE_NAVIGATION_GUIDANCE:
+ return new String("USAGE_ASSISTANCE_NAVIGATION_GUIDANCE");
+ case USAGE_ASSISTANCE_SONIFICATION:
+ return new String("USAGE_ASSISTANCE_SONIFICATION");
+ case USAGE_GAME:
+ return new String("USAGE_GAME");
+ case USAGE_ASSISTANT:
+ return new String("USAGE_ASSISTANT");
+ default:
+ return new String("unknown usage " + usage);
+ }
+ }
+
+ /** @hide */
+ public String contentTypeToString() {
+ switch(mContentType) {
+ case CONTENT_TYPE_UNKNOWN:
+ return new String("CONTENT_TYPE_UNKNOWN");
+ case CONTENT_TYPE_SPEECH: return new String("CONTENT_TYPE_SPEECH");
+ case CONTENT_TYPE_MUSIC: return new String("CONTENT_TYPE_MUSIC");
+ case CONTENT_TYPE_MOVIE: return new String("CONTENT_TYPE_MOVIE");
+ case CONTENT_TYPE_SONIFICATION: return new String("CONTENT_TYPE_SONIFICATION");
+ default: return new String("unknown content type " + mContentType);
+ }
+ }
+
+    /**
+     * Maps a legacy {@code AudioSystem.STREAM_*} stream type to the closest
+     * matching {@code USAGE_*} constant.
+     * Stream types without a dedicated usage (including STREAM_TTS) fall through
+     * to {@link #USAGE_UNKNOWN}.
+     */
+    private static int usageForStreamType(int streamType) {
+        switch(streamType) {
+            case AudioSystem.STREAM_VOICE_CALL:
+                return USAGE_VOICE_COMMUNICATION;
+            case AudioSystem.STREAM_SYSTEM_ENFORCED:
+            case AudioSystem.STREAM_SYSTEM:
+                return USAGE_ASSISTANCE_SONIFICATION;
+            case AudioSystem.STREAM_RING:
+                return USAGE_NOTIFICATION_RINGTONE;
+            case AudioSystem.STREAM_MUSIC:
+                return USAGE_MEDIA;
+            case AudioSystem.STREAM_ALARM:
+                return USAGE_ALARM;
+            case AudioSystem.STREAM_NOTIFICATION:
+                return USAGE_NOTIFICATION;
+            case AudioSystem.STREAM_BLUETOOTH_SCO:
+                return USAGE_VOICE_COMMUNICATION;
+            case AudioSystem.STREAM_DTMF:
+                return USAGE_VOICE_COMMUNICATION_SIGNALLING;
+            case AudioSystem.STREAM_ACCESSIBILITY:
+                return USAGE_ASSISTANCE_ACCESSIBILITY;
+            case AudioSystem.STREAM_TTS:
+            default:
+                return USAGE_UNKNOWN;
+        }
+    }
+
+    /**
+     * Returns the stream type matching this {@code AudioAttributes} instance for volume control.
+     * Use this method to derive the stream type needed to configure the volume
+     * control slider in an {@link android.app.Activity} with
+     * {@link android.app.Activity#setVolumeControlStream(int)} for playback conducted with these
+     * attributes.
+     * <BR>Do not use this method to set the stream type on an audio player object
+     * (e.g. {@link AudioTrack}, {@link MediaPlayer}) as this is deprecated,
+     * use {@code AudioAttributes} instead.
+     * @return a valid stream type for {@code Activity} or stream volume control that matches
+     * the attributes, or {@link AudioManager#USE_DEFAULT_STREAM_TYPE} if there isn't a direct
+     * match. Note that {@code USE_DEFAULT_STREAM_TYPE} is not a valid value
+     * for {@link AudioManager#setStreamVolume(int, int, int)}.
+     */
+    public int getVolumeControlStream() {
+        // Passing true selects the public-API behavior of the shared mapping:
+        // USAGE_UNKNOWN maps to USE_DEFAULT_STREAM_TYPE and unexpected usage
+        // values throw, instead of silently defaulting to STREAM_MUSIC.
+        return toVolumeStreamType(true /*fromGetVolumeControlStream*/, this);
+    }
+
+    /**
+     * @hide
+     * Only use to get which stream type should be used for volume control, NOT for audio playback
+     * (all audio playback APIs are supposed to take AudioAttributes as input parameters)
+     * @param aa non-null AudioAttributes.
+     * @return a valid stream type for volume control that matches the attributes.
+     */
+    public static int toLegacyStreamType(@NonNull AudioAttributes aa) {
+        // Passing false selects the internal/legacy behavior of the shared mapping:
+        // it never throws, and unmapped usage values default to STREAM_MUSIC.
+        return toVolumeStreamType(false /*fromGetVolumeControlStream*/, aa);
+    }
+
+    /**
+     * Common mapping from attributes to a stream type, shared by
+     * {@link #getVolumeControlStream()} and {@link #toLegacyStreamType(AudioAttributes)}.
+     * Flag-based mappings (audibility-enforced, SCO) take precedence over the
+     * usage-based mapping.
+     * @param fromGetVolumeControlStream true when serving the public volume-control API:
+     *     enforced/SCO flags and signalling then map to the user-facing stream
+     *     (SYSTEM / VOICE_CALL), USAGE_UNKNOWN maps to USE_DEFAULT_STREAM_TYPE, and an
+     *     unrecognized usage throws. When false (legacy playback mapping) the internal
+     *     streams (SYSTEM_ENFORCED / BLUETOOTH_SCO / DTMF) are returned and unknown
+     *     usage values fall back to STREAM_MUSIC without throwing.
+     * @param aa non-null attributes to map.
+     */
+    private static int toVolumeStreamType(boolean fromGetVolumeControlStream, AudioAttributes aa) {
+        // flags to stream type mapping
+        if ((aa.getFlags() & FLAG_AUDIBILITY_ENFORCED) == FLAG_AUDIBILITY_ENFORCED) {
+            return fromGetVolumeControlStream ?
+                    AudioSystem.STREAM_SYSTEM : AudioSystem.STREAM_SYSTEM_ENFORCED;
+        }
+        if ((aa.getFlags() & FLAG_SCO) == FLAG_SCO) {
+            return fromGetVolumeControlStream ?
+                    AudioSystem.STREAM_VOICE_CALL : AudioSystem.STREAM_BLUETOOTH_SCO;
+        }
+
+        // usage to stream type mapping
+        switch (aa.getUsage()) {
+            case USAGE_MEDIA:
+            case USAGE_GAME:
+            case USAGE_ASSISTANCE_NAVIGATION_GUIDANCE:
+            case USAGE_ASSISTANT:
+                return AudioSystem.STREAM_MUSIC;
+            case USAGE_ASSISTANCE_SONIFICATION:
+                return AudioSystem.STREAM_SYSTEM;
+            case USAGE_VOICE_COMMUNICATION:
+                return AudioSystem.STREAM_VOICE_CALL;
+            case USAGE_VOICE_COMMUNICATION_SIGNALLING:
+                return fromGetVolumeControlStream ?
+                        AudioSystem.STREAM_VOICE_CALL : AudioSystem.STREAM_DTMF;
+            case USAGE_ALARM:
+                return AudioSystem.STREAM_ALARM;
+            case USAGE_NOTIFICATION_RINGTONE:
+                return AudioSystem.STREAM_RING;
+            case USAGE_NOTIFICATION:
+            case USAGE_NOTIFICATION_COMMUNICATION_REQUEST:
+            case USAGE_NOTIFICATION_COMMUNICATION_INSTANT:
+            case USAGE_NOTIFICATION_COMMUNICATION_DELAYED:
+            case USAGE_NOTIFICATION_EVENT:
+                return AudioSystem.STREAM_NOTIFICATION;
+            case USAGE_ASSISTANCE_ACCESSIBILITY:
+                return AudioSystem.STREAM_ACCESSIBILITY;
+            case USAGE_UNKNOWN:
+                return fromGetVolumeControlStream ?
+                        AudioManager.USE_DEFAULT_STREAM_TYPE : AudioSystem.STREAM_MUSIC;
+            default:
+                if (fromGetVolumeControlStream) {
+                    throw new IllegalArgumentException("Unknown usage value " + aa.getUsage() +
+                            " in audio attributes");
+                } else {
+                    return AudioSystem.STREAM_MUSIC;
+                }
+        }
+    }
+
+    /** @hide
+     * Annotation restricting an int parameter or return value to the set of
+     * valid {@code USAGE_*} constants.
+     */
+    @IntDef({
+        USAGE_UNKNOWN,
+        USAGE_MEDIA,
+        USAGE_VOICE_COMMUNICATION,
+        USAGE_VOICE_COMMUNICATION_SIGNALLING,
+        USAGE_ALARM,
+        USAGE_NOTIFICATION,
+        USAGE_NOTIFICATION_RINGTONE,
+        USAGE_NOTIFICATION_COMMUNICATION_REQUEST,
+        USAGE_NOTIFICATION_COMMUNICATION_INSTANT,
+        USAGE_NOTIFICATION_COMMUNICATION_DELAYED,
+        USAGE_NOTIFICATION_EVENT,
+        USAGE_ASSISTANCE_ACCESSIBILITY,
+        USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+        USAGE_ASSISTANCE_SONIFICATION,
+        USAGE_GAME,
+        USAGE_ASSISTANT,
+    })
+    @Retention(RetentionPolicy.SOURCE)
+    public @interface AttributeUsage {}
+
+    /** @hide
+     * Annotation restricting an int parameter or return value to the set of
+     * valid {@code CONTENT_TYPE_*} constants.
+     */
+    @IntDef({
+        CONTENT_TYPE_UNKNOWN,
+        CONTENT_TYPE_SPEECH,
+        CONTENT_TYPE_MUSIC,
+        CONTENT_TYPE_MOVIE,
+        CONTENT_TYPE_SONIFICATION
+    })
+    @Retention(RetentionPolicy.SOURCE)
+    public @interface AttributeContentType {}
+}
diff --git a/android/media/AudioDeviceCallback.java b/android/media/AudioDeviceCallback.java
new file mode 100644
index 00000000..a5b1d240
--- /dev/null
+++ b/android/media/AudioDeviceCallback.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * AudioDeviceCallback defines the mechanism by which applications can receive notifications
+ * of audio device connection and disconnection events.
+ * @see AudioManager#registerAudioDeviceCallback(AudioDeviceCallback, android.os.Handler)
+ */
+public abstract class AudioDeviceCallback {
+    /**
+     * Called by the {@link AudioManager} to indicate that one or more audio devices have been
+     * connected.
+     * @param addedDevices An array of {@link AudioDeviceInfo} objects corresponding to any
+     * newly added audio devices.
+     */
+    public void onAudioDevicesAdded(AudioDeviceInfo[] addedDevices) {}
+
+    /**
+     * Called by the {@link AudioManager} to indicate that one or more audio devices have been
+     * disconnected.
+     * @param removedDevices An array of {@link AudioDeviceInfo} objects corresponding to any
+     * newly removed audio devices.
+     */
+    public void onAudioDevicesRemoved(AudioDeviceInfo[] removedDevices) {}
+}
diff --git a/android/media/AudioDeviceInfo.java b/android/media/AudioDeviceInfo.java
new file mode 100644
index 00000000..1b89c966
--- /dev/null
+++ b/android/media/AudioDeviceInfo.java
@@ -0,0 +1,342 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.NonNull;
+import android.util.SparseIntArray;
+
+import java.util.TreeSet;
+
+/**
+ * Class to provide information about the audio devices.
+ */
+public final class AudioDeviceInfo {
+
+    /**
+     * A device type associated with an unknown or uninitialized device.
+     */
+    public static final int TYPE_UNKNOWN = 0;
+    /**
+     * A device type describing the attached earphone speaker.
+     */
+    public static final int TYPE_BUILTIN_EARPIECE = 1;
+    /**
+     * A device type describing the speaker system (i.e. a mono speaker or stereo speakers) built
+     * in a device.
+     */
+    public static final int TYPE_BUILTIN_SPEAKER = 2;
+    /**
+     * A device type describing a headset, which is the combination of a headphones and microphone.
+     */
+    public static final int TYPE_WIRED_HEADSET = 3;
+    /**
+     * A device type describing a pair of wired headphones.
+     */
+    public static final int TYPE_WIRED_HEADPHONES = 4;
+    /**
+     * A device type describing an analog line-level connection.
+     */
+    public static final int TYPE_LINE_ANALOG = 5;
+    /**
+     * A device type describing a digital line connection (e.g. SPDIF).
+     */
+    public static final int TYPE_LINE_DIGITAL = 6;
+    /**
+     * A device type describing a Bluetooth device typically used for telephony.
+     */
+    public static final int TYPE_BLUETOOTH_SCO = 7;
+    /**
+     * A device type describing a Bluetooth device supporting the A2DP profile.
+     */
+    public static final int TYPE_BLUETOOTH_A2DP = 8;
+    /**
+     * A device type describing an HDMI connection.
+     */
+    public static final int TYPE_HDMI = 9;
+    /**
+     * A device type describing the Audio Return Channel of an HDMI connection.
+     */
+    public static final int TYPE_HDMI_ARC = 10;
+    /**
+     * A device type describing a USB audio device.
+     */
+    public static final int TYPE_USB_DEVICE = 11;
+    /**
+     * A device type describing a USB audio device in accessory mode.
+     */
+    public static final int TYPE_USB_ACCESSORY = 12;
+    /**
+     * A device type describing the audio device associated with a dock.
+     */
+    public static final int TYPE_DOCK = 13;
+    /**
+     * A device type associated with the transmission of audio signals over FM.
+     */
+    public static final int TYPE_FM = 14;
+    /**
+     * A device type describing the microphone(s) built in a device.
+     */
+    public static final int TYPE_BUILTIN_MIC = 15;
+    /**
+     * A device type for accessing the audio content transmitted over FM.
+     */
+    public static final int TYPE_FM_TUNER = 16;
+    /**
+     * A device type for accessing the audio content transmitted over the TV tuner system.
+     */
+    public static final int TYPE_TV_TUNER = 17;
+    /**
+     * A device type describing the transmission of audio signals over the telephony network.
+     */
+    public static final int TYPE_TELEPHONY = 18;
+    /**
+     * A device type describing the auxiliary line-level connectors.
+     */
+    public static final int TYPE_AUX_LINE = 19;
+    /**
+     * A device type connected over IP.
+     */
+    public static final int TYPE_IP = 20;
+    /**
+     * A type-agnostic device used for communication with external audio systems
+     */
+    public static final int TYPE_BUS = 21;
+    /**
+     * A device type describing a USB audio headset.
+     */
+    public static final int TYPE_USB_HEADSET = 22;
+
+    // The underlying system audio port this object wraps; every getter delegates to it.
+    private final AudioDevicePort mPort;
+
+    AudioDeviceInfo(AudioDevicePort port) {
+       mPort = port;
+    }
+
+    /**
+     * @return The internal device ID.
+     */
+    public int getId() {
+        return mPort.handle().id();
+    }
+
+    /**
+     * @return The human-readable name of the audio device.
+     */
+    public CharSequence getProductName() {
+        String portName = mPort.name();
+        // Fall back to the device model name when the port does not report a name.
+        return portName.length() != 0 ? portName : android.os.Build.MODEL;
+    }
+
+    /**
+     * @hide
+     * @return The "address" string of the device. This generally contains device-specific
+     * parameters.
+     */
+    public String getAddress() {
+        return mPort.address();
+    }
+
+    /**
+     * @return true if the audio device is a source for audio data (i.e. an input).
+     */
+    public boolean isSource() {
+        return mPort.role() == AudioPort.ROLE_SOURCE;
+    }
+
+    /**
+     * @return true if the audio device is a sink for audio data (i.e. an output).
+     */
+    public boolean isSink() {
+        return mPort.role() == AudioPort.ROLE_SINK;
+    }
+
+    /**
+     * @return An array of sample rates supported by the audio device.
+     *
+     * Note: an empty array indicates that the device supports arbitrary rates.
+     */
+    public @NonNull int[] getSampleRates() {
+        return mPort.samplingRates();
+    }
+
+    /**
+     * @return An array of channel position masks (e.g. {@link AudioFormat#CHANNEL_IN_STEREO},
+     * {@link AudioFormat#CHANNEL_OUT_7POINT1}) for which this audio device can be configured.
+     *
+     * @see AudioFormat
+     *
+     * Note: an empty array indicates that the device supports arbitrary channel masks.
+     */
+    public @NonNull int[] getChannelMasks() {
+        return mPort.channelMasks();
+    }
+
+    /**
+     * @return An array of channel index masks for which this audio device can be configured.
+     *
+     * @see AudioFormat
+     *
+     * Note: an empty array indicates that the device supports arbitrary channel index masks.
+     */
+    public @NonNull int[] getChannelIndexMasks() {
+        return mPort.channelIndexMasks();
+    }
+
+    /**
+     * @return An array of channel counts (1, 2, 4, ...) for which this audio device
+     * can be configured.
+     *
+     * Note: an empty array indicates that the device supports arbitrary channel counts.
+     * The result is the union of the counts implied by the position masks and the index
+     * masks, sorted ascending and de-duplicated (TreeSet).
+     */
+    public @NonNull int[] getChannelCounts() {
+        TreeSet<Integer> countSet = new TreeSet<Integer>();
+
+        // Channel Masks: position masks are interpreted per the device direction.
+        for (int mask : getChannelMasks()) {
+            countSet.add(isSink() ?
+                    AudioFormat.channelCountFromOutChannelMask(mask)
+                    : AudioFormat.channelCountFromInChannelMask(mask));
+        }
+
+        // Index Masks: each set bit is one channel.
+        for (int index_mask : getChannelIndexMasks()) {
+            countSet.add(Integer.bitCount(index_mask));
+        }
+
+        int[] counts = new int[countSet.size()];
+        int index = 0;
+        for (int count : countSet) {
+            counts[index++] = count;
+        }
+        return counts;
+    }
+
+    /**
+     * @return An array of audio encodings (e.g. {@link AudioFormat#ENCODING_PCM_16BIT},
+     * {@link AudioFormat#ENCODING_PCM_FLOAT}) supported by the audio device.
+     * <code>ENCODING_PCM_FLOAT</code> indicates the device supports more
+     * than 16 bits of integer precision. As there is no AudioFormat constant
+     * specifically defined for 24-bit PCM, the value <code>ENCODING_PCM_FLOAT</code>
+     * indicates that {@link AudioTrack} or {@link AudioRecord} can preserve at least 24 bits of
+     * integer precision to that device.
+     *
+     * @see AudioFormat
+     *
+     * Note: an empty array indicates that the device supports arbitrary encodings.
+     */
+    public @NonNull int[] getEncodings() {
+        return AudioFormat.filterPublicFormats(mPort.formats());
+    }
+
+    /**
+     * @return The device type identifier of the audio device (i.e. TYPE_BUILTIN_SPEAKER).
+     */
+    public int getType() {
+        return INT_TO_EXT_DEVICE_MAPPING.get(mPort.type(), TYPE_UNKNOWN);
+    }
+
+    /** @hide */
+    public static int convertDeviceTypeToInternalDevice(int deviceType) {
+        return EXT_TO_INT_DEVICE_MAPPING.get(deviceType, AudioSystem.DEVICE_NONE);
+    }
+
+    /** @hide */
+    public static int convertInternalDeviceToDeviceType(int intDevice) {
+        return INT_TO_EXT_DEVICE_MAPPING.get(intDevice, TYPE_UNKNOWN);
+    }
+
+    // Internal AudioSystem.DEVICE_* value -> public TYPE_* constant (many-to-one).
+    private static final SparseIntArray INT_TO_EXT_DEVICE_MAPPING;
+
+    // Public TYPE_* constant -> one representative internal AudioSystem.DEVICE_* value.
+    private static final SparseIntArray EXT_TO_INT_DEVICE_MAPPING;
+
+    static {
+        INT_TO_EXT_DEVICE_MAPPING = new SparseIntArray();
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_OUT_EARPIECE, TYPE_BUILTIN_EARPIECE);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_OUT_SPEAKER, TYPE_BUILTIN_SPEAKER);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_OUT_WIRED_HEADSET, TYPE_WIRED_HEADSET);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_OUT_WIRED_HEADPHONE, TYPE_WIRED_HEADPHONES);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_OUT_BLUETOOTH_SCO, TYPE_BLUETOOTH_SCO);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_OUT_BLUETOOTH_SCO_HEADSET, TYPE_BLUETOOTH_SCO);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_OUT_BLUETOOTH_SCO_CARKIT, TYPE_BLUETOOTH_SCO);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP, TYPE_BLUETOOTH_A2DP);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES, TYPE_BLUETOOTH_A2DP);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER, TYPE_BLUETOOTH_A2DP);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_OUT_HDMI, TYPE_HDMI);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_OUT_ANLG_DOCK_HEADSET, TYPE_DOCK);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_OUT_DGTL_DOCK_HEADSET, TYPE_DOCK);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_OUT_USB_ACCESSORY, TYPE_USB_ACCESSORY);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_OUT_USB_DEVICE, TYPE_USB_DEVICE);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_OUT_USB_HEADSET, TYPE_USB_HEADSET);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_OUT_TELEPHONY_TX, TYPE_TELEPHONY);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_OUT_LINE, TYPE_LINE_ANALOG);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_OUT_HDMI_ARC, TYPE_HDMI_ARC);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_OUT_SPDIF, TYPE_LINE_DIGITAL);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_OUT_FM, TYPE_FM);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_OUT_AUX_LINE, TYPE_AUX_LINE);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_OUT_IP, TYPE_IP);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_OUT_BUS, TYPE_BUS);
+
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_IN_BUILTIN_MIC, TYPE_BUILTIN_MIC);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_IN_BLUETOOTH_SCO_HEADSET, TYPE_BLUETOOTH_SCO);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_IN_WIRED_HEADSET, TYPE_WIRED_HEADSET);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_IN_HDMI, TYPE_HDMI);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_IN_TELEPHONY_RX, TYPE_TELEPHONY);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_IN_BACK_MIC, TYPE_BUILTIN_MIC);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_IN_ANLG_DOCK_HEADSET, TYPE_DOCK);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_IN_DGTL_DOCK_HEADSET, TYPE_DOCK);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_IN_USB_ACCESSORY, TYPE_USB_ACCESSORY);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_IN_USB_DEVICE, TYPE_USB_DEVICE);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_IN_USB_HEADSET, TYPE_USB_HEADSET);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_IN_FM_TUNER, TYPE_FM_TUNER);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_IN_TV_TUNER, TYPE_TV_TUNER);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_IN_LINE, TYPE_LINE_ANALOG);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_IN_SPDIF, TYPE_LINE_DIGITAL);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_IN_BLUETOOTH_A2DP, TYPE_BLUETOOTH_A2DP);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_IN_IP, TYPE_IP);
+        INT_TO_EXT_DEVICE_MAPPING.put(AudioSystem.DEVICE_IN_BUS, TYPE_BUS);
+
+        // not covered here, legacy
+        //AudioSystem.DEVICE_OUT_REMOTE_SUBMIX
+        //AudioSystem.DEVICE_IN_REMOTE_SUBMIX
+
+        // Reverse mapping. Where a public type has both input and output internal
+        // variants (headsets, docks, HDMI, ...), the output device is privileged.
+        EXT_TO_INT_DEVICE_MAPPING = new SparseIntArray();
+        EXT_TO_INT_DEVICE_MAPPING.put(TYPE_BUILTIN_EARPIECE, AudioSystem.DEVICE_OUT_EARPIECE);
+        EXT_TO_INT_DEVICE_MAPPING.put(TYPE_BUILTIN_SPEAKER, AudioSystem.DEVICE_OUT_SPEAKER);
+        EXT_TO_INT_DEVICE_MAPPING.put(TYPE_WIRED_HEADSET, AudioSystem.DEVICE_OUT_WIRED_HEADSET);
+        EXT_TO_INT_DEVICE_MAPPING.put(TYPE_WIRED_HEADPHONES, AudioSystem.DEVICE_OUT_WIRED_HEADPHONE);
+        EXT_TO_INT_DEVICE_MAPPING.put(TYPE_LINE_ANALOG, AudioSystem.DEVICE_OUT_LINE);
+        EXT_TO_INT_DEVICE_MAPPING.put(TYPE_LINE_DIGITAL, AudioSystem.DEVICE_OUT_SPDIF);
+        EXT_TO_INT_DEVICE_MAPPING.put(TYPE_BLUETOOTH_SCO, AudioSystem.DEVICE_OUT_BLUETOOTH_SCO);
+        EXT_TO_INT_DEVICE_MAPPING.put(TYPE_BLUETOOTH_A2DP, AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP);
+        EXT_TO_INT_DEVICE_MAPPING.put(TYPE_HDMI, AudioSystem.DEVICE_OUT_HDMI);
+        EXT_TO_INT_DEVICE_MAPPING.put(TYPE_HDMI_ARC, AudioSystem.DEVICE_OUT_HDMI_ARC);
+        EXT_TO_INT_DEVICE_MAPPING.put(TYPE_USB_DEVICE, AudioSystem.DEVICE_OUT_USB_DEVICE);
+        EXT_TO_INT_DEVICE_MAPPING.put(TYPE_USB_HEADSET, AudioSystem.DEVICE_OUT_USB_HEADSET);
+        EXT_TO_INT_DEVICE_MAPPING.put(TYPE_USB_ACCESSORY, AudioSystem.DEVICE_OUT_USB_ACCESSORY);
+        EXT_TO_INT_DEVICE_MAPPING.put(TYPE_DOCK, AudioSystem.DEVICE_OUT_ANLG_DOCK_HEADSET);
+        EXT_TO_INT_DEVICE_MAPPING.put(TYPE_FM, AudioSystem.DEVICE_OUT_FM);
+        EXT_TO_INT_DEVICE_MAPPING.put(TYPE_BUILTIN_MIC, AudioSystem.DEVICE_IN_BUILTIN_MIC);
+        EXT_TO_INT_DEVICE_MAPPING.put(TYPE_FM_TUNER, AudioSystem.DEVICE_IN_FM_TUNER);
+        EXT_TO_INT_DEVICE_MAPPING.put(TYPE_TV_TUNER, AudioSystem.DEVICE_IN_TV_TUNER);
+        EXT_TO_INT_DEVICE_MAPPING.put(TYPE_TELEPHONY, AudioSystem.DEVICE_OUT_TELEPHONY_TX);
+        EXT_TO_INT_DEVICE_MAPPING.put(TYPE_AUX_LINE, AudioSystem.DEVICE_OUT_AUX_LINE);
+        EXT_TO_INT_DEVICE_MAPPING.put(TYPE_IP, AudioSystem.DEVICE_OUT_IP);
+        EXT_TO_INT_DEVICE_MAPPING.put(TYPE_BUS, AudioSystem.DEVICE_OUT_BUS);
+    }
+}
+
diff --git a/android/media/AudioDevicePort.java b/android/media/AudioDevicePort.java
new file mode 100644
index 00000000..aea39a3a
--- /dev/null
+++ b/android/media/AudioDevicePort.java
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioSystem;
+
+/**
+ * The AudioDevicePort is a specialized type of AudioPort
+ * describing an input (e.g microphone) or output device (e.g speaker)
+ * of the system.
+ * An AudioDevicePort is an AudioPort controlled by the audio HAL, almost always a physical
+ * device at the boundary of the audio system.
+ * In addition to base audio port attributes, the device descriptor contains:
+ * - the device type (e.g AudioManager.DEVICE_OUT_SPEAKER)
+ * - the device address (e.g MAC address for A2DP sink).
+ * @see AudioPort
+ * @hide
+ */
+
+public class AudioDevicePort extends AudioPort {
+
+    private final int mType;
+    private final String mAddress;
+
+    AudioDevicePort(AudioHandle handle, String deviceName,
+            int[] samplingRates, int[] channelMasks, int[] channelIndexMasks,
+            int[] formats, AudioGain[] gains, int type, String address) {
+        // Input devices are audio sources, all others are sinks.
+        super(handle,
+             AudioManager.isInputDevice(type) ?
+                    AudioPort.ROLE_SOURCE : AudioPort.ROLE_SINK,
+             deviceName, samplingRates, channelMasks, channelIndexMasks, formats, gains);
+        mType = type;
+        mAddress = address;
+    }
+
+    /**
+     * Get the device type (e.g AudioManager.DEVICE_OUT_SPEAKER)
+     */
+    public int type() {
+        return mType;
+    }
+
+    /**
+     * Get the device address. Address format varies with the device type.
+     * - USB devices ({@link AudioManager#DEVICE_OUT_USB_DEVICE},
+     * {@link AudioManager#DEVICE_IN_USB_DEVICE}) use an address composed of the ALSA card number
+     * and device number: "card=2;device=1"
+     * - Bluetooth devices ({@link AudioManager#DEVICE_OUT_BLUETOOTH_SCO},
+     * {@link AudioManager#DEVICE_OUT_BLUETOOTH_SCO}, {@link AudioManager#DEVICE_OUT_BLUETOOTH_A2DP})
+     * use the MAC address of the bluetooth device in the form "00:11:22:AA:BB:CC" as reported by
+     * {@link BluetoothDevice#getAddress()}.
+     * - Devices that do not have an address will indicate an empty string "".
+     */
+    public String address() {
+        return mAddress;
+    }
+
+    /**
+     * Build a specific configuration of this audio device port for use by methods
+     * like AudioManager.connectAudioPatch().
+     */
+    public AudioDevicePortConfig buildConfig(int samplingRate, int channelMask, int format,
+            AudioGainConfig gain) {
+        return new AudioDevicePortConfig(this, samplingRate, channelMask, format, gain);
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (o == null || !(o instanceof AudioDevicePort)) {
+            return false;
+        }
+        AudioDevicePort other = (AudioDevicePort)o;
+        if (mType != other.type()) {
+            return false;
+        }
+        // Null-safe address comparison. The previous logic only guarded the
+        // (null, non-null) case and threw a NullPointerException when both
+        // addresses were null; two null addresses must compare as equal.
+        if (mAddress == null ? other.address() != null : !mAddress.equals(other.address())) {
+            return false;
+        }
+        return super.equals(o);
+    }
+
+    @Override
+    public String toString() {
+        // Render the numeric device type with the human-readable name matching
+        // the port direction.
+        String type = (mRole == ROLE_SOURCE ?
+                AudioSystem.getInputDeviceName(mType) :
+                AudioSystem.getOutputDeviceName(mType));
+        return "{" + super.toString()
+                + ", mType: " + type
+                + ", mAddress: " + mAddress
+                + "}";
+    }
+}
diff --git a/android/media/AudioDevicePortConfig.java b/android/media/AudioDevicePortConfig.java
new file mode 100644
index 00000000..e468a535
--- /dev/null
+++ b/android/media/AudioDevicePortConfig.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * An AudioDevicePortConfig describes a possible configuration of an output or input device
+ * (speaker, headphone, microphone ...).
+ * It is used to specify a sink or source when creating a connection with
+ * AudioManager.connectAudioPatch().
+ * An AudioDevicePortConfig is obtained from AudioDevicePort.buildConfig().
+ * @hide
+ */
+
+public class AudioDevicePortConfig extends AudioPortConfig {
+    // Builds a configuration of devicePort with the given active audio parameters.
+    AudioDevicePortConfig(AudioDevicePort devicePort, int samplingRate, int channelMask,
+            int format, AudioGainConfig gain) {
+        super((AudioPort)devicePort, samplingRate, channelMask, format, gain);
+    }
+
+    // Copy constructor: duplicates another configuration of the same device port.
+    AudioDevicePortConfig(AudioDevicePortConfig config) {
+        this(config.port(), config.samplingRate(), config.channelMask(), config.format(),
+                config.gain());
+    }
+
+    /**
+     * Returns the audio device port this AudioDevicePortConfig is issued from.
+     */
+    public AudioDevicePort port() {
+        return (AudioDevicePort)mPort;
+    }
+}
+
diff --git a/android/media/AudioFocusInfo.java b/android/media/AudioFocusInfo.java
new file mode 100644
index 00000000..6d9c5e2a
--- /dev/null
+++ b/android/media/AudioFocusInfo.java
@@ -0,0 +1,196 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.SystemApi;
+import android.os.Parcel;
+import android.os.Parcelable;
+
+import java.util.Objects;
+
+/**
+ * @hide
+ * A class to encapsulate information about an audio focus owner or request.
+ */
+@SystemApi
+public final class AudioFocusInfo implements Parcelable {
+
+    private final AudioAttributes mAttributes;
+    private final int mClientUid;
+    private final String mClientId;
+    private final String mPackageName;
+    private final int mSdkTarget;
+    private int mGainRequest;
+    private int mLossReceived;
+    private int mFlags;
+
+
+    /**
+     * Class constructor. Null-tolerant: a null {@code aa} is replaced with default
+     * attributes, and null {@code clientId}/{@code packageName} with empty strings.
+     * @param aa the audio attributes of the focus request
+     * @param clientUid the UID of the focus requester
+     * @param clientId the unique client identifier of the focus requester
+     * @param packageName the package name of the focus requester
+     * @param gainRequest the type of focus gain requested (AUDIOFOCUS_GAIN...)
+     * @param lossReceived the type of focus loss received, 0 if none
+     * @param flags the AUDIOFOCUS_FLAG_* values set in the request
+     * @param sdk the target SDK version of the focus requester
+     * @hide
+     */
+    public AudioFocusInfo(AudioAttributes aa, int clientUid, String clientId, String packageName,
+            int gainRequest, int lossReceived, int flags, int sdk) {
+        mAttributes = aa == null ? new AudioAttributes.Builder().build() : aa;
+        mClientUid = clientUid;
+        mClientId = clientId == null ? "" : clientId;
+        mPackageName = packageName == null ? "" : packageName;
+        mGainRequest = gainRequest;
+        mLossReceived = lossReceived;
+        mFlags = flags;
+        mSdkTarget = sdk;
+    }
+
+
+    /**
+     * The audio attributes for the audio focus request.
+     * @return non-null {@link AudioAttributes}.
+     */
+    @SystemApi
+    public AudioAttributes getAttributes() { return mAttributes; }
+
+    @SystemApi
+    public int getClientUid() { return mClientUid; }
+
+    @SystemApi
+    public String getClientId() { return mClientId; }
+
+    @SystemApi
+    public String getPackageName() { return mPackageName; }
+
+    /**
+     * The type of audio focus gain request.
+     * @return one of {@link AudioManager#AUDIOFOCUS_GAIN},
+     *     {@link AudioManager#AUDIOFOCUS_GAIN_TRANSIENT},
+     *     {@link AudioManager#AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK},
+     *     {@link AudioManager#AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE}.
+     */
+    @SystemApi
+    public int getGainRequest() { return mGainRequest; }
+
+    /**
+     * The type of audio focus loss that was received by the
+     * {@link AudioManager.OnAudioFocusChangeListener} if one was set.
+     * @return 0 if focus wasn't lost, or one of {@link AudioManager#AUDIOFOCUS_LOSS},
+     *   {@link AudioManager#AUDIOFOCUS_LOSS_TRANSIENT} or
+     *   {@link AudioManager#AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK}.
+     */
+    @SystemApi
+    public int getLossReceived() { return mLossReceived; }
+
+    /** @hide */
+    public int getSdkTarget() { return mSdkTarget; }
+
+    /** @hide Resets the recorded focus loss to "no loss received". */
+    public void clearLossReceived() { mLossReceived = 0; }
+
+    /**
+     * The flags set in the audio focus request.
+     * @return 0 or a combination of {@link AudioManager#AUDIOFOCUS_FLAG_DELAY_OK},
+     *     {@link AudioManager#AUDIOFOCUS_FLAG_PAUSES_ON_DUCKABLE_LOSS}, and
+     *     {@link AudioManager#AUDIOFOCUS_FLAG_LOCK}.
+     */
+    @SystemApi
+    public int getFlags() { return mFlags; }
+
+    @Override
+    public int describeContents() {
+        return 0;
+    }
+
+    // Field order must stay in sync with CREATOR.createFromParcel below.
+    @Override
+    public void writeToParcel(Parcel dest, int flags) {
+        mAttributes.writeToParcel(dest, flags);
+        dest.writeInt(mClientUid);
+        dest.writeString(mClientId);
+        dest.writeString(mPackageName);
+        dest.writeInt(mGainRequest);
+        dest.writeInt(mLossReceived);
+        dest.writeInt(mFlags);
+        dest.writeInt(mSdkTarget);
+    }
+
+    // NOTE(review): mLossReceived and mSdkTarget are compared in equals() but omitted
+    // here. This is legal per the hashCode contract (equal objects still hash equal),
+    // though possibly unintentional — confirm before relying on hash distribution.
+    @SystemApi
+    @Override
+    public int hashCode() {
+        return Objects.hash(mAttributes, mClientUid, mClientId, mPackageName, mGainRequest, mFlags);
+    }
+
+    @SystemApi
+    @Override
+    public boolean equals(Object obj) {
+        if (this == obj)
+            return true;
+        if (obj == null)
+            return false;
+        if (getClass() != obj.getClass())
+            return false;
+        AudioFocusInfo other = (AudioFocusInfo) obj;
+        if (!mAttributes.equals(other.mAttributes)) {
+            return false;
+        }
+        if (mClientUid != other.mClientUid) {
+            return false;
+        }
+        if (!mClientId.equals(other.mClientId)) {
+            return false;
+        }
+        if (!mPackageName.equals(other.mPackageName)) {
+            return false;
+        }
+        if (mGainRequest != other.mGainRequest) {
+            return false;
+        }
+        if (mLossReceived != other.mLossReceived) {
+            return false;
+        }
+        if (mFlags != other.mFlags) {
+            return false;
+        }
+        if (mSdkTarget != other.mSdkTarget) {
+            return false;
+        }
+        return true;
+    }
+
+    public static final Parcelable.Creator<AudioFocusInfo> CREATOR
+            = new Parcelable.Creator<AudioFocusInfo>() {
+
+        public AudioFocusInfo createFromParcel(Parcel in) {
+            return new AudioFocusInfo(
+                    AudioAttributes.CREATOR.createFromParcel(in), //AudioAttributes aa
+                    in.readInt(), // int clientUid
+                    in.readString(), //String clientId
+                    in.readString(), //String packageName
+                    in.readInt(), //int gainRequest
+                    in.readInt(), //int lossReceived
+                    in.readInt(), //int flags
+                    in.readInt() //int sdkTarget
+                    );
+        }
+
+        public AudioFocusInfo[] newArray(int size) {
+            return new AudioFocusInfo[size];
+        }
+    };
+}
diff --git a/android/media/AudioFocusRequest.java b/android/media/AudioFocusRequest.java
new file mode 100644
index 00000000..de59ac39
--- /dev/null
+++ b/android/media/AudioFocusRequest.java
@@ -0,0 +1,549 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.annotation.SystemApi;
+import android.media.AudioManager.OnAudioFocusChangeListener;
+import android.os.Handler;
+import android.os.Looper;
+
+/**
+ * A class to encapsulate information about an audio focus request.
+ * An {@code AudioFocusRequest} instance is built by {@link Builder}, and is used to
+ * request and abandon audio focus, respectively
+ * with {@link AudioManager#requestAudioFocus(AudioFocusRequest)} and
+ * {@link AudioManager#abandonAudioFocusRequest(AudioFocusRequest)}.
+ *
+ * <h3>What is audio focus?</h3>
+ * <p>Audio focus is a concept introduced in API 8. It is used to convey the fact that a user can
+ * only focus on a single audio stream at a time, e.g. listening to music or a podcast, but not
+ * both at the same time. In some cases, multiple audio streams can be playing at the same time,
+ * but there is only one the user would really listen to (focus on), while the other plays in
+ * the background. An example of this is driving directions being spoken while music plays at
+ * a reduced volume (a.k.a. ducking).
+ * <p>When an application requests audio focus, it expresses its intention to “own” audio focus to
+ * play audio. Let’s review the different types of focus requests, the return value after a request,
+ * and the responses to a loss.
+ * <p class="note">Note: applications should not play anything until granted focus.</p>
+ *
+ * <h3>The different types of focus requests</h3>
+ * <p>There are four focus request types. A successful focus request with each will yield different
+ * behaviors by the system and the other application that previously held audio focus.
+ * <ul>
+ * <li>{@link AudioManager#AUDIOFOCUS_GAIN} expresses the fact that your application is now the
+ * sole source of audio that the user is listening to. The duration of the audio playback is
+ * unknown, and is possibly very long: after the user finishes interacting with your application,
+ * (s)he doesn’t expect another audio stream to resume. Examples of uses of this focus gain are
+ * for music playback, for a game or a video player.</li>
+ *
+ * <li>{@link AudioManager#AUDIOFOCUS_GAIN_TRANSIENT} is for a situation when you know your
+ * application is temporarily grabbing focus from the current owner, but the user expects playback
+ * to go back to where it was once your application no longer requires audio focus. An example is
+ * for playing an alarm, or during a VoIP call. The playback is known to be finite: the alarm will
+ * time-out or be dismissed, the VoIP call has a beginning and an end. When any of those events
+ * ends, and if the user was listening to music when it started, the user expects music to resume,
+ * but didn’t wish to listen to both at the same time.</li>
+ *
+ * <li>{@link AudioManager#AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK}: this focus request type is similar
+ * to {@code AUDIOFOCUS_GAIN_TRANSIENT} for the temporary aspect of the focus request, but it also
+ * expresses the fact during the time you own focus, you allow another application to keep playing
+ * at a reduced volume, “ducked”. Examples are when playing driving directions or notifications,
+ * it’s ok for music to keep playing, but not loud enough that it would prevent the directions to
+ * be hard to understand. A typical attenuation by the “ducked” application is a factor of 0.2f
+ * (or -14dB), that can for instance be applied with {@code MediaPlayer.setVolume(0.2f)} when
+ * using this class for playback.</li>
+ *
+ * <li>{@link AudioManager#AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE} is also for a temporary request,
+ * but also expresses that your application expects the device to not play anything else. This is
+ * typically used if you are doing audio recording or speech recognition, and don’t want for
+ * examples notifications to be played by the system during that time.</li>
+ * </ul>
+ *
+ * <p>An {@code AudioFocusRequest} instance always contains one of the four types of requests
+ * explained above. It is passed when building an {@code AudioFocusRequest} instance with its
+ * builder in the {@link Builder} constructor
+ * {@link AudioFocusRequest.Builder#Builder(int)}, or
+ * with {@link AudioFocusRequest.Builder#setFocusGain(int)} after copying an existing instance with
+ * {@link AudioFocusRequest.Builder#Builder(AudioFocusRequest)}.
+ *
+ * <h3>Qualifying your focus request</h3>
+ * <h4>Use case requiring a focus request</h4>
+ * <p>Any focus request is qualified by the {@link AudioAttributes}
+ * (see {@link Builder#setAudioAttributes(AudioAttributes)}) that describe the audio use case that
+ * will follow the request (once it's successful or granted). It is recommended to use the
+ * same {@code AudioAttributes} for the request as the attributes you are using for audio/media
+ * playback.
+ * <br>If no attributes are set, default attributes of {@link AudioAttributes#USAGE_MEDIA} are used.
+ *
+ * <h4>Delayed focus</h4>
+ * <p>Audio focus can be "locked" by the system for a number of reasons: during a phone call, when
+ * the car to which the device is connected plays an emergency message... To support these
+ * situations, the application can request to be notified when its request is fulfilled, by flagging
+ * its request as accepting delayed focus, with {@link Builder#setAcceptsDelayedFocusGain(boolean)}.
+ * <br>If focus is requested while being locked by the system,
+ * {@link AudioManager#requestAudioFocus(AudioFocusRequest)} will return
+ * {@link AudioManager#AUDIOFOCUS_REQUEST_DELAYED}. When focus isn't locked anymore, the focus
+ * listener set with {@link Builder#setOnAudioFocusChangeListener(OnAudioFocusChangeListener)}
+ * or with {@link Builder#setOnAudioFocusChangeListener(OnAudioFocusChangeListener, Handler)} will
+ * be called to notify the application it now owns audio focus.
+ *
+ * <h4>Pausing vs ducking</h4>
+ * <p>When an application requested audio focus with
+ * {@link AudioManager#AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK}, the system will duck the current focus
+ * owner.
+ * <p class="note">Note: this behavior is <b>new for Android O</b>, whereas applications targeting
+ * SDK level up to API 25 had to implement the ducking themselves when they received a focus
+ * loss of {@link AudioManager#AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK}.
+ * <p>But ducking is not always the behavior expected by the user. A typical example is when the
+ * device plays driving directions while the user is listening to an audio book or podcast, and
+ * expects the audio playback to pause, instead of duck, as it is hard to understand a navigation
+ * prompt and spoken content at the same time. Therefore the system will not automatically duck
+ * when it detects it would be ducking spoken content: such content is detected when the
+ * {@code AudioAttributes} of the player are qualified by
+ * {@link AudioAttributes#CONTENT_TYPE_SPEECH}. Refer for instance to
+ * {@link AudioAttributes.Builder#setContentType(int)} and
+ * {@link MediaPlayer#setAudioAttributes(AudioAttributes)} if you are writing a media playback
+ * application for audio book, podcasts... Since the system will not automatically duck applications
+ * that play speech, it calls their focus listener instead to notify them of
+ * {@link AudioManager#AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK}, so they can pause instead. Note that
+ * this behavior is independent of the use of {@code AudioFocusRequest}, but tied to the use
+ * of {@code AudioAttributes}.
+ * <p>If your application requires pausing instead of ducking for any other reason than playing
+ * speech, you can also declare so with {@link Builder#setWillPauseWhenDucked(boolean)}, which will
+ * cause the system to call your focus listener instead of automatically ducking.
+ *
+ * <h4>Example</h4>
+ * <p>The example below covers the following steps to be found in any application that would play
+ * audio, and use audio focus. Here we play an audio book, and our application is intended to pause
+ * rather than duck when it loses focus. These steps consist in:
+ * <ul>
+ * <li>Creating {@code AudioAttributes} to be used for the playback and the focus request.</li>
+ * <li>Configuring and creating the {@code AudioFocusRequest} instance that defines the intended
+ * focus behaviors.</li>
+ * <li>Requesting audio focus and checking the return code to see if playback can happen right
+ * away, or is delayed.</li>
+ * <li>Implementing a focus change listener to respond to focus gains and losses.</li>
+ * </ul>
+ * <p>
+ * <pre class="prettyprint">
+ * // initialization of the audio attributes and focus request
+ * mAudioManager = (AudioManager) Context.getSystemService(Context.AUDIO_SERVICE);
+ * mPlaybackAttributes = new AudioAttributes.Builder()
+ * .setUsage(AudioAttributes.USAGE_MEDIA)
+ * .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
+ * .build();
+ * mFocusRequest = new AudioFocusRequest.Builder(AudioManager.AUDIOFOCUS_GAIN)
+ * .setAudioAttributes(mPlaybackAttributes)
+ * .setAcceptsDelayedFocusGain(true)
+ * .setWillPauseWhenDucked(true)
+ * .setOnAudioFocusChangeListener(this, mMyHandler)
+ * .build();
+ * mMediaPlayer = new MediaPlayer();
+ * mMediaPlayer.setAudioAttributes(mPlaybackAttributes);
+ * final Object mFocusLock = new Object();
+ *
+ * boolean mPlaybackDelayed = false;
+ *
+ * // requesting audio focus
+ * int res = mAudioManager.requestAudioFocus(mFocusRequest);
+ * synchronized (mFocusLock) {
+ * if (res == AudioManager.AUDIOFOCUS_REQUEST_FAILED) {
+ * mPlaybackDelayed = false;
+ * } else if (res == AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
+ * mPlaybackDelayed = false;
+ * playbackNow();
+ * } else if (res == AudioManager.AUDIOFOCUS_REQUEST_DELAYED) {
+ * mPlaybackDelayed = true;
+ * }
+ * }
+ *
+ * // implementation of the OnAudioFocusChangeListener
+ * &#64;Override
+ * public void onAudioFocusChange(int focusChange) {
+ * switch (focusChange) {
+ * case AudioManager.AUDIOFOCUS_GAIN:
+ * if (mPlaybackDelayed || mResumeOnFocusGain) {
+ * synchronized (mFocusLock) {
+ * mPlaybackDelayed = false;
+ * mResumeOnFocusGain = false;
+ * }
+ * playbackNow();
+ * }
+ * break;
+ * case AudioManager.AUDIOFOCUS_LOSS:
+ * synchronized (mFocusLock) {
+ * // this is not a transient loss, we shouldn't automatically resume for now
+ * mResumeOnFocusGain = false;
+ * mPlaybackDelayed = false;
+ * }
+ * pausePlayback();
+ * break;
+ * case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
+ * case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK:
+ * // we handle all transient losses the same way because we never duck audio books
+ * synchronized (mFocusLock) {
+ * // we should only resume if playback was interrupted
+ * mResumeOnFocusGain = mMediaPlayer.isPlaying();
+ * mPlaybackDelayed = false;
+ * }
+ * pausePlayback();
+ * break;
+ * }
+ * }
+ *
+ * // Important:
+ * // Also set "mResumeOnFocusGain" to false when the user pauses or stops playback: this way your
+ * // application doesn't automatically restart when it gains focus, even though the user had
+ * // stopped it.
+ * </pre>
+ */
+
+public final class AudioFocusRequest {
+
+    // default attributes for the request when not specified
+    private final static AudioAttributes FOCUS_DEFAULT_ATTR = new AudioAttributes.Builder()
+            .setUsage(AudioAttributes.USAGE_MEDIA).build();
+
+    private final OnAudioFocusChangeListener mFocusListener; // may be null
+    private final Handler mListenerHandler;                  // may be null
+    private final AudioAttributes mAttr;                     // never null
+    private final int mFocusGain;
+    private final int mFlags;
+
+    // Instances are only built through Builder, which validates the focus gain and
+    // guarantees mAttr is non-null.
+    private AudioFocusRequest(OnAudioFocusChangeListener listener, Handler handler,
+            AudioAttributes attr, int focusGain, int flags) {
+        mFocusListener = listener;
+        mListenerHandler = handler;
+        mFocusGain = focusGain;
+        mAttr = attr;
+        mFlags = flags;
+    }
+
+    /**
+     * @hide
+     * Checks whether a focus gain constant is a valid value for an audio focus request.
+     * @param focusGain value to check
+     * @return true if focusGain is a valid value for an audio focus request.
+     */
+    final static boolean isValidFocusGain(int focusGain) {
+        switch (focusGain) {
+            case AudioManager.AUDIOFOCUS_GAIN:
+            case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT:
+            case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK:
+            case AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE:
+                return true;
+            default:
+                return false;
+        }
+    }
+
+    /**
+     * @hide
+     * Returns the focus change listener set for this {@code AudioFocusRequest}.
+     * @return null if no {@link AudioManager.OnAudioFocusChangeListener} was set.
+     */
+    public @Nullable OnAudioFocusChangeListener getOnAudioFocusChangeListener() {
+        return mFocusListener;
+    }
+
+    /**
+     * @hide
+     * Returns the {@link Handler} to be used for the focus change listener.
+     * @return the same {@code Handler} set in.
+     * {@link Builder#setOnAudioFocusChangeListener(OnAudioFocusChangeListener, Handler)}, or null
+     * if no listener was set.
+     */
+    public @Nullable Handler getOnAudioFocusChangeListenerHandler() {
+        return mListenerHandler;
+    }
+
+    /**
+     * Returns the {@link AudioAttributes} set for this {@code AudioFocusRequest}, or the default
+     * attributes if none were set.
+     * @return non-null {@link AudioAttributes}.
+     */
+    public @NonNull AudioAttributes getAudioAttributes() {
+        return mAttr;
+    }
+
+    /**
+     * Returns the type of audio focus request configured for this {@code AudioFocusRequest}.
+     * @return one of {@link AudioManager#AUDIOFOCUS_GAIN},
+     *     {@link AudioManager#AUDIOFOCUS_GAIN_TRANSIENT},
+     *     {@link AudioManager#AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK}, and
+     *     {@link AudioManager#AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE}.
+     */
+    public int getFocusGain() {
+        return mFocusGain;
+    }
+
+    /**
+     * Returns whether the application that would use this {@code AudioFocusRequest} would pause
+     * when it is requested to duck.
+     * @return the duck/pause behavior.
+     */
+    public boolean willPauseWhenDucked() {
+        return (mFlags & AudioManager.AUDIOFOCUS_FLAG_PAUSES_ON_DUCKABLE_LOSS)
+                == AudioManager.AUDIOFOCUS_FLAG_PAUSES_ON_DUCKABLE_LOSS;
+    }
+
+    /**
+     * Returns whether the application that would use this {@code AudioFocusRequest} supports
+     * a focus gain granted after a temporary request failure.
+     * @return whether delayed focus gain is supported.
+     */
+    public boolean acceptsDelayedFocusGain() {
+        return (mFlags & AudioManager.AUDIOFOCUS_FLAG_DELAY_OK)
+                == AudioManager.AUDIOFOCUS_FLAG_DELAY_OK;
+    }
+
+    /**
+     * @hide
+     * Returns whether audio focus will be locked (i.e. focus cannot change) as a result of this
+     * focus request being successful.
+     * @return whether this request will lock focus.
+     */
+    @SystemApi
+    public boolean locksFocus() {
+        return (mFlags & AudioManager.AUDIOFOCUS_FLAG_LOCK)
+                == AudioManager.AUDIOFOCUS_FLAG_LOCK;
+    }
+
+    /** Raw flag field: a bitwise OR of the AUDIOFOCUS_FLAG_* constants set at build time. */
+    int getFlags() {
+        return mFlags;
+    }
+
+    /**
+     * Builder class for {@link AudioFocusRequest} objects.
+     * <p>See {@link AudioFocusRequest} for an example of building an instance with this builder.
+     * <br>The default values for the instance to be built are:
+     * <table>
+     * <tr><td>focus listener and handler</td><td>none</td></tr>
+     * <tr><td>{@code AudioAttributes}</td><td>attributes with usage set to
+     *     {@link AudioAttributes#USAGE_MEDIA}</td></tr>
+     * <tr><td>pauses on duck</td><td>false</td></tr>
+     * <tr><td>supports delayed focus grant</td><td>false</td></tr>
+     * </table>
+     */
+    public static final class Builder {
+        private OnAudioFocusChangeListener mFocusListener;
+        private Handler mListenerHandler;
+        private AudioAttributes mAttr = FOCUS_DEFAULT_ATTR;
+        private int mFocusGain;
+        private boolean mPausesOnDuck = false;
+        private boolean mDelayedFocus = false;
+        private boolean mFocusLocked = false;
+
+        /**
+         * Constructs a new {@code Builder}, and specifies how audio focus
+         * will be requested. Valid values for focus requests are
+         * {@link AudioManager#AUDIOFOCUS_GAIN}, {@link AudioManager#AUDIOFOCUS_GAIN_TRANSIENT},
+         * {@link AudioManager#AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK}, and
+         * {@link AudioManager#AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE}.
+         * <p>By default there is no focus change listener, delayed focus is not supported, ducking
+         * is suitable for the application, and the <code>AudioAttributes</code>
+         * have a usage of {@link AudioAttributes#USAGE_MEDIA}.
+         * @param focusGain the type of audio focus gain that will be requested
+         * @throws IllegalArgumentException thrown when an invalid focus gain type is used
+         */
+        public Builder(int focusGain) {
+            setFocusGain(focusGain);
+        }
+
+        /**
+         * Constructs a new {@code Builder} with all the properties of the {@code AudioFocusRequest}
+         * passed as parameter.
+         * Use this method when you want a new request to differ only by some properties.
+         * @param requestToCopy the non-null {@code AudioFocusRequest} to build a duplicate from.
+         * @throws IllegalArgumentException thrown when a null {@code AudioFocusRequest} is used.
+         */
+        public Builder(@NonNull AudioFocusRequest requestToCopy) {
+            if (requestToCopy == null) {
+                throw new IllegalArgumentException("Illegal null AudioFocusRequest");
+            }
+            mAttr = requestToCopy.mAttr;
+            mFocusListener = requestToCopy.mFocusListener;
+            mListenerHandler = requestToCopy.mListenerHandler;
+            mFocusGain = requestToCopy.mFocusGain;
+            mPausesOnDuck = requestToCopy.willPauseWhenDucked();
+            mDelayedFocus = requestToCopy.acceptsDelayedFocusGain();
+            // Fix: also carry over the focus-lock flag. Previously a copy of a request
+            // built with setLocksFocus(true) silently dropped AUDIOFOCUS_FLAG_LOCK,
+            // contradicting this constructor's "all the properties" contract.
+            mFocusLocked = requestToCopy.locksFocus();
+        }
+
+        /**
+         * Sets the type of focus gain that will be requested.
+         * Use this method to replace the focus gain when building a request by modifying an
+         * existing {@code AudioFocusRequest} instance.
+         * @param focusGain the type of audio focus gain that will be requested.
+         * @return this {@code Builder} instance
+         * @throws IllegalArgumentException thrown when an invalid focus gain type is used
+         */
+        public @NonNull Builder setFocusGain(int focusGain) {
+            if (!isValidFocusGain(focusGain)) {
+                throw new IllegalArgumentException("Illegal audio focus gain type " + focusGain);
+            }
+            mFocusGain = focusGain;
+            return this;
+        }
+
+        /**
+         * Sets the listener called when audio focus changes after being requested with
+         * {@link AudioManager#requestAudioFocus(AudioFocusRequest)}, and until being abandoned
+         * with {@link AudioManager#abandonAudioFocusRequest(AudioFocusRequest)}.
+         * Note that only focus changes (gains and losses) affecting the focus owner are reported,
+         * not gains and losses of other focus requesters in the system.<br>
+         * Notifications are delivered on the main {@link Looper}.
+         * @param listener the listener receiving the focus change notifications.
+         * @return this {@code Builder} instance.
+         * @throws NullPointerException thrown when a null focus listener is used.
+         */
+        public @NonNull Builder setOnAudioFocusChangeListener(
+                @NonNull OnAudioFocusChangeListener listener) {
+            if (listener == null) {
+                throw new NullPointerException("Illegal null focus listener");
+            }
+            mFocusListener = listener;
+            // single-arg overload implies main-Looper delivery; clear any previous handler
+            mListenerHandler = null;
+            return this;
+        }
+
+        /**
+         * @hide
+         * Internal listener setter, no null checks on listener nor handler
+         * @param listener
+         * @param handler
+         * @return this {@code Builder} instance.
+         */
+        @NonNull Builder setOnAudioFocusChangeListenerInt(
+                OnAudioFocusChangeListener listener, Handler handler) {
+            mFocusListener = listener;
+            mListenerHandler = handler;
+            return this;
+        }
+
+        /**
+         * Sets the listener called when audio focus changes after being requested with
+         * {@link AudioManager#requestAudioFocus(AudioFocusRequest)}, and until being abandoned
+         * with {@link AudioManager#abandonAudioFocusRequest(AudioFocusRequest)}.
+         * Note that only focus changes (gains and losses) affecting the focus owner are reported,
+         * not gains and losses of other focus requesters in the system.
+         * @param listener the listener receiving the focus change notifications.
+         * @param handler the {@link Handler} for the thread on which to execute
+         *     the notifications.
+         * @return this {@code Builder} instance.
+         * @throws NullPointerException thrown when a null focus listener or handler is used.
+         */
+        public @NonNull Builder setOnAudioFocusChangeListener(
+                @NonNull OnAudioFocusChangeListener listener, @NonNull Handler handler) {
+            if (listener == null || handler == null) {
+                throw new NullPointerException("Illegal null focus listener or handler");
+            }
+            mFocusListener = listener;
+            mListenerHandler = handler;
+            return this;
+        }
+
+        /**
+         * Sets the {@link AudioAttributes} to be associated with the focus request, and which
+         * describe the use case for which focus is requested.
+         * As the focus requests typically precede audio playback, this information is used on
+         * certain platforms to declare the subsequent playback use case. It is therefore good
+         * practice to use in this method the same {@code AudioAttributes} as used for
+         * playback, see for example {@link MediaPlayer#setAudioAttributes(AudioAttributes)} in
+         * {@code MediaPlayer} or {@link AudioTrack.Builder#setAudioAttributes(AudioAttributes)}
+         * in {@code AudioTrack}.
+         * @param attributes the {@link AudioAttributes} for the focus request.
+         * @return this {@code Builder} instance.
+         * @throws NullPointerException thrown when using null for the attributes.
+         */
+        public @NonNull Builder setAudioAttributes(@NonNull AudioAttributes attributes) {
+            if (attributes == null) {
+                throw new NullPointerException("Illegal null AudioAttributes");
+            }
+            mAttr = attributes;
+            return this;
+        }
+
+        /**
+         * Declare the intended behavior of the application with regards to audio ducking.
+         * See more details in the {@link AudioFocusRequest} class documentation.
+         * @param pauseOnDuck use {@code true} if the application intends to pause audio playback
+         *     when losing focus with {@link AudioManager#AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK}.
+         *     If {@code true}, note that you must also set a focus listener to receive such an
+         *     event, with
+         *     {@link #setOnAudioFocusChangeListener(OnAudioFocusChangeListener, Handler)}.
+         * @return this {@code Builder} instance.
+         */
+        public @NonNull Builder setWillPauseWhenDucked(boolean pauseOnDuck) {
+            mPausesOnDuck = pauseOnDuck;
+            return this;
+        }
+
+        /**
+         * Marks this focus request as compatible with delayed focus.
+         * See more details about delayed focus in the {@link AudioFocusRequest} class
+         * documentation.
+         * @param acceptsDelayedFocusGain use {@code true} if the application supports delayed
+         *     focus. If {@code true}, note that you must also set a focus listener to be notified
+         *     of delayed focus gain, with
+         *     {@link #setOnAudioFocusChangeListener(OnAudioFocusChangeListener, Handler)}.
+         * @return this {@code Builder} instance
+         */
+        public @NonNull Builder setAcceptsDelayedFocusGain(boolean acceptsDelayedFocusGain) {
+            mDelayedFocus = acceptsDelayedFocusGain;
+            return this;
+        }
+
+        /**
+         * @hide
+         * Marks this focus request as locking audio focus so granting is temporarily disabled.
+         * This feature can only be used by owners of a registered
+         * {@link android.media.audiopolicy.AudioPolicy} in
+         * {@link AudioManager#requestAudioFocus(AudioFocusRequest, android.media.audiopolicy.AudioPolicy)}.
+         * Setting to false is the same as the default behavior.
+         * @param focusLocked true when locking focus
+         * @return this {@code Builder} instance
+         */
+        @SystemApi
+        public @NonNull Builder setLocksFocus(boolean focusLocked) {
+            mFocusLocked = focusLocked;
+            return this;
+        }
+
+        /**
+         * Builds a new {@code AudioFocusRequest} instance combining all the information gathered
+         * by this {@code Builder}'s configuration methods.
+         * @return the {@code AudioFocusRequest} instance qualified by all the properties set
+         *     on this {@code Builder}.
+         * @throws IllegalStateException thrown when attempting to build a focus request that is set
+         *     to accept delayed focus, or to pause on duck, but no focus change listener was set.
+         */
+        public AudioFocusRequest build() {
+            if ((mDelayedFocus || mPausesOnDuck) && (mFocusListener == null)) {
+                throw new IllegalStateException(
+                        "Can't use delayed focus or pause on duck without a listener");
+            }
+            // fold the three booleans into the single flag word stored on the request
+            final int flags = 0
+                    | (mDelayedFocus ? AudioManager.AUDIOFOCUS_FLAG_DELAY_OK : 0)
+                    | (mPausesOnDuck ? AudioManager.AUDIOFOCUS_FLAG_PAUSES_ON_DUCKABLE_LOSS : 0)
+                    | (mFocusLocked ? AudioManager.AUDIOFOCUS_FLAG_LOCK : 0);
+            return new AudioFocusRequest(mFocusListener, mListenerHandler,
+                    mAttr, mFocusGain, flags);
+        }
+    }
+}
diff --git a/android/media/AudioFormat.java b/android/media/AudioFormat.java
new file mode 100644
index 00000000..93fc3da5
--- /dev/null
+++ b/android/media/AudioFormat.java
@@ -0,0 +1,1033 @@
+/*
+ * Copyright (C) 2008 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.os.Parcel;
+import android.os.Parcelable;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.util.Arrays;
+import java.util.Objects;
+
+/**
+ * The {@link AudioFormat} class is used to access a number of audio format and
+ * channel configuration constants. They are for instance used
+ * in {@link AudioTrack} and {@link AudioRecord}, as valid values in individual parameters of
+ * constructors like {@link AudioTrack#AudioTrack(int, int, int, int, int, int)}, where the fourth
+ * parameter is one of the <code>AudioFormat.ENCODING_*</code> constants.
+ * The <code>AudioFormat</code> constants are also used in {@link MediaFormat} to specify
+ * audio related values commonly used in media, such as for {@link MediaFormat#KEY_CHANNEL_MASK}.
+ * <p>The {@link AudioFormat.Builder} class can be used to create instances of
+ * the <code>AudioFormat</code> format class.
+ * Refer to
+ * {@link AudioFormat.Builder} for documentation on the mechanics of the configuration and building
+ * of such instances. Here we describe the main concepts that the <code>AudioFormat</code> class
+ * allows you to convey in each instance; they are:
+ * <ol>
+ * <li><a href="#sampleRate">sample rate</a>
+ * <li><a href="#encoding">encoding</a>
+ * <li><a href="#channelMask">channel masks</a>
+ * </ol>
+ * <p>Closely associated with the <code>AudioFormat</code> is the notion of an
+ * <a href="#audioFrame">audio frame</a>, which is used throughout the documentation
+ * to represent the minimum size complete unit of audio data.
+ *
+ * <h4 id="sampleRate">Sample rate</h4>
+ * <p>Expressed in Hz, the sample rate in an <code>AudioFormat</code> instance expresses the number
+ * of audio samples for each channel per second in the content you are playing or recording. It is
+ * not the sample rate
+ * at which content is rendered or produced. For instance a sound at a media sample rate of 8000Hz
+ * can be played on a device operating at a sample rate of 48000Hz; the sample rate conversion is
+ * automatically handled by the platform, it will not play at 6x speed.
+ *
+ * <p>As of API {@link android.os.Build.VERSION_CODES#M},
+ * sample rates up to 192kHz are supported
+ * for <code>AudioRecord</code> and <code>AudioTrack</code>, with sample rate conversion
+ * performed as needed.
+ * To improve efficiency and avoid lossy conversions, it is recommended to match the sample rate
+ * for <code>AudioRecord</code> and <code>AudioTrack</code> to the endpoint device
+ * sample rate, and limit the sample rate to no more than 48kHz unless there are special
+ * device capabilities that warrant a higher rate.
+ *
+ * <h4 id="encoding">Encoding</h4>
+ * <p>Audio encoding is used to describe the bit representation of audio data, which can be
+ * either linear PCM or compressed audio, such as AC3 or DTS.
+ * <p>For linear PCM, the audio encoding describes the sample size, 8 bits, 16 bits, or 32 bits,
+ * and the sample representation, integer or float.
+ * <ul>
+ * <li> {@link #ENCODING_PCM_8BIT}: The audio sample is a 8 bit unsigned integer in the
+ * range [0, 255], with a 128 offset for zero. This is typically stored as a Java byte in a
+ * byte array or ByteBuffer. Since the Java byte is <em>signed</em>,
+ * be careful with math operations and conversions as the most significant bit is inverted.
+ * </li>
+ * <li> {@link #ENCODING_PCM_16BIT}: The audio sample is a 16 bit signed integer
+ * typically stored as a Java short in a short array, but when the short
+ * is stored in a ByteBuffer, it is native endian (as compared to the default Java big endian).
+ * The short has full range from [-32768, 32767],
+ * and is sometimes interpreted as fixed point Q.15 data.
+ * </li>
+ * <li> {@link #ENCODING_PCM_FLOAT}: Introduced in
+ * API {@link android.os.Build.VERSION_CODES#LOLLIPOP}, this encoding specifies that
+ * the audio sample is a 32 bit IEEE single precision float. The sample can be
+ * manipulated as a Java float in a float array, though within a ByteBuffer
+ * it is stored in native endian byte order.
+ * The nominal range of <code>ENCODING_PCM_FLOAT</code> audio data is [-1.0, 1.0].
+ * It is implementation dependent whether the positive maximum of 1.0 is included
+ * in the interval. Values outside of the nominal range are clamped before
+ * sending to the endpoint device. Beware that
+ * the handling of NaN is undefined; subnormals may be treated as zero; and
+ * infinities are generally clamped just like other values for <code>AudioTrack</code>
+ * &ndash; try to avoid infinities because they can easily generate a NaN.
+ * <br>
+ * To achieve higher audio bit depth than a signed 16 bit integer short,
+ * it is recommended to use <code>ENCODING_PCM_FLOAT</code> for audio capture, processing,
+ * and playback.
+ * Floats are efficiently manipulated by modern CPUs,
+ * have greater precision than 24 bit signed integers,
+ * and have greater dynamic range than 32 bit signed integers.
+ * <code>AudioRecord</code> as of API {@link android.os.Build.VERSION_CODES#M} and
+ * <code>AudioTrack</code> as of API {@link android.os.Build.VERSION_CODES#LOLLIPOP}
+ * support <code>ENCODING_PCM_FLOAT</code>.
+ * </li>
+ * </ul>
+ * <p>For compressed audio, the encoding specifies the method of compression,
+ * for example {@link #ENCODING_AC3} and {@link #ENCODING_DTS}. The compressed
+ * audio data is typically stored as bytes in
+ * a byte array or ByteBuffer. When a compressed audio encoding is specified
+ * for an <code>AudioTrack</code>, it creates a direct (non-mixed) track
+ * for output to an endpoint (such as HDMI) capable of decoding the compressed audio.
+ * For (most) other endpoints, which are not capable of decoding such compressed audio,
+ * you will need to decode the data first, typically by creating a {@link MediaCodec}.
+ * Alternatively, one may use {@link MediaPlayer} for playback of compressed
+ * audio files or streams.
+ * <p>When compressed audio is sent out through a direct <code>AudioTrack</code>,
+ * it need not be written in exact multiples of the audio access unit;
+ * this differs from <code>MediaCodec</code> input buffers.
+ *
+ * <h4 id="channelMask">Channel mask</h4>
+ * <p>Channel masks are used in <code>AudioTrack</code> and <code>AudioRecord</code> to describe
+ * the samples and their arrangement in the audio frame. They are also used in the endpoint (e.g.
+ * a USB audio interface, a DAC connected to headphones) to specify allowable configurations of a
+ * particular device.
+ * <br>As of API {@link android.os.Build.VERSION_CODES#M}, there are two types of channel masks:
+ * channel position masks and channel index masks.
+ *
+ * <h5 id="channelPositionMask">Channel position masks</h5>
+ * Channel position masks are the original Android channel masks, and are used since API
+ * {@link android.os.Build.VERSION_CODES#BASE}.
+ * For input and output, they imply a positional nature - the location of a speaker or a microphone
+ * for recording or playback.
+ * <br>For a channel position mask, each allowed channel position corresponds to a bit in the
+ * channel mask. If that channel position is present in the audio frame, that bit is set,
+ * otherwise it is zero. The order of the bits (from lsb to msb) corresponds to the order of that
+ * position's sample in the audio frame.
+ * <br>The canonical channel position masks by channel count are as follows:
+ * <br><table>
+ * <tr><td>channel count</td><td>channel position mask</td></tr>
+ * <tr><td>1</td><td>{@link #CHANNEL_OUT_MONO}</td></tr>
+ * <tr><td>2</td><td>{@link #CHANNEL_OUT_STEREO}</td></tr>
+ * <tr><td>3</td><td>{@link #CHANNEL_OUT_STEREO} | {@link #CHANNEL_OUT_FRONT_CENTER}</td></tr>
+ * <tr><td>4</td><td>{@link #CHANNEL_OUT_QUAD}</td></tr>
+ * <tr><td>5</td><td>{@link #CHANNEL_OUT_QUAD} | {@link #CHANNEL_OUT_FRONT_CENTER}</td></tr>
+ * <tr><td>6</td><td>{@link #CHANNEL_OUT_5POINT1}</td></tr>
+ * <tr><td>7</td><td>{@link #CHANNEL_OUT_5POINT1} | {@link #CHANNEL_OUT_BACK_CENTER}</td></tr>
+ * <tr><td>8</td><td>{@link #CHANNEL_OUT_7POINT1_SURROUND}</td></tr>
+ * </table>
+ * <br>These masks are an ORed composite of individual channel masks. For example
+ * {@link #CHANNEL_OUT_STEREO} is composed of {@link #CHANNEL_OUT_FRONT_LEFT} and
+ * {@link #CHANNEL_OUT_FRONT_RIGHT}.
+ *
+ * <h5 id="channelIndexMask">Channel index masks</h5>
+ * Channel index masks are introduced in API {@link android.os.Build.VERSION_CODES#M}. They allow
+ * the selection of a particular channel from the source or sink endpoint by number, i.e. the first
+ * channel, the second channel, and so forth. This avoids problems with artificially assigning
+ * positions to channels of an endpoint, or figuring what the i<sup>th</sup> position bit is within
+ * an endpoint's channel position mask etc.
+ * <br>Here's an example where channel index masks address this confusion: dealing with a 4 channel
+ * USB device. Using a position mask, and based on the channel count, this would be a
+ * {@link #CHANNEL_OUT_QUAD} device, but really one is only interested in channel 0
+ * through channel 3. The USB device would then have the following individual bit channel masks:
+ * {@link #CHANNEL_OUT_FRONT_LEFT},
+ * {@link #CHANNEL_OUT_FRONT_RIGHT}, {@link #CHANNEL_OUT_BACK_LEFT}
+ * and {@link #CHANNEL_OUT_BACK_RIGHT}. But which is channel 0 and which is
+ * channel 3?
+ * <br>For a channel index mask, each channel number is represented as a bit in the mask, from the
+ * lsb (channel 0) upwards to the msb, numerically this bit value is
+ * <code>1 << channelNumber</code>.
+ * A set bit indicates that channel is present in the audio frame, otherwise it is cleared.
+ * The order of the bits also corresponds to that channel number's sample order in the audio frame.
+ * <br>For the previous 4 channel USB device example, the device would have a channel index mask
+ * <code>0xF</code>. Suppose we wanted to select only the first and the third channels; this would
+ * correspond to a channel index mask <code>0x5</code> (the first and third bits set). If an
+ * <code>AudioTrack</code> uses this channel index mask, the audio frame would consist of two
+ * samples, the first sample of each frame routed to channel 0, and the second sample of each frame
+ * routed to channel 2.
+ * The canonical channel index masks by channel count are given by the formula
+ * <code>(1 << channelCount) - 1</code>.
+ *
+ * <h5>Use cases</h5>
+ * <ul>
+ * <li><i>Channel position mask for an endpoint:</i> <code>CHANNEL_OUT_FRONT_LEFT</code>,
+ * <code>CHANNEL_OUT_FRONT_CENTER</code>, etc. for HDMI home theater purposes.
+ * <li><i>Channel position mask for an audio stream:</i> Creating an <code>AudioTrack</code>
+ * to output movie content, where 5.1 multichannel output is to be written.
+ * <li><i>Channel index mask for an endpoint:</i> USB devices for which input and output do not
+ * correspond to left or right speaker or microphone.
+ * <li><i>Channel index mask for an audio stream:</i> An <code>AudioRecord</code> may only want the
+ * third and fourth audio channels of the endpoint (i.e. the second channel pair), and not care
+ * about the position it corresponds to, in which case the channel index mask is <code>0xC</code>.
+ * Multichannel <code>AudioRecord</code> sessions should use channel index masks.
+ * </ul>
+ * <h4 id="audioFrame">Audio Frame</h4>
+ * <p>For linear PCM, an audio frame consists of a set of samples captured at the same time,
+ * whose count and
+ * channel association are given by the <a href="#channelMask">channel mask</a>,
+ * and whose sample contents are specified by the <a href="#encoding">encoding</a>.
+ * For example, a stereo 16 bit PCM frame consists of
+ * two 16 bit linear PCM samples, with a frame size of 4 bytes.
+ * For compressed audio, an audio frame may alternately
+ * refer to an access unit of compressed data bytes that is logically grouped together for
+ * decoding and bitstream access (e.g. {@link MediaCodec}),
+ * or a single byte of compressed data (e.g. {@link AudioTrack#getBufferSizeInFrames()
+ * AudioTrack.getBufferSizeInFrames()}),
+ * or the linear PCM frame result from decoding the compressed data
+ * (e.g.{@link AudioTrack#getPlaybackHeadPosition()
+ * AudioTrack.getPlaybackHeadPosition()}),
+ * depending on the context where audio frame is used.
+ */
+public final class AudioFormat implements Parcelable {
+
+ //---------------------------------------------------------
+ // Constants
+ //--------------------
+ /** Invalid audio data format */
+ public static final int ENCODING_INVALID = 0;
+ /** Default audio data format */
+ public static final int ENCODING_DEFAULT = 1;
+
+ // These values must be kept in sync with core/jni/android_media_AudioFormat.h
+ // Also sync av/services/audiopolicy/managerdefault/ConfigParsingUtils.h
+ /** Audio data format: PCM 16 bit per sample. Guaranteed to be supported by devices. */
+ public static final int ENCODING_PCM_16BIT = 2;
+ /** Audio data format: PCM 8 bit per sample. Not guaranteed to be supported by devices. */
+ public static final int ENCODING_PCM_8BIT = 3;
+ /** Audio data format: single-precision floating-point per sample */
+ public static final int ENCODING_PCM_FLOAT = 4;
+ /** Audio data format: AC-3 compressed */
+ public static final int ENCODING_AC3 = 5;
+ /** Audio data format: E-AC-3 compressed */
+ public static final int ENCODING_E_AC3 = 6;
+ /** Audio data format: DTS compressed */
+ public static final int ENCODING_DTS = 7;
+ /** Audio data format: DTS HD compressed */
+ public static final int ENCODING_DTS_HD = 8;
+ /** Audio data format: MP3 compressed
+ * @hide
+ * */
+ public static final int ENCODING_MP3 = 9;
+ /** Audio data format: AAC LC compressed
+ * @hide
+ * */
+ public static final int ENCODING_AAC_LC = 10;
+ /** Audio data format: AAC HE V1 compressed
+ * @hide
+ * */
+ public static final int ENCODING_AAC_HE_V1 = 11;
+ /** Audio data format: AAC HE V2 compressed
+ * @hide
+ * */
+ public static final int ENCODING_AAC_HE_V2 = 12;
+ /** Audio data format: compressed audio wrapped in PCM for HDMI
+ * or S/PDIF passthrough.
+ * IEC61937 uses a stereo stream of 16-bit samples as the wrapper.
+ * So the channel mask for the track must be {@link #CHANNEL_OUT_STEREO}.
+ * Data should be written to the stream in a short[] array.
+ * If the data is written in a byte[] array then there may be endian problems
+ * on some platforms when converting to short internally.
+ */
+ public static final int ENCODING_IEC61937 = 13;
+ /** Audio data format: DOLBY TRUEHD compressed
+ **/
+ public static final int ENCODING_DOLBY_TRUEHD = 14;
+
+    /**
+     * Returns the symbolic ENCODING_* name for an encoding value, for logging purposes.
+     * @param enc an ENCODING_* value
+     * @return the constant's name, or "invalid encoding &lt;enc&gt;" for unknown values
+     * @hide
+     */
+    public static String toLogFriendlyEncoding(int enc) {
+        final String name;
+        switch (enc) {
+            case ENCODING_INVALID:      name = "ENCODING_INVALID"; break;
+            case ENCODING_PCM_16BIT:    name = "ENCODING_PCM_16BIT"; break;
+            case ENCODING_PCM_8BIT:     name = "ENCODING_PCM_8BIT"; break;
+            case ENCODING_PCM_FLOAT:    name = "ENCODING_PCM_FLOAT"; break;
+            case ENCODING_AC3:          name = "ENCODING_AC3"; break;
+            case ENCODING_E_AC3:        name = "ENCODING_E_AC3"; break;
+            case ENCODING_DTS:          name = "ENCODING_DTS"; break;
+            case ENCODING_DTS_HD:       name = "ENCODING_DTS_HD"; break;
+            case ENCODING_MP3:          name = "ENCODING_MP3"; break;
+            case ENCODING_AAC_LC:       name = "ENCODING_AAC_LC"; break;
+            case ENCODING_AAC_HE_V1:    name = "ENCODING_AAC_HE_V1"; break;
+            case ENCODING_AAC_HE_V2:    name = "ENCODING_AAC_HE_V2"; break;
+            case ENCODING_IEC61937:     name = "ENCODING_IEC61937"; break;
+            case ENCODING_DOLBY_TRUEHD: name = "ENCODING_DOLBY_TRUEHD"; break;
+            default:                    name = "invalid encoding " + enc; break;
+        }
+        return name;
+    }
+
+ /** Invalid audio channel configuration */
+ /** @deprecated Use {@link #CHANNEL_INVALID} instead. */
+ @Deprecated public static final int CHANNEL_CONFIGURATION_INVALID = 0;
+ /** Default audio channel configuration */
+ /** @deprecated Use {@link #CHANNEL_OUT_DEFAULT} or {@link #CHANNEL_IN_DEFAULT} instead. */
+ @Deprecated public static final int CHANNEL_CONFIGURATION_DEFAULT = 1;
+ /** Mono audio configuration */
+ /** @deprecated Use {@link #CHANNEL_OUT_MONO} or {@link #CHANNEL_IN_MONO} instead. */
+ @Deprecated public static final int CHANNEL_CONFIGURATION_MONO = 2;
+ /** Stereo (2 channel) audio configuration */
+ /** @deprecated Use {@link #CHANNEL_OUT_STEREO} or {@link #CHANNEL_IN_STEREO} instead. */
+ @Deprecated public static final int CHANNEL_CONFIGURATION_STEREO = 3;
+
+ /** Invalid audio channel mask */
+ public static final int CHANNEL_INVALID = 0;
+ /** Default audio channel mask */
+ public static final int CHANNEL_OUT_DEFAULT = 1;
+
+ // Output channel mask definitions below are translated to the native values defined in
+ // in /system/media/audio/include/system/audio.h in the JNI code of AudioTrack
+ public static final int CHANNEL_OUT_FRONT_LEFT = 0x4;
+ public static final int CHANNEL_OUT_FRONT_RIGHT = 0x8;
+ public static final int CHANNEL_OUT_FRONT_CENTER = 0x10;
+ public static final int CHANNEL_OUT_LOW_FREQUENCY = 0x20;
+ public static final int CHANNEL_OUT_BACK_LEFT = 0x40;
+ public static final int CHANNEL_OUT_BACK_RIGHT = 0x80;
+ public static final int CHANNEL_OUT_FRONT_LEFT_OF_CENTER = 0x100;
+ public static final int CHANNEL_OUT_FRONT_RIGHT_OF_CENTER = 0x200;
+ public static final int CHANNEL_OUT_BACK_CENTER = 0x400;
+ public static final int CHANNEL_OUT_SIDE_LEFT = 0x800;
+ public static final int CHANNEL_OUT_SIDE_RIGHT = 0x1000;
+ /** @hide */
+ public static final int CHANNEL_OUT_TOP_CENTER = 0x2000;
+ /** @hide */
+ public static final int CHANNEL_OUT_TOP_FRONT_LEFT = 0x4000;
+ /** @hide */
+ public static final int CHANNEL_OUT_TOP_FRONT_CENTER = 0x8000;
+ /** @hide */
+ public static final int CHANNEL_OUT_TOP_FRONT_RIGHT = 0x10000;
+ /** @hide */
+ public static final int CHANNEL_OUT_TOP_BACK_LEFT = 0x20000;
+ /** @hide */
+ public static final int CHANNEL_OUT_TOP_BACK_CENTER = 0x40000;
+ /** @hide */
+ public static final int CHANNEL_OUT_TOP_BACK_RIGHT = 0x80000;
+
+ public static final int CHANNEL_OUT_MONO = CHANNEL_OUT_FRONT_LEFT;
+ public static final int CHANNEL_OUT_STEREO = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT);
+ // aka QUAD_BACK
+ public static final int CHANNEL_OUT_QUAD = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT |
+ CHANNEL_OUT_BACK_LEFT | CHANNEL_OUT_BACK_RIGHT);
+ /** @hide */
+ public static final int CHANNEL_OUT_QUAD_SIDE = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT |
+ CHANNEL_OUT_SIDE_LEFT | CHANNEL_OUT_SIDE_RIGHT);
+ public static final int CHANNEL_OUT_SURROUND = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT |
+ CHANNEL_OUT_FRONT_CENTER | CHANNEL_OUT_BACK_CENTER);
+ // aka 5POINT1_BACK
+ public static final int CHANNEL_OUT_5POINT1 = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT |
+ CHANNEL_OUT_FRONT_CENTER | CHANNEL_OUT_LOW_FREQUENCY | CHANNEL_OUT_BACK_LEFT | CHANNEL_OUT_BACK_RIGHT);
+ /** @hide */
+ public static final int CHANNEL_OUT_5POINT1_SIDE = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT |
+ CHANNEL_OUT_FRONT_CENTER | CHANNEL_OUT_LOW_FREQUENCY |
+ CHANNEL_OUT_SIDE_LEFT | CHANNEL_OUT_SIDE_RIGHT);
+ // different from AUDIO_CHANNEL_OUT_7POINT1 used internally, and not accepted by AudioRecord.
+ /** @deprecated Not the typical 7.1 surround configuration. Use {@link #CHANNEL_OUT_7POINT1_SURROUND} instead. */
+ @Deprecated public static final int CHANNEL_OUT_7POINT1 = (CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_RIGHT |
+ CHANNEL_OUT_FRONT_CENTER | CHANNEL_OUT_LOW_FREQUENCY | CHANNEL_OUT_BACK_LEFT | CHANNEL_OUT_BACK_RIGHT |
+ CHANNEL_OUT_FRONT_LEFT_OF_CENTER | CHANNEL_OUT_FRONT_RIGHT_OF_CENTER);
+ // matches AUDIO_CHANNEL_OUT_7POINT1
+ public static final int CHANNEL_OUT_7POINT1_SURROUND = (
+ CHANNEL_OUT_FRONT_LEFT | CHANNEL_OUT_FRONT_CENTER | CHANNEL_OUT_FRONT_RIGHT |
+ CHANNEL_OUT_SIDE_LEFT | CHANNEL_OUT_SIDE_RIGHT |
+ CHANNEL_OUT_BACK_LEFT | CHANNEL_OUT_BACK_RIGHT |
+ CHANNEL_OUT_LOW_FREQUENCY);
+ // CHANNEL_OUT_ALL is not yet defined; if added then it should match AUDIO_CHANNEL_OUT_ALL
+
+ /** Minimum value for sample rate,
+ * assuming AudioTrack and AudioRecord share the same limitations.
+ * @hide
+ */
+ // never unhide
+ public static final int SAMPLE_RATE_HZ_MIN = 4000;
+ /** Maximum value for sample rate,
+ * assuming AudioTrack and AudioRecord share the same limitations.
+ * @hide
+ */
+ // never unhide
+ public static final int SAMPLE_RATE_HZ_MAX = 192000;
+ /** Sample rate will be a route-dependent value.
+ * For AudioTrack, it is usually the sink sample rate,
+ * and for AudioRecord it is usually the source sample rate.
+ */
+ public static final int SAMPLE_RATE_UNSPECIFIED = 0;
+
+    /**
+     * @hide
+     * Returns the input channel mask corresponding to an output channel mask.
+     * This can be used for submix rerouting for the mask of the recorder to map to that of the mix.
+     * @param outMask a combination of the CHANNEL_OUT_* definitions, but not CHANNEL_OUT_DEFAULT
+     * @return a combination of CHANNEL_IN_* definitions matching an output channel mask
+     * @throws IllegalArgumentException if the mask is CHANNEL_OUT_DEFAULT, or has more than two
+     *     channels, for which no input equivalent exists
+     */
+    public static int inChannelMaskFromOutChannelMask(int outMask) throws IllegalArgumentException {
+        if (outMask == CHANNEL_OUT_DEFAULT) {
+            throw new IllegalArgumentException(
+                    "Illegal CHANNEL_OUT_DEFAULT channel mask for input.");
+        }
+        // Only mono and stereo output masks have a defined input counterpart.
+        final int channelCount = channelCountFromOutChannelMask(outMask);
+        if (channelCount == 1) {
+            return CHANNEL_IN_MONO;
+        }
+        if (channelCount == 2) {
+            return CHANNEL_IN_STEREO;
+        }
+        throw new IllegalArgumentException("Unsupported channel configuration for input.");
+    }
+
+    /**
+     * @hide
+     * Return the number of channels from an input channel mask
+     * @param mask a combination of the CHANNEL_IN_* definitions, even CHANNEL_IN_DEFAULT
+     * @return number of channels for the mask
+     */
+    public static int channelCountFromInChannelMask(int mask) {
+        // Each set bit in the mask denotes one present channel.
+        return Integer.bitCount(mask);
+    }
+    /**
+     * @hide
+     * Return the number of channels from an output channel mask
+     * @param mask a combination of the CHANNEL_OUT_* definitions, but not CHANNEL_OUT_DEFAULT
+     * @return number of channels for the mask
+     */
+    public static int channelCountFromOutChannelMask(int mask) {
+        // Each set bit in the mask denotes one present channel.
+        return Integer.bitCount(mask);
+    }
+    /**
+     * @hide
+     * Return a channel mask ready to be used by native code
+     * @param javaMask a combination of the CHANNEL_OUT_* definitions, but not CHANNEL_OUT_DEFAULT
+     * @return a native channel mask
+     */
+    public static int convertChannelOutMaskToNativeMask(int javaMask) {
+        // The Java CHANNEL_OUT_* bits are the native bits shifted left by 2
+        // (e.g. CHANNEL_OUT_FRONT_LEFT = 0x4 maps to native bit 0x1).
+        return (javaMask >> 2);
+    }
+
+    /**
+     * @hide
+     * Return a java output channel mask
+     * @param nativeMask a native channel mask
+     * @return a combination of the CHANNEL_OUT_* definitions
+     */
+    public static int convertNativeChannelMaskToOutMask(int nativeMask) {
+        // Inverse of convertChannelOutMaskToNativeMask(): native bits shifted left by 2.
+        return (nativeMask << 2);
+    }
+
+ public static final int CHANNEL_IN_DEFAULT = 1;
+ // These directly match native
+ public static final int CHANNEL_IN_LEFT = 0x4;
+ public static final int CHANNEL_IN_RIGHT = 0x8;
+ public static final int CHANNEL_IN_FRONT = 0x10;
+ public static final int CHANNEL_IN_BACK = 0x20;
+ public static final int CHANNEL_IN_LEFT_PROCESSED = 0x40;
+ public static final int CHANNEL_IN_RIGHT_PROCESSED = 0x80;
+ public static final int CHANNEL_IN_FRONT_PROCESSED = 0x100;
+ public static final int CHANNEL_IN_BACK_PROCESSED = 0x200;
+ public static final int CHANNEL_IN_PRESSURE = 0x400;
+ public static final int CHANNEL_IN_X_AXIS = 0x800;
+ public static final int CHANNEL_IN_Y_AXIS = 0x1000;
+ public static final int CHANNEL_IN_Z_AXIS = 0x2000;
+ public static final int CHANNEL_IN_VOICE_UPLINK = 0x4000;
+ public static final int CHANNEL_IN_VOICE_DNLINK = 0x8000;
+ public static final int CHANNEL_IN_MONO = CHANNEL_IN_FRONT;
+ public static final int CHANNEL_IN_STEREO = (CHANNEL_IN_LEFT | CHANNEL_IN_RIGHT);
+ /** @hide */
+ public static final int CHANNEL_IN_FRONT_BACK = CHANNEL_IN_FRONT | CHANNEL_IN_BACK;
+ // CHANNEL_IN_ALL is not yet defined; if added then it should match AUDIO_CHANNEL_IN_ALL
+
+    /**
+     * Returns the size in bytes of one sample for one channel of the given encoding.
+     * @param audioFormat one of the ENCODING_PCM_* encodings, ENCODING_IEC61937
+     *     or ENCODING_DEFAULT
+     * @return the sample size in bytes
+     * @throws IllegalArgumentException for compressed or invalid encodings
+     * @hide
+     */
+    public static int getBytesPerSample(int audioFormat) {
+        switch (audioFormat) {
+            case ENCODING_PCM_8BIT:
+                return 1;
+            case ENCODING_PCM_16BIT:
+            case ENCODING_IEC61937:
+            case ENCODING_DEFAULT:
+                // Both the default encoding and IEC61937 wrapping are 16 bit samples.
+                return 2;
+            case ENCODING_PCM_FLOAT:
+                return 4;
+            case ENCODING_INVALID:
+            default:
+                throw new IllegalArgumentException("Bad audio format " + audioFormat);
+        }
+    }
+
+    /**
+     * Returns whether the value denotes a known audio encoding (public or hidden),
+     * as opposed to ENCODING_INVALID, ENCODING_DEFAULT or an out-of-range value.
+     * @param audioFormat the encoding value to test
+     * @return true for known ENCODING_* values
+     * @hide
+     */
+    public static boolean isValidEncoding(int audioFormat)
+    {
+        switch (audioFormat) {
+            case ENCODING_PCM_8BIT:
+            case ENCODING_PCM_16BIT:
+            case ENCODING_PCM_FLOAT:
+            case ENCODING_AC3:
+            case ENCODING_E_AC3:
+            case ENCODING_DTS:
+            case ENCODING_DTS_HD:
+            case ENCODING_MP3:
+            case ENCODING_AAC_LC:
+            case ENCODING_AAC_HE_V1:
+            case ENCODING_AAC_HE_V2:
+            case ENCODING_IEC61937:
+            case ENCODING_DOLBY_TRUEHD: // added: public constant was missing from this switch
+                return true;
+            default:
+                return false;
+        }
+    }
+
+    /**
+     * Returns whether the value denotes an encoding that is part of the public API
+     * (i.e. an ENCODING_* constant that is not marked @hide).
+     * @param audioFormat the encoding value to test
+     * @return true for public ENCODING_* values
+     * @hide
+     */
+    public static boolean isPublicEncoding(int audioFormat)
+    {
+        switch (audioFormat) {
+            case ENCODING_PCM_8BIT:
+            case ENCODING_PCM_16BIT:
+            case ENCODING_PCM_FLOAT:
+            case ENCODING_AC3:
+            case ENCODING_E_AC3:
+            case ENCODING_DTS:
+            case ENCODING_DTS_HD:
+            case ENCODING_IEC61937:
+            case ENCODING_DOLBY_TRUEHD: // added: constant is public (not @hide) but was omitted
+                return true;
+            default:
+                return false;
+        }
+    }
+
+    /**
+     * Returns whether the encoding is linear PCM (ENCODING_DEFAULT resolves to PCM).
+     * @param audioFormat the encoding value to test
+     * @return true for linear PCM encodings, false for compressed encodings
+     * @throws IllegalArgumentException for ENCODING_INVALID or unknown encodings
+     * @hide
+     */
+    public static boolean isEncodingLinearPcm(int audioFormat)
+    {
+        switch (audioFormat) {
+            case ENCODING_PCM_8BIT:
+            case ENCODING_PCM_16BIT:
+            case ENCODING_PCM_FLOAT:
+            case ENCODING_DEFAULT:
+                return true;
+            case ENCODING_AC3:
+            case ENCODING_E_AC3:
+            case ENCODING_DTS:
+            case ENCODING_DTS_HD:
+            case ENCODING_MP3:
+            case ENCODING_AAC_LC:
+            case ENCODING_AAC_HE_V1:
+            case ENCODING_AAC_HE_V2:
+            case ENCODING_IEC61937: // wrapped in PCM but compressed
+            case ENCODING_DOLBY_TRUEHD: // added: valid encoding previously fell through to throw
+                return false;
+            case ENCODING_INVALID:
+            default:
+                throw new IllegalArgumentException("Bad audio format " + audioFormat);
+        }
+    }
+
+    /**
+     * Returns whether frames of the encoding have a fixed linear size
+     * (IEC61937 counts as linear since it is framed like stereo 16-bit PCM).
+     * @param audioFormat the encoding value to test
+     * @return true when frame size is fixed, false for compressed encodings
+     * @throws IllegalArgumentException for ENCODING_INVALID or unknown encodings
+     * @hide
+     */
+    public static boolean isEncodingLinearFrames(int audioFormat)
+    {
+        switch (audioFormat) {
+            case ENCODING_PCM_8BIT:
+            case ENCODING_PCM_16BIT:
+            case ENCODING_PCM_FLOAT:
+            case ENCODING_IEC61937: // same size as stereo PCM
+            case ENCODING_DEFAULT:
+                return true;
+            case ENCODING_AC3:
+            case ENCODING_E_AC3:
+            case ENCODING_DTS:
+            case ENCODING_DTS_HD:
+            case ENCODING_MP3:
+            case ENCODING_AAC_LC:
+            case ENCODING_AAC_HE_V1:
+            case ENCODING_AAC_HE_V2:
+            case ENCODING_DOLBY_TRUEHD: // added: valid encoding previously fell through to throw
+                return false;
+            case ENCODING_INVALID:
+            default:
+                throw new IllegalArgumentException("Bad audio format " + audioFormat);
+        }
+    }
+    /**
+     * Returns a new array containing only the values of {@code formats} that are
+     * public encodings, preserving their relative order.
+     * @param formats encoding values to filter; may be null
+     * @return a filtered copy, or null when {@code formats} is null
+     * @hide
+     */
+    public static int[] filterPublicFormats(int[] formats) {
+        if (formats == null) {
+            return null;
+        }
+        // Collect the public encodings into a scratch array, then trim to size.
+        final int[] filtered = new int[formats.length];
+        int count = 0;
+        for (int format : formats) {
+            if (isPublicEncoding(format)) {
+                filtered[count++] = format;
+            }
+        }
+        return Arrays.copyOf(filtered, count);
+    }
+
+    /** @removed */
+    public AudioFormat()
+    {
+        // The no-arg constructor is not usable; instances are created through
+        // AudioFormat.Builder or the JNI constructor below.
+        throw new UnsupportedOperationException("There is no valid usage of this constructor");
+    }
+
+    /**
+     * Private constructor with an ignored argument to differentiate from the removed default ctor
+     * @param ignoredArgument any value; only present to give this overload a distinct signature
+     */
+    private AudioFormat(int ignoredArgument) {
+        // Leaves all fields at their defaults; Builder.build() assigns them afterwards.
+    }
+
+    /**
+     * Constructor used by the JNI. Parameters are not checked for validity.
+     */
+    // Update sound trigger JNI in core/jni/android_hardware_SoundTrigger.cpp when modifying this
+    // constructor
+    private AudioFormat(int encoding, int sampleRate, int channelMask, int channelIndexMask) {
+        mEncoding = encoding;
+        mSampleRate = sampleRate;
+        mChannelMask = channelMask;
+        mChannelIndexMask = channelIndexMask;
+        // All four properties are supplied by the caller, so mark every one as set.
+        mPropertySetMask = AUDIO_FORMAT_HAS_PROPERTY_ENCODING |
+                AUDIO_FORMAT_HAS_PROPERTY_SAMPLE_RATE |
+                AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_MASK |
+                AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_INDEX_MASK;
+    }
+
+ /** @hide */
+ public final static int AUDIO_FORMAT_HAS_PROPERTY_NONE = 0x0;
+ /** @hide */
+ public final static int AUDIO_FORMAT_HAS_PROPERTY_ENCODING = 0x1 << 0;
+ /** @hide */
+ public final static int AUDIO_FORMAT_HAS_PROPERTY_SAMPLE_RATE = 0x1 << 1;
+ /** @hide */
+ public final static int AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_MASK = 0x1 << 2;
+ /** @hide */
+ public final static int AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_INDEX_MASK = 0x1 << 3;
+
+ private int mEncoding;
+ private int mSampleRate;
+ private int mChannelMask;
+ private int mChannelIndexMask;
+ private int mPropertySetMask;
+
+    /**
+     * Return the encoding.
+     * See the section on <a href="#encoding">encodings</a> for more information about the different
+     * types of supported audio encoding.
+     * @return one of the values that can be set in {@link Builder#setEncoding(int)} or
+     * {@link AudioFormat#ENCODING_INVALID} if not set.
+     */
+    public int getEncoding() {
+        final boolean encodingSet =
+                (mPropertySetMask & AUDIO_FORMAT_HAS_PROPERTY_ENCODING) != 0;
+        return encodingSet ? mEncoding : ENCODING_INVALID;
+    }
+
+    /**
+     * Return the sample rate.
+     * @return one of the values that can be set in {@link Builder#setSampleRate(int)} or
+     * {@link #SAMPLE_RATE_UNSPECIFIED} if not set.
+     */
+    public int getSampleRate() {
+        // No property-set check needed: an unset rate is stored directly as
+        // SAMPLE_RATE_UNSPECIFIED (0), the Builder's default.
+        return mSampleRate;
+    }
+
+    /**
+     * Return the channel mask.
+     * See the section on <a href="#channelMask">channel masks</a> for more information about
+     * the difference between index-based masks (as returned by {@link #getChannelIndexMask()})
+     * and the position-based mask returned by this function.
+     * @return one of the values that can be set in {@link Builder#setChannelMask(int)} or
+     * {@link AudioFormat#CHANNEL_INVALID} if not set.
+     */
+    public int getChannelMask() {
+        final boolean maskSet =
+                (mPropertySetMask & AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_MASK) != 0;
+        return maskSet ? mChannelMask : CHANNEL_INVALID;
+    }
+
+    /**
+     * Return the channel index mask.
+     * See the section on <a href="#channelMask">channel masks</a> for more information about
+     * the difference between index-based masks, and position-based masks (as returned
+     * by {@link #getChannelMask()}).
+     * @return one of the values that can be set in {@link Builder#setChannelIndexMask(int)} or
+     * {@link AudioFormat#CHANNEL_INVALID} if not set or an invalid mask was used.
+     */
+    public int getChannelIndexMask() {
+        final boolean indexMaskSet =
+                (mPropertySetMask & AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_INDEX_MASK) != 0;
+        return indexMaskSet ? mChannelIndexMask : CHANNEL_INVALID;
+    }
+
+    /**
+     * Return the channel count.
+     * @return the channel count derived from the channel position mask or the channel index mask.
+     * Zero is returned if both the channel position mask and the channel index mask are not set.
+     */
+    public int getChannelCount() {
+        final int positionCount = channelCountFromOutChannelMask(getChannelMask());
+        final int indexCount = Integer.bitCount(getChannelIndexMask());
+        if (positionCount == 0) {
+            // No position mask set: the index mask alone determines the count (possibly 0).
+            return indexCount;
+        }
+        if (indexCount != 0 && indexCount != positionCount) {
+            return 0; // position and index channel counts disagree
+        }
+        return positionCount;
+    }
+
+    /** @hide */
+    public int getPropertySetMask() {
+        // Bit field of AUDIO_FORMAT_HAS_PROPERTY_* flags recording which properties were set.
+        return mPropertySetMask;
+    }
+
+    /** @hide */
+    public String toLogFriendlyString() {
+        // Compact one-line summary for log output, e.g. "2ch 48000Hz ENCODING_PCM_16BIT".
+        return String.format("%dch %dHz %s",
+                getChannelCount(), mSampleRate, toLogFriendlyEncoding(mEncoding));
+    }
+
    /**
     * Builder class for {@link AudioFormat} objects.
     * Use this class to configure and create an AudioFormat instance. By setting format
     * characteristics such as audio encoding, channel mask or sample rate, you indicate which
     * of those are to vary from the default behavior on this device wherever this audio format
     * is used. See {@link AudioFormat} for a complete description of the different parameters that
     * can be used to configure an <code>AudioFormat</code> instance.
     * <p>{@link AudioFormat} is for instance used in
     * {@link AudioTrack#AudioTrack(AudioAttributes, AudioFormat, int, int, int)}. In this
     * constructor, every format characteristic set on the <code>Builder</code> (e.g. with
     * {@link #setSampleRate(int)}) will alter the default values used by an
     * <code>AudioTrack</code>. In this case for audio playback with <code>AudioTrack</code>, the
     * sample rate set in the <code>Builder</code> would override the platform output sample rate
     * which would otherwise be selected by default.
     */
    public static class Builder {
        // Defaults correspond to "property not set"; each setter records its property
        // in mPropertySetMask so build() produces a format reflecting only what was set.
        private int mEncoding = ENCODING_INVALID;
        private int mSampleRate = SAMPLE_RATE_UNSPECIFIED;
        private int mChannelMask = CHANNEL_INVALID;
        private int mChannelIndexMask = 0;
        private int mPropertySetMask = AUDIO_FORMAT_HAS_PROPERTY_NONE;

        /**
         * Constructs a new Builder with none of the format characteristics set.
         */
        public Builder() {
        }

        /**
         * Constructs a new Builder from a given {@link AudioFormat}.
         * @param af the {@link AudioFormat} object whose data will be reused in the new Builder.
         */
        public Builder(AudioFormat af) {
            mEncoding = af.mEncoding;
            mSampleRate = af.mSampleRate;
            mChannelMask = af.mChannelMask;
            mChannelIndexMask = af.mChannelIndexMask;
            mPropertySetMask = af.mPropertySetMask;
        }

        /**
         * Combines all of the format characteristics that have been set and returns a new
         * {@link AudioFormat} object.
         * @return a new {@link AudioFormat} object
         */
        public AudioFormat build() {
            // The int constructor argument is documented inline as ignored.
            AudioFormat af = new AudioFormat(1980/*ignored*/);
            af.mEncoding = mEncoding;
            // not calling setSampleRate is equivalent to calling
            // setSampleRate(SAMPLE_RATE_UNSPECIFIED)
            af.mSampleRate = mSampleRate;
            af.mChannelMask = mChannelMask;
            af.mChannelIndexMask = mChannelIndexMask;
            af.mPropertySetMask = mPropertySetMask;
            return af;
        }

        /**
         * Sets the data encoding format.
         * @param encoding one of {@link AudioFormat#ENCODING_DEFAULT},
         *     {@link AudioFormat#ENCODING_PCM_8BIT},
         *     {@link AudioFormat#ENCODING_PCM_16BIT},
         *     {@link AudioFormat#ENCODING_PCM_FLOAT},
         *     {@link AudioFormat#ENCODING_AC3},
         *     {@link AudioFormat#ENCODING_E_AC3},
         *     {@link AudioFormat#ENCODING_DTS},
         *     {@link AudioFormat#ENCODING_DTS_HD},
         *     {@link AudioFormat#ENCODING_IEC61937}.
         * @return the same Builder instance.
         * @throws java.lang.IllegalArgumentException if <code>encoding</code> is not one of
         *     the values listed above.
         */
        public Builder setEncoding(@Encoding int encoding) throws IllegalArgumentException {
            switch (encoding) {
                case ENCODING_DEFAULT:
                    // DEFAULT is normalized to 16-bit PCM.
                    mEncoding = ENCODING_PCM_16BIT;
                    break;
                case ENCODING_PCM_8BIT:
                case ENCODING_PCM_16BIT:
                case ENCODING_PCM_FLOAT:
                case ENCODING_AC3:
                case ENCODING_E_AC3:
                case ENCODING_DTS:
                case ENCODING_DTS_HD:
                case ENCODING_IEC61937:
                    mEncoding = encoding;
                    break;
                case ENCODING_INVALID:
                default:
                    throw new IllegalArgumentException("Invalid encoding " + encoding);
            }
            mPropertySetMask |= AUDIO_FORMAT_HAS_PROPERTY_ENCODING;
            return this;
        }

        /**
         * Sets the channel position mask.
         * The channel position mask specifies the association between audio samples in a frame
         * with named endpoint channels. The samples in the frame correspond to the
         * named set bits in the channel position mask, in ascending bit order.
         * See {@link #setChannelIndexMask(int)} to specify channels
         * based on endpoint numbered channels. This <a href="#channelPositionMask">description of
         * channel position masks</a> covers the concept in more details.
         * @param channelMask describes the configuration of the audio channels.
         *    <p> For output, the channelMask can be an OR-ed combination of
         *    channel position masks, e.g.
         *    {@link AudioFormat#CHANNEL_OUT_FRONT_LEFT},
         *    {@link AudioFormat#CHANNEL_OUT_FRONT_RIGHT},
         *    {@link AudioFormat#CHANNEL_OUT_FRONT_CENTER},
         *    {@link AudioFormat#CHANNEL_OUT_LOW_FREQUENCY},
         *    {@link AudioFormat#CHANNEL_OUT_BACK_LEFT},
         *    {@link AudioFormat#CHANNEL_OUT_BACK_RIGHT},
         *    {@link AudioFormat#CHANNEL_OUT_BACK_CENTER},
         *    {@link AudioFormat#CHANNEL_OUT_SIDE_LEFT},
         *    {@link AudioFormat#CHANNEL_OUT_SIDE_RIGHT}.
         *    <p> For a valid {@link AudioTrack} channel position mask,
         *    the following conditions apply:
         *    <br> (1) at most eight channel positions may be used;
         *    <br> (2) right/left pairs should be matched.
         *    <p> For input or {@link AudioRecord}, the mask should be
         *    {@link AudioFormat#CHANNEL_IN_MONO} or
         *    {@link AudioFormat#CHANNEL_IN_STEREO}. {@link AudioFormat#CHANNEL_IN_MONO} is
         *    guaranteed to work on all devices.
         * @return the same <code>Builder</code> instance.
         * @throws IllegalArgumentException if the channel mask is invalid or
         *    if both channel index mask and channel position mask
         *    are specified but do not have the same channel count.
         */
        public @NonNull Builder setChannelMask(int channelMask) {
            if (channelMask == CHANNEL_INVALID) {
                throw new IllegalArgumentException("Invalid zero channel mask");
            } else if (/* channelMask != 0 && */ mChannelIndexMask != 0 &&
                    Integer.bitCount(channelMask) != Integer.bitCount(mChannelIndexMask)) {
                // Cross-check against a previously set index mask: channel counts must agree.
                throw new IllegalArgumentException("Mismatched channel count for mask " +
                        Integer.toHexString(channelMask).toUpperCase());
            }
            mChannelMask = channelMask;
            mPropertySetMask |= AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_MASK;
            return this;
        }

        /**
         * Sets the channel index mask.
         * A channel index mask specifies the association of audio samples in the frame
         * with numbered endpoint channels. The i-th bit in the channel index
         * mask corresponds to the i-th endpoint channel.
         * For example, an endpoint with four channels is represented
         * as index mask bits 0 through 3. This <a href="#channelIndexMask">description of channel
         * index masks</a> covers the concept in more details.
         * See {@link #setChannelMask(int)} for a positional mask interpretation.
         * <p> Both {@link AudioTrack} and {@link AudioRecord} support
         * a channel index mask.
         * If a channel index mask is specified it is used,
         * otherwise the channel position mask specified
         * by <code>setChannelMask</code> is used.
         * For <code>AudioTrack</code> and <code>AudioRecord</code>,
         * a channel position mask is not required if a channel index mask is specified.
         *
         * @param channelIndexMask describes the configuration of the audio channels.
         *    <p> For output, the <code>channelIndexMask</code> is an OR-ed combination of
         *    bits representing the mapping of <code>AudioTrack</code> write samples
         *    to output sink channels.
         *    For example, a mask of <code>0xa</code>, or binary <code>1010</code>,
         *    means the <code>AudioTrack</code> write frame consists of two samples,
         *    which are routed to the second and the fourth channels of the output sink.
         *    Unmatched output sink channels are zero filled and unmatched
         *    <code>AudioTrack</code> write samples are dropped.
         *    <p> For input, the <code>channelIndexMask</code> is an OR-ed combination of
         *    bits representing the mapping of input source channels to
         *    <code>AudioRecord</code> read samples.
         *    For example, a mask of <code>0x5</code>, or binary
         *    <code>101</code>, will read from the first and third channel of the input
         *    source device and store them in the first and second sample of the
         *    <code>AudioRecord</code> read frame.
         *    Unmatched input source channels are dropped and
         *    unmatched <code>AudioRecord</code> read samples are zero filled.
         * @return the same <code>Builder</code> instance.
         * @throws IllegalArgumentException if the channel index mask is invalid or
         *    if both channel index mask and channel position mask
         *    are specified but do not have the same channel count.
         */
        public @NonNull Builder setChannelIndexMask(int channelIndexMask) {
            if (channelIndexMask == 0) {
                throw new IllegalArgumentException("Invalid zero channel index mask");
            } else if (/* channelIndexMask != 0 && */ mChannelMask != 0 &&
                    Integer.bitCount(channelIndexMask) != Integer.bitCount(mChannelMask)) {
                // Cross-check against a previously set position mask: channel counts must agree.
                throw new IllegalArgumentException("Mismatched channel count for index mask " +
                        Integer.toHexString(channelIndexMask).toUpperCase());
            }
            mChannelIndexMask = channelIndexMask;
            mPropertySetMask |= AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_INDEX_MASK;
            return this;
        }

        /**
         * Sets the sample rate.
         * @param sampleRate the sample rate expressed in Hz
         * @return the same Builder instance.
         * @throws java.lang.IllegalArgumentException if <code>sampleRate</code> is outside
         *     the [SAMPLE_RATE_HZ_MIN, SAMPLE_RATE_HZ_MAX] range and is not
         *     {@link AudioFormat#SAMPLE_RATE_UNSPECIFIED}.
         */
        public Builder setSampleRate(int sampleRate) throws IllegalArgumentException {
            // TODO Consider whether to keep the MIN and MAX range checks here.
            // It is not necessary and poses the problem of defining the limits independently from
            // native implementation or platform capabilities.
            if (((sampleRate < SAMPLE_RATE_HZ_MIN) || (sampleRate > SAMPLE_RATE_HZ_MAX)) &&
                    sampleRate != SAMPLE_RATE_UNSPECIFIED) {
                throw new IllegalArgumentException("Invalid sample rate " + sampleRate);
            }
            mSampleRate = sampleRate;
            mPropertySetMask |= AUDIO_FORMAT_HAS_PROPERTY_SAMPLE_RATE;
            return this;
        }
    }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || getClass() != o.getClass()) return false;
+
+ AudioFormat that = (AudioFormat) o;
+
+ if (mPropertySetMask != that.mPropertySetMask) return false;
+
+ // return false if any of the properties is set and the values differ
+ return !((((mPropertySetMask & AUDIO_FORMAT_HAS_PROPERTY_ENCODING) != 0)
+ && (mEncoding != that.mEncoding))
+ || (((mPropertySetMask & AUDIO_FORMAT_HAS_PROPERTY_SAMPLE_RATE) != 0)
+ && (mSampleRate != that.mSampleRate))
+ || (((mPropertySetMask & AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_MASK) != 0)
+ && (mChannelMask != that.mChannelMask))
+ || (((mPropertySetMask & AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_INDEX_MASK) != 0)
+ && (mChannelIndexMask != that.mChannelIndexMask)));
+ }
+
    @Override
    public int hashCode() {
        // Hashes the same fields equals() inspects. NOTE(review): unset properties
        // contribute their default field values here; this appears consistent with
        // equals() because it first requires identical mPropertySetMask — confirm
        // that unset fields always hold their defaults (as build() suggests).
        return Objects.hash(mPropertySetMask, mSampleRate, mEncoding, mChannelMask,
                mChannelIndexMask);
    }
+
    @Override
    public int describeContents() {
        // No special objects (e.g. file descriptors) in the marshalled representation.
        return 0;
    }
+
    @Override
    public void writeToParcel(Parcel dest, int flags) {
        // Field order is the wire format; it must match the AudioFormat(Parcel)
        // constructor exactly — keep the two in sync.
        dest.writeInt(mPropertySetMask);
        dest.writeInt(mEncoding);
        dest.writeInt(mSampleRate);
        dest.writeInt(mChannelMask);
        dest.writeInt(mChannelIndexMask);
    }
+
    /** Re-creates an AudioFormat from a Parcel; fields are read in the exact order
     *  they are written by {@link #writeToParcel(Parcel, int)}. */
    private AudioFormat(Parcel in) {
        mPropertySetMask = in.readInt();
        mEncoding = in.readInt();
        mSampleRate = in.readInt();
        mChannelMask = in.readInt();
        mChannelIndexMask = in.readInt();
    }
+
    /** Parcelable creator used by the framework to unmarshal AudioFormat instances. */
    public static final Parcelable.Creator<AudioFormat> CREATOR =
            new Parcelable.Creator<AudioFormat>() {
        public AudioFormat createFromParcel(Parcel p) {
            return new AudioFormat(p);
        }
        public AudioFormat[] newArray(int size) {
            return new AudioFormat[size];
        }
    };
+
+ @Override
+ public String toString () {
+ return new String("AudioFormat:"
+ + " props=" + mPropertySetMask
+ + " enc=" + mEncoding
+ + " chan=0x" + Integer.toHexString(mChannelMask).toUpperCase()
+ + " chan_index=0x" + Integer.toHexString(mChannelIndexMask).toUpperCase()
+ + " rate=" + mSampleRate);
+ }
+
    /** @hide
     *  Encoding values accepted by {@link Builder#setEncoding(int)}; note that
     *  ENCODING_DEFAULT is remapped to ENCODING_PCM_16BIT by that setter. */
    @IntDef({
        ENCODING_DEFAULT,
        ENCODING_PCM_8BIT,
        ENCODING_PCM_16BIT,
        ENCODING_PCM_FLOAT,
        ENCODING_AC3,
        ENCODING_E_AC3,
        ENCODING_DTS,
        ENCODING_DTS_HD,
        ENCODING_IEC61937
    })
    @Retention(RetentionPolicy.SOURCE)
    public @interface Encoding {}
+
+}
diff --git a/android/media/AudioGain.java b/android/media/AudioGain.java
new file mode 100644
index 00000000..57709d52
--- /dev/null
+++ b/android/media/AudioGain.java
@@ -0,0 +1,159 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
/**
 * The AudioGain describes a gain controller. Gain controllers are exposed by
 * audio ports when the gain is configurable at this port's input or output.
 * Gain values are expressed in millibels.
 * A gain controller has the following attributes:
 * - mode: defines modes of operation or features
 *    MODE_JOINT: all channel gains are controlled simultaneously
 *    MODE_CHANNELS: each channel gain is controlled individually
 *    MODE_RAMP: ramps can be applied when gain changes
 * - channel mask: indicates for which channels the gain can be controlled
 * - min value: minimum gain value in millibel
 * - max value: maximum gain value in millibel
 * - default value: gain value after reset in millibel
 * - step value: granularity of gain control in millibel
 * - min ramp duration: minimum ramp duration in milliseconds
 * - max ramp duration: maximum ramp duration in milliseconds
 *
 * This object is always created by the framework and read only by applications.
 * Applications get a list of AudioGainDescriptors from AudioPortDescriptor.gains() and can build a
 * valid gain configuration from AudioGain.buildConfig()
 * @hide
 */
public class AudioGain {

    /**
     * Bit of AudioGain.mode() field indicating that
     * all channel gains are controlled simultaneously
     */
    public static final int MODE_JOINT = 1;
    /**
     * Bit of AudioGain.mode() field indicating that
     * each channel gain is controlled individually
     */
    public static final int MODE_CHANNELS = 2;
    /**
     * Bit of AudioGain.mode() field indicating that
     * ramps can be applied when gain changes. The type of ramp (linear, log etc...) is
     * implementation specific.
     */
    public static final int MODE_RAMP = 4;

    // All fields are immutable; instances are read-only snapshots built by the framework.
    private final int mIndex;
    private final int mMode;
    private final int mChannelMask;
    private final int mMinValue;
    private final int mMaxValue;
    private final int mDefaultValue;
    private final int mStepValue;
    private final int mRampDurationMinMs;
    private final int mRampDurationMaxMs;

    // The channel mask passed to the constructor is as specified in AudioFormat
    // (e.g. AudioFormat.CHANNEL_OUT_STEREO)
    // Package-private: only the framework constructs AudioGain instances.
    AudioGain(int index, int mode, int channelMask,
                        int minValue, int maxValue, int defaultValue, int stepValue,
                        int rampDurationMinMs, int rampDurationMaxMs) {
        mIndex = index;
        mMode = mode;
        mChannelMask = channelMask;
        mMinValue = minValue;
        mMaxValue = maxValue;
        mDefaultValue = defaultValue;
        mStepValue = stepValue;
        mRampDurationMinMs = rampDurationMinMs;
        mRampDurationMaxMs = rampDurationMaxMs;
    }

    /**
     * Bit field indicating supported modes of operation:
     * {@link #MODE_JOINT}, {@link #MODE_CHANNELS}, {@link #MODE_RAMP}.
     */
    public int mode() {
        return mMode;
    }

    /**
     * Indicates for which channels the gain can be controlled
     * (e.g. AudioFormat.CHANNEL_OUT_STEREO)
     */
    public int channelMask() {
        return mChannelMask;
    }

    /**
     * Minimum gain value in millibel
     */
    public int minValue() {
        return mMinValue;
    }

    /**
     * Maximum gain value in millibel
     */
    public int maxValue() {
        return mMaxValue;
    }

    /**
     * Default gain value in millibel
     */
    public int defaultValue() {
        return mDefaultValue;
    }

    /**
     * Granularity of gain control in millibel
     */
    public int stepValue() {
        return mStepValue;
    }

    /**
     * Minimum ramp duration in milliseconds
     * 0 if MODE_RAMP not set
     */
    public int rampDurationMinMs() {
        return mRampDurationMinMs;
    }

    /**
     * Maximum ramp duration in milliseconds
     * 0 if MODE_RAMP not set
     */
    public int rampDurationMaxMs() {
        return mRampDurationMaxMs;
    }

    /**
     * Build a valid gain configuration for this gain controller for use by
     * AudioPortDescriptor.setGain()
     * @param mode desired mode of operation
     * @param channelMask channels of which the gain should be modified.
     * @param values gain values for each channel.
     * @param rampDurationMs ramp duration if mode MODE_RAMP is set.
     *                       ignored if MODE_JOINT.
     */
    public AudioGainConfig buildConfig(int mode, int channelMask,
                                       int[] values, int rampDurationMs) {
        //TODO: check params here
        return new AudioGainConfig(mIndex, this, mode, channelMask, values, rampDurationMs);
    }
}
diff --git a/android/media/AudioGainConfig.java b/android/media/AudioGainConfig.java
new file mode 100644
index 00000000..ea616799
--- /dev/null
+++ b/android/media/AudioGainConfig.java
@@ -0,0 +1,84 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * The AudioGainConfig is used by APIs setting or getting values on a given gain
+ * controller. It contains a valid configuration (value, channels...) for a gain controller
+ * exposed by an audio port.
+ * @see AudioGain
+ * @see AudioPort
+ * @hide
+ */
+public class AudioGainConfig {
+ AudioGain mGain;
+ private final int mIndex;
+ private final int mMode;
+ private final int mChannelMask;
+ private final int mValues[];
+ private final int mRampDurationMs;
+
+ AudioGainConfig(int index, AudioGain gain, int mode, int channelMask,
+ int[] values, int rampDurationMs) {
+ mIndex = index;
+ mGain = gain;
+ mMode = mode;
+ mChannelMask = channelMask;
+ mValues = values;
+ mRampDurationMs = rampDurationMs;
+ }
+
+ /**
+ * get the index of the parent gain.
+ * frameworks use only.
+ */
+ int index() {
+ return mIndex;
+ }
+
+ /**
+ * Bit field indicating requested modes of operation. See {@link AudioGain#MODE_JOINT},
+ * {@link AudioGain#MODE_CHANNELS}, {@link AudioGain#MODE_RAMP}
+ */
+ public int mode() {
+ return mMode;
+ }
+
+ /**
+ * Indicates for which channels the gain is set.
+ * See {@link AudioFormat#CHANNEL_OUT_STEREO}, {@link AudioFormat#CHANNEL_OUT_MONO} ...
+ */
+ public int channelMask() {
+ return mChannelMask;
+ }
+
+ /**
+ * Gain values for each channel in the order of bits set in
+ * channelMask() from LSB to MSB
+ */
+ public int[] values() {
+ return mValues;
+ }
+
+ /**
+ * Ramp duration in milliseconds. N/A if mode() does not
+ * specify MODE_RAMP.
+ */
+ public int rampDurationMs() {
+ return mRampDurationMs;
+ }
+}
diff --git a/android/media/AudioHandle.java b/android/media/AudioHandle.java
new file mode 100644
index 00000000..6493dac1
--- /dev/null
+++ b/android/media/AudioHandle.java
@@ -0,0 +1,54 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * The AudioHandle is used by the audio framework implementation to
+ * uniquely identify a particular component of the routing topology
+ * (AudioPort or AudioPatch)
+ * It is not visible or used at the API.
+ */
+class AudioHandle {
+ private final int mId;
+
+ AudioHandle(int id) {
+ mId = id;
+ }
+
+ int id() {
+ return mId;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o == null || !(o instanceof AudioHandle)) {
+ return false;
+ }
+ AudioHandle ah = (AudioHandle)o;
+ return mId == ah.id();
+ }
+
+ @Override
+ public int hashCode() {
+ return mId;
+ }
+
+ @Override
+ public String toString() {
+ return Integer.toString(mId);
+ }
+}
diff --git a/android/media/AudioManager.java b/android/media/AudioManager.java
new file mode 100644
index 00000000..15754574
--- /dev/null
+++ b/android/media/AudioManager.java
@@ -0,0 +1,4535 @@
+/*
+ * Copyright (C) 2007 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.annotation.RequiresPermission;
+import android.annotation.SdkConstant;
+import android.annotation.SdkConstant.SdkConstantType;
+import android.annotation.SuppressLint;
+import android.annotation.SystemApi;
+import android.annotation.SystemService;
+import android.app.NotificationManager;
+import android.app.PendingIntent;
+import android.bluetooth.BluetoothDevice;
+import android.content.ComponentName;
+import android.content.Context;
+import android.content.Intent;
+import android.media.audiopolicy.AudioPolicy;
+import android.media.session.MediaController;
+import android.media.session.MediaSession;
+import android.media.session.MediaSessionLegacyHelper;
+import android.media.session.MediaSessionManager;
+import android.os.Binder;
+import android.os.Build;
+import android.os.Handler;
+import android.os.IBinder;
+import android.os.Looper;
+import android.os.Message;
+import android.os.Process;
+import android.os.RemoteException;
+import android.os.SystemClock;
+import android.os.ServiceManager;
+import android.os.UserHandle;
+import android.provider.Settings;
+import android.util.ArrayMap;
+import android.util.Log;
+import android.util.Pair;
+import android.view.KeyEvent;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.concurrent.ConcurrentHashMap;
+
+/**
+ * AudioManager provides access to volume and ringer mode control.
+ */
+@SystemService(Context.AUDIO_SERVICE)
+public class AudioManager {
+
+ private Context mOriginalContext;
+ private Context mApplicationContext;
+ private long mVolumeKeyUpTime;
+ private final boolean mUseVolumeKeySounds;
+ private final boolean mUseFixedVolume;
+ private static final String TAG = "AudioManager";
+ private static final boolean DEBUG = false;
+ private static final AudioPortEventHandler sAudioPortEventHandler = new AudioPortEventHandler();
+
+ /**
+ * Broadcast intent, a hint for applications that audio is about to become
+ * 'noisy' due to a change in audio outputs. For example, this intent may
+ * be sent when a wired headset is unplugged, or when an A2DP audio
+ * sink is disconnected, and the audio system is about to automatically
+ * switch audio route to the speaker. Applications that are controlling
+ * audio streams may consider pausing, reducing volume or some other action
+ * on receipt of this intent so as not to surprise the user with audio
+ * from the speaker.
+ */
+ @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
+ public static final String ACTION_AUDIO_BECOMING_NOISY = "android.media.AUDIO_BECOMING_NOISY";
+
+ /**
+ * Sticky broadcast intent action indicating that the ringer mode has
+ * changed. Includes the new ringer mode.
+ *
+ * @see #EXTRA_RINGER_MODE
+ */
+ @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
+ public static final String RINGER_MODE_CHANGED_ACTION = "android.media.RINGER_MODE_CHANGED";
+
+ /**
+ * @hide
+ * Sticky broadcast intent action indicating that the internal ringer mode has
+ * changed. Includes the new ringer mode.
+ *
+ * @see #EXTRA_RINGER_MODE
+ */
+ @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
+ public static final String INTERNAL_RINGER_MODE_CHANGED_ACTION =
+ "android.media.INTERNAL_RINGER_MODE_CHANGED_ACTION";
+
+ /**
+ * The new ringer mode.
+ *
+ * @see #RINGER_MODE_CHANGED_ACTION
+ * @see #RINGER_MODE_NORMAL
+ * @see #RINGER_MODE_SILENT
+ * @see #RINGER_MODE_VIBRATE
+ */
+ public static final String EXTRA_RINGER_MODE = "android.media.EXTRA_RINGER_MODE";
+
+ /**
+ * Broadcast intent action indicating that the vibrate setting has
+ * changed. Includes the vibrate type and its new setting.
+ *
+ * @see #EXTRA_VIBRATE_TYPE
+ * @see #EXTRA_VIBRATE_SETTING
+ * @deprecated Applications should maintain their own vibrate policy based on
+ * current ringer mode and listen to {@link #RINGER_MODE_CHANGED_ACTION} instead.
+ */
+ @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
+ public static final String VIBRATE_SETTING_CHANGED_ACTION =
+ "android.media.VIBRATE_SETTING_CHANGED";
+
+ /**
+ * @hide Broadcast intent when the volume for a particular stream type changes.
+ * Includes the stream, the new volume and previous volumes.
+ * Notes:
+ * - for internal platform use only, do not make public,
+ * - never used for "remote" volume changes
+ *
+ * @see #EXTRA_VOLUME_STREAM_TYPE
+ * @see #EXTRA_VOLUME_STREAM_VALUE
+ * @see #EXTRA_PREV_VOLUME_STREAM_VALUE
+ */
+ @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
+ public static final String VOLUME_CHANGED_ACTION = "android.media.VOLUME_CHANGED_ACTION";
+
+ /**
+ * @hide Broadcast intent when the devices for a particular stream type changes.
+ * Includes the stream, the new devices and previous devices.
+ * Notes:
+ * - for internal platform use only, do not make public,
+ * - never used for "remote" volume changes
+ *
+ * @see #EXTRA_VOLUME_STREAM_TYPE
+ * @see #EXTRA_VOLUME_STREAM_DEVICES
+ * @see #EXTRA_PREV_VOLUME_STREAM_DEVICES
+ * @see #getDevicesForStream
+ */
+ @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
+ public static final String STREAM_DEVICES_CHANGED_ACTION =
+ "android.media.STREAM_DEVICES_CHANGED_ACTION";
+
+ /**
+ * @hide Broadcast intent when a stream mute state changes.
+ * Includes the stream that changed and the new mute state
+ *
+ * @see #EXTRA_VOLUME_STREAM_TYPE
+ * @see #EXTRA_STREAM_VOLUME_MUTED
+ */
+ @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
+ public static final String STREAM_MUTE_CHANGED_ACTION =
+ "android.media.STREAM_MUTE_CHANGED_ACTION";
+
+ /**
+ * @hide Broadcast intent when the master mute state changes.
 * Includes the new volume
+ *
+ * @see #EXTRA_MASTER_VOLUME_MUTED
+ */
+ @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
+ public static final String MASTER_MUTE_CHANGED_ACTION =
+ "android.media.MASTER_MUTE_CHANGED_ACTION";
+
+ /**
+ * The new vibrate setting for a particular type.
+ *
+ * @see #VIBRATE_SETTING_CHANGED_ACTION
+ * @see #EXTRA_VIBRATE_TYPE
+ * @see #VIBRATE_SETTING_ON
+ * @see #VIBRATE_SETTING_OFF
+ * @see #VIBRATE_SETTING_ONLY_SILENT
+ * @deprecated Applications should maintain their own vibrate policy based on
+ * current ringer mode and listen to {@link #RINGER_MODE_CHANGED_ACTION} instead.
+ */
+ public static final String EXTRA_VIBRATE_SETTING = "android.media.EXTRA_VIBRATE_SETTING";
+
+ /**
+ * The vibrate type whose setting has changed.
+ *
+ * @see #VIBRATE_SETTING_CHANGED_ACTION
+ * @see #VIBRATE_TYPE_NOTIFICATION
+ * @see #VIBRATE_TYPE_RINGER
+ * @deprecated Applications should maintain their own vibrate policy based on
+ * current ringer mode and listen to {@link #RINGER_MODE_CHANGED_ACTION} instead.
+ */
+ public static final String EXTRA_VIBRATE_TYPE = "android.media.EXTRA_VIBRATE_TYPE";
+
+ /**
+ * @hide The stream type for the volume changed intent.
+ */
+ public static final String EXTRA_VOLUME_STREAM_TYPE = "android.media.EXTRA_VOLUME_STREAM_TYPE";
+
+ /**
+ * @hide
+ * The stream type alias for the volume changed intent.
+ * For instance the intent may indicate a change of the {@link #STREAM_NOTIFICATION} stream
+ * type (as indicated by the {@link #EXTRA_VOLUME_STREAM_TYPE} extra), but this is also
+ * reflected by a change of the volume of its alias, {@link #STREAM_RING} on some devices,
+ * {@link #STREAM_MUSIC} on others (e.g. a television).
+ */
+ public static final String EXTRA_VOLUME_STREAM_TYPE_ALIAS =
+ "android.media.EXTRA_VOLUME_STREAM_TYPE_ALIAS";
+
+ /**
+ * @hide The volume associated with the stream for the volume changed intent.
+ */
+ public static final String EXTRA_VOLUME_STREAM_VALUE =
+ "android.media.EXTRA_VOLUME_STREAM_VALUE";
+
+ /**
+ * @hide The previous volume associated with the stream for the volume changed intent.
+ */
+ public static final String EXTRA_PREV_VOLUME_STREAM_VALUE =
+ "android.media.EXTRA_PREV_VOLUME_STREAM_VALUE";
+
+ /**
+ * @hide The devices associated with the stream for the stream devices changed intent.
+ */
+ public static final String EXTRA_VOLUME_STREAM_DEVICES =
+ "android.media.EXTRA_VOLUME_STREAM_DEVICES";
+
+ /**
+ * @hide The previous devices associated with the stream for the stream devices changed intent.
+ */
+ public static final String EXTRA_PREV_VOLUME_STREAM_DEVICES =
+ "android.media.EXTRA_PREV_VOLUME_STREAM_DEVICES";
+
+ /**
+ * @hide The new master volume mute state for the master mute changed intent.
+ * Value is boolean
+ */
+ public static final String EXTRA_MASTER_VOLUME_MUTED =
+ "android.media.EXTRA_MASTER_VOLUME_MUTED";
+
+ /**
+ * @hide The new stream volume mute state for the stream mute changed intent.
+ * Value is boolean
+ */
+ public static final String EXTRA_STREAM_VOLUME_MUTED =
+ "android.media.EXTRA_STREAM_VOLUME_MUTED";
+
+ /**
+ * Broadcast Action: Wired Headset plugged in or unplugged.
+ *
+ * You <em>cannot</em> receive this through components declared
+ * in manifests, only by explicitly registering for it with
+ * {@link Context#registerReceiver(BroadcastReceiver, IntentFilter)
+ * Context.registerReceiver()}.
+ *
+ * <p>The intent will have the following extra values:
+ * <ul>
+ * <li><em>state</em> - 0 for unplugged, 1 for plugged. </li>
+ * <li><em>name</em> - Headset type, human readable string </li>
+ * <li><em>microphone</em> - 1 if headset has a microphone, 0 otherwise </li>
 * </ul>
+ */
+ @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
+ public static final String ACTION_HEADSET_PLUG =
+ "android.intent.action.HEADSET_PLUG";
+
+ /**
+ * Broadcast Action: A sticky broadcast indicating an HDMI cable was plugged or unplugged.
+ *
+ * The intent will have the following extra values: {@link #EXTRA_AUDIO_PLUG_STATE},
+ * {@link #EXTRA_MAX_CHANNEL_COUNT}, {@link #EXTRA_ENCODINGS}.
+ * <p>It can only be received by explicitly registering for it with
+ * {@link Context#registerReceiver(BroadcastReceiver, IntentFilter)}.
+ */
+ @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
+ public static final String ACTION_HDMI_AUDIO_PLUG =
+ "android.media.action.HDMI_AUDIO_PLUG";
+
+ /**
+ * Extra used in {@link #ACTION_HDMI_AUDIO_PLUG} to communicate whether HDMI is plugged in
+ * or unplugged.
+ * An integer value of 1 indicates a plugged-in state, 0 is unplugged.
+ */
+ public static final String EXTRA_AUDIO_PLUG_STATE = "android.media.extra.AUDIO_PLUG_STATE";
+
+ /**
+ * Extra used in {@link #ACTION_HDMI_AUDIO_PLUG} to define the maximum number of channels
+ * supported by the HDMI device.
+ * The corresponding integer value is only available when the device is plugged in (as expressed
+ * by {@link #EXTRA_AUDIO_PLUG_STATE}).
+ */
+ public static final String EXTRA_MAX_CHANNEL_COUNT = "android.media.extra.MAX_CHANNEL_COUNT";
+
+ /**
+ * Extra used in {@link #ACTION_HDMI_AUDIO_PLUG} to define the audio encodings supported by
+ * the connected HDMI device.
+ * The corresponding array of encoding values is only available when the device is plugged in
+ * (as expressed by {@link #EXTRA_AUDIO_PLUG_STATE}). Encoding values are defined in
+ * {@link AudioFormat} (for instance see {@link AudioFormat#ENCODING_PCM_16BIT}). Use
+ * {@link android.content.Intent#getIntArrayExtra(String)} to retrieve the encoding values.
+ */
+ public static final String EXTRA_ENCODINGS = "android.media.extra.ENCODINGS";
+
+ /** Used to identify the volume of audio streams for phone calls */
+ public static final int STREAM_VOICE_CALL = AudioSystem.STREAM_VOICE_CALL;
+ /** Used to identify the volume of audio streams for system sounds */
+ public static final int STREAM_SYSTEM = AudioSystem.STREAM_SYSTEM;
+ /** Used to identify the volume of audio streams for the phone ring */
+ public static final int STREAM_RING = AudioSystem.STREAM_RING;
+ /** Used to identify the volume of audio streams for music playback */
+ public static final int STREAM_MUSIC = AudioSystem.STREAM_MUSIC;
+ /** Used to identify the volume of audio streams for alarms */
+ public static final int STREAM_ALARM = AudioSystem.STREAM_ALARM;
+ /** Used to identify the volume of audio streams for notifications */
+ public static final int STREAM_NOTIFICATION = AudioSystem.STREAM_NOTIFICATION;
+ /** @hide Used to identify the volume of audio streams for phone calls when connected
+ * to bluetooth */
+ public static final int STREAM_BLUETOOTH_SCO = AudioSystem.STREAM_BLUETOOTH_SCO;
+ /** @hide Used to identify the volume of audio streams for enforced system sounds
+ * in certain countries (e.g camera in Japan) */
+ public static final int STREAM_SYSTEM_ENFORCED = AudioSystem.STREAM_SYSTEM_ENFORCED;
+ /** Used to identify the volume of audio streams for DTMF Tones */
+ public static final int STREAM_DTMF = AudioSystem.STREAM_DTMF;
+ /** @hide Used to identify the volume of audio streams exclusively transmitted through the
+ * speaker (TTS) of the device */
+ public static final int STREAM_TTS = AudioSystem.STREAM_TTS;
+ /** Used to identify the volume of audio streams for accessibility prompts */
+ public static final int STREAM_ACCESSIBILITY = AudioSystem.STREAM_ACCESSIBILITY;
+
+    /**
+     * Number of audio streams.
+     *
+     * @deprecated Do not iterate on volume stream type values.
+     */
+    @Deprecated public static final int NUM_STREAMS = AudioSystem.NUM_STREAMS;
+
+ /**
+ * Increase the ringer volume.
+ *
+ * @see #adjustVolume(int, int)
+ * @see #adjustStreamVolume(int, int, int)
+ */
+ public static final int ADJUST_RAISE = 1;
+
+ /**
+ * Decrease the ringer volume.
+ *
+ * @see #adjustVolume(int, int)
+ * @see #adjustStreamVolume(int, int, int)
+ */
+ public static final int ADJUST_LOWER = -1;
+
+ /**
+ * Maintain the previous ringer volume. This may be useful when needing to
+ * show the volume toast without actually modifying the volume.
+ *
+ * @see #adjustVolume(int, int)
+ * @see #adjustStreamVolume(int, int, int)
+ */
+ public static final int ADJUST_SAME = 0;
+
+ /**
+ * Mute the volume. Has no effect if the stream is already muted.
+ *
+ * @see #adjustVolume(int, int)
+ * @see #adjustStreamVolume(int, int, int)
+ */
+ public static final int ADJUST_MUTE = -100;
+
+ /**
+ * Unmute the volume. Has no effect if the stream is not muted.
+ *
+ * @see #adjustVolume(int, int)
+ * @see #adjustStreamVolume(int, int, int)
+ */
+ public static final int ADJUST_UNMUTE = 100;
+
+ /**
+ * Toggle the mute state. If muted the stream will be unmuted. If not muted
+ * the stream will be muted.
+ *
+ * @see #adjustVolume(int, int)
+ * @see #adjustStreamVolume(int, int, int)
+ */
+ public static final int ADJUST_TOGGLE_MUTE = 101;
+
+    /**
+     * Returns the human-readable name of an {@code ADJUST_*} volume direction,
+     * for use in logs and dumps.
+     * @hide
+     */
+    public static final String adjustToString(int adj) {
+        if (adj == ADJUST_RAISE) {
+            return "ADJUST_RAISE";
+        } else if (adj == ADJUST_LOWER) {
+            return "ADJUST_LOWER";
+        } else if (adj == ADJUST_SAME) {
+            return "ADJUST_SAME";
+        } else if (adj == ADJUST_MUTE) {
+            return "ADJUST_MUTE";
+        } else if (adj == ADJUST_UNMUTE) {
+            return "ADJUST_UNMUTE";
+        } else if (adj == ADJUST_TOGGLE_MUTE) {
+            return "ADJUST_TOGGLE_MUTE";
+        }
+        // Unrecognized values are reported verbatim rather than thrown.
+        return "unknown adjust mode " + adj;
+    }
+
+ // Flags should be powers of 2!
+
+ /**
+ * Show a toast containing the current volume.
+ *
+ * @see #adjustStreamVolume(int, int, int)
+ * @see #adjustVolume(int, int)
+ * @see #setStreamVolume(int, int, int)
+ * @see #setRingerMode(int)
+ */
+ public static final int FLAG_SHOW_UI = 1 << 0;
+
+ /**
+ * Whether to include ringer modes as possible options when changing volume.
+ * For example, if true and volume level is 0 and the volume is adjusted
+ * with {@link #ADJUST_LOWER}, then the ringer mode may switch the silent or
+ * vibrate mode.
+ * <p>
+ * By default this is on for the ring stream. If this flag is included,
+ * this behavior will be present regardless of the stream type being
+ * affected by the ringer mode.
+ *
+ * @see #adjustVolume(int, int)
+ * @see #adjustStreamVolume(int, int, int)
+ */
+ public static final int FLAG_ALLOW_RINGER_MODES = 1 << 1;
+
+ /**
+ * Whether to play a sound when changing the volume.
+ * <p>
+ * If this is given to {@link #adjustVolume(int, int)} or
+ * {@link #adjustSuggestedStreamVolume(int, int, int)}, it may be ignored
+ * in some cases (for example, the decided stream type is not
+ * {@link AudioManager#STREAM_RING}, or the volume is being adjusted
+ * downward).
+ *
+ * @see #adjustStreamVolume(int, int, int)
+ * @see #adjustVolume(int, int)
+ * @see #setStreamVolume(int, int, int)
+ */
+ public static final int FLAG_PLAY_SOUND = 1 << 2;
+
+ /**
+ * Removes any sounds/vibrate that may be in the queue, or are playing (related to
+ * changing volume).
+ */
+ public static final int FLAG_REMOVE_SOUND_AND_VIBRATE = 1 << 3;
+
+ /**
+ * Whether to vibrate if going into the vibrate ringer mode.
+ */
+ public static final int FLAG_VIBRATE = 1 << 4;
+
+ /**
+ * Indicates to VolumePanel that the volume slider should be disabled as user
+ * cannot change the stream volume
+ * @hide
+ */
+ public static final int FLAG_FIXED_VOLUME = 1 << 5;
+
+ /**
+ * Indicates the volume set/adjust call is for Bluetooth absolute volume
+ * @hide
+ */
+ public static final int FLAG_BLUETOOTH_ABS_VOLUME = 1 << 6;
+
+ /**
+ * Adjusting the volume was prevented due to silent mode, display a hint in the UI.
+ * @hide
+ */
+ public static final int FLAG_SHOW_SILENT_HINT = 1 << 7;
+
+ /**
+ * Indicates the volume call is for Hdmi Cec system audio volume
+ * @hide
+ */
+ public static final int FLAG_HDMI_SYSTEM_AUDIO_VOLUME = 1 << 8;
+
+ /**
+ * Indicates that this should only be handled if media is actively playing.
+ * @hide
+ */
+ public static final int FLAG_ACTIVE_MEDIA_ONLY = 1 << 9;
+
+ /**
+ * Like FLAG_SHOW_UI, but only dialog warnings and confirmations, no sliders.
+ * @hide
+ */
+ public static final int FLAG_SHOW_UI_WARNINGS = 1 << 10;
+
+ /**
+ * Adjusting the volume down from vibrated was prevented, display a hint in the UI.
+ * @hide
+ */
+ public static final int FLAG_SHOW_VIBRATE_HINT = 1 << 11;
+
+ /**
+ * Adjusting the volume due to a hardware key press.
+ * @hide
+ */
+ public static final int FLAG_FROM_KEY = 1 << 12;
+
+ private static final String[] FLAG_NAMES = {
+ "FLAG_SHOW_UI",
+ "FLAG_ALLOW_RINGER_MODES",
+ "FLAG_PLAY_SOUND",
+ "FLAG_REMOVE_SOUND_AND_VIBRATE",
+ "FLAG_VIBRATE",
+ "FLAG_FIXED_VOLUME",
+ "FLAG_BLUETOOTH_ABS_VOLUME",
+ "FLAG_SHOW_SILENT_HINT",
+ "FLAG_HDMI_SYSTEM_AUDIO_VOLUME",
+ "FLAG_ACTIVE_MEDIA_ONLY",
+ "FLAG_SHOW_UI_WARNINGS",
+ "FLAG_SHOW_VIBRATE_HINT",
+ "FLAG_FROM_KEY",
+ };
+
+ /** @hide */
+ public static String flagsToString(int flags) {
+ final StringBuilder sb = new StringBuilder();
+ for (int i = 0; i < FLAG_NAMES.length; i++) {
+ final int flag = 1 << i;
+ if ((flags & flag) != 0) {
+ if (sb.length() > 0) {
+ sb.append(',');
+ }
+ sb.append(FLAG_NAMES[i]);
+ flags &= ~flag;
+ }
+ }
+ if (flags != 0) {
+ if (sb.length() > 0) {
+ sb.append(',');
+ }
+ sb.append(flags);
+ }
+ return sb.toString();
+ }
+
+ /**
+ * Ringer mode that will be silent and will not vibrate. (This overrides the
+ * vibrate setting.)
+ *
+ * @see #setRingerMode(int)
+ * @see #getRingerMode()
+ */
+ public static final int RINGER_MODE_SILENT = 0;
+
+ /**
+ * Ringer mode that will be silent and will vibrate. (This will cause the
+ * phone ringer to always vibrate, but the notification vibrate to only
+ * vibrate if set.)
+ *
+ * @see #setRingerMode(int)
+ * @see #getRingerMode()
+ */
+ public static final int RINGER_MODE_VIBRATE = 1;
+
+ /**
+ * Ringer mode that may be audible and may vibrate. It will be audible if
+ * the volume before changing out of this mode was audible. It will vibrate
+ * if the vibrate setting is on.
+ *
+ * @see #setRingerMode(int)
+ * @see #getRingerMode()
+ */
+ public static final int RINGER_MODE_NORMAL = 2;
+
+ /**
+ * Maximum valid ringer mode value. Values must start from 0 and be contiguous.
+ * @hide
+ */
+ public static final int RINGER_MODE_MAX = RINGER_MODE_NORMAL;
+
+    /**
+     * Vibrate type that corresponds to the ringer.
+     *
+     * @see #setVibrateSetting(int, int)
+     * @see #getVibrateSetting(int)
+     * @see #shouldVibrate(int)
+     * @deprecated Applications should maintain their own vibrate policy based on
+     * current ringer mode that can be queried via {@link #getRingerMode()}.
+     */
+    @Deprecated
+    public static final int VIBRATE_TYPE_RINGER = 0;
+
+    /**
+     * Vibrate type that corresponds to notifications.
+     *
+     * @see #setVibrateSetting(int, int)
+     * @see #getVibrateSetting(int)
+     * @see #shouldVibrate(int)
+     * @deprecated Applications should maintain their own vibrate policy based on
+     * current ringer mode that can be queried via {@link #getRingerMode()}.
+     */
+    @Deprecated
+    public static final int VIBRATE_TYPE_NOTIFICATION = 1;
+
+    /**
+     * Vibrate setting that suggests to never vibrate.
+     *
+     * @see #setVibrateSetting(int, int)
+     * @see #getVibrateSetting(int)
+     * @deprecated Applications should maintain their own vibrate policy based on
+     * current ringer mode that can be queried via {@link #getRingerMode()}.
+     */
+    @Deprecated
+    public static final int VIBRATE_SETTING_OFF = 0;
+
+    /**
+     * Vibrate setting that suggests to vibrate when possible.
+     *
+     * @see #setVibrateSetting(int, int)
+     * @see #getVibrateSetting(int)
+     * @deprecated Applications should maintain their own vibrate policy based on
+     * current ringer mode that can be queried via {@link #getRingerMode()}.
+     */
+    @Deprecated
+    public static final int VIBRATE_SETTING_ON = 1;
+
+    /**
+     * Vibrate setting that suggests to only vibrate when in the vibrate ringer
+     * mode.
+     *
+     * @see #setVibrateSetting(int, int)
+     * @see #getVibrateSetting(int)
+     * @deprecated Applications should maintain their own vibrate policy based on
+     * current ringer mode that can be queried via {@link #getRingerMode()}.
+     */
+    @Deprecated
+    public static final int VIBRATE_SETTING_ONLY_SILENT = 2;
+
+ /**
+ * Suggests using the default stream type. This may not be used in all
+ * places a stream type is needed.
+ */
+ public static final int USE_DEFAULT_STREAM_TYPE = Integer.MIN_VALUE;
+
+ private static IAudioService sService;
+
+    /**
+     * @hide
+     * For test purposes only, will throw NPE with some methods that require a Context.
+     */
+    public AudioManager() {
+        // No Context available: fall back to platform-default policies instead
+        // of reading them from resources.
+        mUseVolumeKeySounds = true;
+        mUseFixedVolume = false;
+    }
+
+    /**
+     * @hide
+     * Creates an AudioManager bound to the given context, reading the device's
+     * volume-key-sound and fixed-volume policies from its resources.
+     */
+    public AudioManager(Context context) {
+        setContext(context);
+        mUseVolumeKeySounds = getContext().getResources().getBoolean(
+                com.android.internal.R.bool.config_useVolumeKeySounds);
+        mUseFixedVolume = getContext().getResources().getBoolean(
+                com.android.internal.R.bool.config_useFixedVolume);
+    }
+
+    // Returns the application context when one is available, otherwise the
+    // context this manager was created with. Re-attempts resolving the
+    // application context on each call, in case it was not yet available at
+    // construction time.
+    private Context getContext() {
+        if (mApplicationContext == null) {
+            setContext(mOriginalContext);
+        }
+        if (mApplicationContext != null) {
+            return mApplicationContext;
+        }
+        return mOriginalContext;
+    }
+
+    // Caches context.getApplicationContext() when present and drops the
+    // original reference; otherwise keeps the original context as a fallback.
+    // Invariant: exactly one of mApplicationContext / mOriginalContext is
+    // non-null after this call.
+    private void setContext(Context context) {
+        mApplicationContext = context.getApplicationContext();
+        if (mApplicationContext != null) {
+            mOriginalContext = null;
+        } else {
+            mOriginalContext = context;
+        }
+    }
+
+    // Lazily resolves the IAudioService binder proxy. Intentionally
+    // unsynchronized: a race can at worst resolve the same system service
+    // twice and overwrite sService with an equivalent proxy — presumably
+    // harmless here; NOTE(review): confirm no caller relies on proxy identity.
+    private static IAudioService getService()
+    {
+        if (sService != null) {
+            return sService;
+        }
+        IBinder b = ServiceManager.getService(Context.AUDIO_SERVICE);
+        sService = IAudioService.Stub.asInterface(b);
+        return sService;
+    }
+
+ /**
+ * Sends a simulated key event for a media button.
+ * To simulate a key press, you must first send a KeyEvent built with a
+ * {@link KeyEvent#ACTION_DOWN} action, then another event with the {@link KeyEvent#ACTION_UP}
+ * action.
+ * <p>The key event will be sent to the current media key event consumer which registered with
+ * {@link AudioManager#registerMediaButtonEventReceiver(PendingIntent)}.
+ * @param keyEvent a {@link KeyEvent} instance whose key code is one of
+ * {@link KeyEvent#KEYCODE_MUTE},
+ * {@link KeyEvent#KEYCODE_HEADSETHOOK},
+ * {@link KeyEvent#KEYCODE_MEDIA_PLAY},
+ * {@link KeyEvent#KEYCODE_MEDIA_PAUSE},
+ * {@link KeyEvent#KEYCODE_MEDIA_PLAY_PAUSE},
+ * {@link KeyEvent#KEYCODE_MEDIA_STOP},
+ * {@link KeyEvent#KEYCODE_MEDIA_NEXT},
+ * {@link KeyEvent#KEYCODE_MEDIA_PREVIOUS},
+ * {@link KeyEvent#KEYCODE_MEDIA_REWIND},
+ * {@link KeyEvent#KEYCODE_MEDIA_RECORD},
+ * {@link KeyEvent#KEYCODE_MEDIA_FAST_FORWARD},
+ * {@link KeyEvent#KEYCODE_MEDIA_CLOSE},
+ * {@link KeyEvent#KEYCODE_MEDIA_EJECT},
+ * or {@link KeyEvent#KEYCODE_MEDIA_AUDIO_TRACK}.
+ */
+ public void dispatchMediaKeyEvent(KeyEvent keyEvent) {
+ MediaSessionLegacyHelper helper = MediaSessionLegacyHelper.getHelper(getContext());
+ helper.sendMediaButtonEvent(keyEvent, false);
+ }
+
+    /**
+     * @hide
+     * Called before dispatching a key event: cancels any pending volume
+     * feedback sound/vibration if a non-volume key is pressed within the
+     * play-sound delay after the last volume key release.
+     */
+    public void preDispatchKeyEvent(KeyEvent event, int stream) {
+        /*
+         * If the user hits another key within the play sound delay, then
+         * cancel the sound
+         */
+        int keyCode = event.getKeyCode();
+        if (keyCode != KeyEvent.KEYCODE_VOLUME_DOWN && keyCode != KeyEvent.KEYCODE_VOLUME_UP
+                && keyCode != KeyEvent.KEYCODE_VOLUME_MUTE
+                && mVolumeKeyUpTime + AudioSystem.PLAY_SOUND_DELAY > SystemClock.uptimeMillis()) {
+            /*
+             * The user has hit another key during the delay (e.g., 300ms)
+             * since the last volume key up, so cancel any sounds.
+             */
+            adjustSuggestedStreamVolume(ADJUST_SAME,
+                    stream, AudioManager.FLAG_REMOVE_SOUND_AND_VIBRATE);
+        }
+    }
+
+ /**
+ * Indicates if the device implements a fixed volume policy.
+ * <p>Some devices may not have volume control and may operate at a fixed volume,
+ * and may not enable muting or changing the volume of audio streams.
+ * This method will return true on such devices.
+ * <p>The following APIs have no effect when volume is fixed:
+ * <ul>
+ * <li> {@link #adjustVolume(int, int)}
+ * <li> {@link #adjustSuggestedStreamVolume(int, int, int)}
+ * <li> {@link #adjustStreamVolume(int, int, int)}
+ * <li> {@link #setStreamVolume(int, int, int)}
+ * <li> {@link #setRingerMode(int)}
+ * <li> {@link #setStreamSolo(int, boolean)}
+ * <li> {@link #setStreamMute(int, boolean)}
+ * </ul>
+ */
+ public boolean isVolumeFixed() {
+ return mUseFixedVolume;
+ }
+
+ /**
+ * Adjusts the volume of a particular stream by one step in a direction.
+ * <p>
+ * This method should only be used by applications that replace the platform-wide
+ * management of audio settings or the main telephony application.
+ *
+ * @param streamType The stream type to adjust. One of {@link #STREAM_VOICE_CALL},
+ * {@link #STREAM_SYSTEM}, {@link #STREAM_RING}, {@link #STREAM_MUSIC},
+ * {@link #STREAM_ALARM} or {@link #STREAM_ACCESSIBILITY}.
+ * @param direction The direction to adjust the volume. One of
+ * {@link #ADJUST_LOWER}, {@link #ADJUST_RAISE}, or
+ * {@link #ADJUST_SAME}.
+ * @param flags One or more flags.
+ * @see #adjustVolume(int, int)
+ * @see #setStreamVolume(int, int, int)
+ */
+ public void adjustStreamVolume(int streamType, int direction, int flags) {
+ final IAudioService service = getService();
+ try {
+ service.adjustStreamVolume(streamType, direction, flags,
+ getContext().getOpPackageName());
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Adjusts the volume of the most relevant stream. For example, if a call is
+ * active, it will have the highest priority regardless of if the in-call
+ * screen is showing. Another example, if music is playing in the background
+ * and a call is not active, the music stream will be adjusted.
+ * <p>
+ * This method should only be used by applications that replace the
+ * platform-wide management of audio settings or the main telephony
+ * application.
+ * <p>
+ * This method has no effect if the device implements a fixed volume policy
+ * as indicated by {@link #isVolumeFixed()}.
+ *
+ * @param direction The direction to adjust the volume. One of
+ * {@link #ADJUST_LOWER}, {@link #ADJUST_RAISE},
+ * {@link #ADJUST_SAME}, {@link #ADJUST_MUTE},
+ * {@link #ADJUST_UNMUTE}, or {@link #ADJUST_TOGGLE_MUTE}.
+ * @param flags One or more flags.
+ * @see #adjustSuggestedStreamVolume(int, int, int)
+ * @see #adjustStreamVolume(int, int, int)
+ * @see #setStreamVolume(int, int, int)
+ * @see #isVolumeFixed()
+ */
+ public void adjustVolume(int direction, int flags) {
+ MediaSessionLegacyHelper helper = MediaSessionLegacyHelper.getHelper(getContext());
+ helper.sendAdjustVolumeBy(USE_DEFAULT_STREAM_TYPE, direction, flags);
+ }
+
+ /**
+ * Adjusts the volume of the most relevant stream, or the given fallback
+ * stream.
+ * <p>
+ * This method should only be used by applications that replace the
+ * platform-wide management of audio settings or the main telephony
+ * application.
+ * <p>
+ * This method has no effect if the device implements a fixed volume policy
+ * as indicated by {@link #isVolumeFixed()}.
+ *
+ * @param direction The direction to adjust the volume. One of
+ * {@link #ADJUST_LOWER}, {@link #ADJUST_RAISE},
+ * {@link #ADJUST_SAME}, {@link #ADJUST_MUTE},
+ * {@link #ADJUST_UNMUTE}, or {@link #ADJUST_TOGGLE_MUTE}.
+ * @param suggestedStreamType The stream type that will be used if there
+ * isn't a relevant stream. {@link #USE_DEFAULT_STREAM_TYPE} is
+ * valid here.
+ * @param flags One or more flags.
+ * @see #adjustVolume(int, int)
+ * @see #adjustStreamVolume(int, int, int)
+ * @see #setStreamVolume(int, int, int)
+ * @see #isVolumeFixed()
+ */
+ public void adjustSuggestedStreamVolume(int direction, int suggestedStreamType, int flags) {
+ MediaSessionLegacyHelper helper = MediaSessionLegacyHelper.getHelper(getContext());
+ helper.sendAdjustVolumeBy(suggestedStreamType, direction, flags);
+ }
+
+ /** @hide */
+ public void setMasterMute(boolean mute, int flags) {
+ final IAudioService service = getService();
+ try {
+ service.setMasterMute(mute, flags, getContext().getOpPackageName(),
+ UserHandle.getCallingUserId());
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Returns the current ringtone mode.
+ *
+ * @return The current ringtone mode, one of {@link #RINGER_MODE_NORMAL},
+ * {@link #RINGER_MODE_SILENT}, or {@link #RINGER_MODE_VIBRATE}.
+ * @see #setRingerMode(int)
+ */
+ public int getRingerMode() {
+ final IAudioService service = getService();
+ try {
+ return service.getRingerModeExternal();
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+    /**
+     * Checks valid ringer mode values.
+     * <p>Performs a local range check ([0, {@link #RINGER_MODE_MAX}]) first,
+     * then defers to the audio service for further validation.
+     *
+     * @return true if the ringer mode indicated is valid, false otherwise.
+     *
+     * @see #setRingerMode(int)
+     * @hide
+     */
+    public static boolean isValidRingerMode(int ringerMode) {
+        if (ringerMode < 0 || ringerMode > RINGER_MODE_MAX) {
+            return false;
+        }
+        final IAudioService service = getService();
+        try {
+            return service.isValidRingerMode(ringerMode);
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+ /**
+ * Returns the maximum volume index for a particular stream.
+ *
+ * @param streamType The stream type whose maximum volume index is returned.
+ * @return The maximum valid volume index for the stream.
+ * @see #getStreamVolume(int)
+ */
+ public int getStreamMaxVolume(int streamType) {
+ final IAudioService service = getService();
+ try {
+ return service.getStreamMaxVolume(streamType);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Returns the minimum volume index for a particular stream.
+ *
+ * @param streamType The stream type whose minimum volume index is returned.
+ * @return The minimum valid volume index for the stream.
+ * @see #getStreamVolume(int)
+ * @hide
+ */
+ public int getStreamMinVolume(int streamType) {
+ final IAudioService service = getService();
+ try {
+ return service.getStreamMinVolume(streamType);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Returns the current volume index for a particular stream.
+ *
+ * @param streamType The stream type whose volume index is returned.
+ * @return The current volume index for the stream.
+ * @see #getStreamMaxVolume(int)
+ * @see #setStreamVolume(int, int, int)
+ */
+ public int getStreamVolume(int streamType) {
+ final IAudioService service = getService();
+ try {
+ return service.getStreamVolume(streamType);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Get last audible volume before stream was muted.
+ *
+ * @hide
+ */
+ public int getLastAudibleStreamVolume(int streamType) {
+ final IAudioService service = getService();
+ try {
+ return service.getLastAudibleStreamVolume(streamType);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Get the stream type whose volume is driving the UI sounds volume.
+ * UI sounds are screen lock/unlock, camera shutter, key clicks...
+ * It is assumed that this stream type is also tied to ringer mode changes.
+ * @hide
+ */
+ public int getUiSoundsStreamType() {
+ final IAudioService service = getService();
+ try {
+ return service.getUiSoundsStreamType();
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+    /**
+     * Sets the ringer mode.
+     * <p>
+     * Silent mode will mute the volume and will not vibrate. Vibrate mode will
+     * mute the volume and vibrate. Normal mode will be audible and may vibrate
+     * according to user settings.
+     * <p>This method has no effect if the device implements a fixed volume policy
+     * as indicated by {@link #isVolumeFixed()}.
+     * <p>From N onward, ringer mode adjustments that would toggle Do Not Disturb are not allowed
+     * unless the app has been granted Do Not Disturb Access.
+     * See {@link NotificationManager#isNotificationPolicyAccessGranted()}.
+     * @param ringerMode The ringer mode, one of {@link #RINGER_MODE_NORMAL},
+     *            {@link #RINGER_MODE_SILENT}, or {@link #RINGER_MODE_VIBRATE}.
+     * @see #getRingerMode()
+     * @see #isVolumeFixed()
+     */
+    public void setRingerMode(int ringerMode) {
+        // Invalid modes are silently ignored rather than thrown.
+        if (!isValidRingerMode(ringerMode)) {
+            return;
+        }
+        final IAudioService service = getService();
+        try {
+            service.setRingerModeExternal(ringerMode, getContext().getOpPackageName());
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+ /**
+ * Sets the volume index for a particular stream.
+ * <p>This method has no effect if the device implements a fixed volume policy
+ * as indicated by {@link #isVolumeFixed()}.
+ * <p>From N onward, volume adjustments that would toggle Do Not Disturb are not allowed unless
+ * the app has been granted Do Not Disturb Access.
+ * See {@link NotificationManager#isNotificationPolicyAccessGranted()}.
+ * @param streamType The stream whose volume index should be set.
+ * @param index The volume index to set. See
+ * {@link #getStreamMaxVolume(int)} for the largest valid value.
+ * @param flags One or more flags.
+ * @see #getStreamMaxVolume(int)
+ * @see #getStreamVolume(int)
+ * @see #isVolumeFixed()
+ */
+ public void setStreamVolume(int streamType, int index, int flags) {
+ final IAudioService service = getService();
+ try {
+ service.setStreamVolume(streamType, index, flags, getContext().getOpPackageName());
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Solo or unsolo a particular stream.
+ * <p>
+ * Do not use. This method has been deprecated and is now a no-op.
+ * {@link #requestAudioFocus} should be used for exclusive audio playback.
+ *
+ * @param streamType The stream to be soloed/unsoloed.
+ * @param state The required solo state: true for solo ON, false for solo
+ * OFF
+ * @see #isVolumeFixed()
+ * @deprecated Do not use. If you need exclusive audio playback use
+ * {@link #requestAudioFocus}.
+ */
+ @Deprecated
+ public void setStreamSolo(int streamType, boolean state) {
+ Log.w(TAG, "setStreamSolo has been deprecated. Do not use.");
+ }
+
+    /**
+     * Mute or unmute an audio stream.
+     * <p>
+     * This method should only be used by applications that replace the
+     * platform-wide management of audio settings or the main telephony
+     * application.
+     * <p>
+     * This method has no effect if the device implements a fixed volume policy
+     * as indicated by {@link #isVolumeFixed()}.
+     * <p>
+     * This method was deprecated in API level 22. Prior to API level 22 this
+     * method had significantly different behavior and should be used carefully.
+     * The following applies only to pre-22 platforms:
+     * <ul>
+     * <li>The mute command is protected against client process death: if a
+     * process with an active mute request on a stream dies, this stream will be
+     * unmuted automatically.</li>
+     * <li>The mute requests for a given stream are cumulative: the AudioManager
+     * can receive several mute requests from one or more clients and the stream
+     * will be unmuted only when the same number of unmute requests are
+     * received.</li>
+     * <li>For a better user experience, applications MUST unmute a muted stream
+     * in onPause() and mute it again in onResume() if appropriate.</li>
+     * </ul>
+     *
+     * @param streamType The stream to be muted/unmuted.
+     * @param state The required mute state: true for mute ON, false for mute
+     *            OFF
+     * @see #isVolumeFixed()
+     * @deprecated Use {@link #adjustStreamVolume(int, int, int)} with
+     *             {@link #ADJUST_MUTE} or {@link #ADJUST_UNMUTE} instead.
+     */
+    @Deprecated
+    public void setStreamMute(int streamType, boolean state) {
+        Log.w(TAG, "setStreamMute is deprecated. adjustStreamVolume should be used instead.");
+        int direction = state ? ADJUST_MUTE : ADJUST_UNMUTE;
+        // The default-stream sentinel is resolved through the suggested-stream
+        // path; concrete stream types go straight to adjustStreamVolume.
+        if (streamType == AudioManager.USE_DEFAULT_STREAM_TYPE) {
+            adjustSuggestedStreamVolume(direction, streamType, 0);
+        } else {
+            adjustStreamVolume(streamType, direction, 0);
+        }
+    }
+
+ /**
+ * Returns the current mute state for a particular stream.
+ *
+ * @param streamType The stream to get mute state for.
+ * @return The mute state for the given stream.
+ * @see #adjustStreamVolume(int, int, int)
+ */
+ public boolean isStreamMute(int streamType) {
+ final IAudioService service = getService();
+ try {
+ return service.isStreamMute(streamType);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * get master mute state.
+ *
+ * @hide
+ */
+ public boolean isMasterMute() {
+ final IAudioService service = getService();
+ try {
+ return service.isMasterMute();
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * forces the stream controlled by hard volume keys
+ * specifying streamType == -1 releases control to the
+ * logic.
+ *
+ * @hide
+ */
+ public void forceVolumeControlStream(int streamType) {
+ final IAudioService service = getService();
+ try {
+ service.forceVolumeControlStream(streamType, mICallBack);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+    /**
+     * Returns whether a particular type should vibrate according to user
+     * settings and the current ringer mode.
+     * <p>
+     * This shouldn't be needed by most clients that use notifications to
+     * vibrate. The notification manager will not vibrate if the policy doesn't
+     * allow it, so the client should always set a vibrate pattern and let the
+     * notification manager control whether or not to actually vibrate.
+     *
+     * @param vibrateType The type of vibrate. One of
+     *            {@link #VIBRATE_TYPE_NOTIFICATION} or
+     *            {@link #VIBRATE_TYPE_RINGER}.
+     * @return Whether the type should vibrate at the instant this method is
+     *         called.
+     * @see #setVibrateSetting(int, int)
+     * @see #getVibrateSetting(int)
+     * @deprecated Applications should maintain their own vibrate policy based on
+     * current ringer mode that can be queried via {@link #getRingerMode()}.
+     */
+    @Deprecated
+    public boolean shouldVibrate(int vibrateType) {
+        final IAudioService service = getService();
+        try {
+            return service.shouldVibrate(vibrateType);
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+    /**
+     * Returns whether the user's vibrate setting for a vibrate type.
+     * <p>
+     * This shouldn't be needed by most clients that want to vibrate, instead
+     * see {@link #shouldVibrate(int)}.
+     *
+     * @param vibrateType The type of vibrate. One of
+     *            {@link #VIBRATE_TYPE_NOTIFICATION} or
+     *            {@link #VIBRATE_TYPE_RINGER}.
+     * @return The vibrate setting, one of {@link #VIBRATE_SETTING_ON},
+     *         {@link #VIBRATE_SETTING_OFF}, or
+     *         {@link #VIBRATE_SETTING_ONLY_SILENT}.
+     * @see #setVibrateSetting(int, int)
+     * @see #shouldVibrate(int)
+     * @deprecated Applications should maintain their own vibrate policy based on
+     * current ringer mode that can be queried via {@link #getRingerMode()}.
+     */
+    @Deprecated
+    public int getVibrateSetting(int vibrateType) {
+        final IAudioService service = getService();
+        try {
+            return service.getVibrateSetting(vibrateType);
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+    /**
+     * Sets the setting for when the vibrate type should vibrate.
+     * <p>
+     * This method should only be used by applications that replace the platform-wide
+     * management of audio settings or the main telephony application.
+     *
+     * @param vibrateType The type of vibrate. One of
+     *            {@link #VIBRATE_TYPE_NOTIFICATION} or
+     *            {@link #VIBRATE_TYPE_RINGER}.
+     * @param vibrateSetting The vibrate setting, one of
+     *            {@link #VIBRATE_SETTING_ON},
+     *            {@link #VIBRATE_SETTING_OFF}, or
+     *            {@link #VIBRATE_SETTING_ONLY_SILENT}.
+     * @see #getVibrateSetting(int)
+     * @see #shouldVibrate(int)
+     * @deprecated Applications should maintain their own vibrate policy based on
+     * current ringer mode that can be queried via {@link #getRingerMode()}.
+     */
+    @Deprecated
+    public void setVibrateSetting(int vibrateType, int vibrateSetting) {
+        final IAudioService service = getService();
+        try {
+            service.setVibrateSetting(vibrateType, vibrateSetting);
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+ /**
+ * Sets the speakerphone on or off.
+ * <p>
+ * This method should only be used by applications that replace the platform-wide
+ * management of audio settings or the main telephony application.
+ *
+ * @param on set <var>true</var> to turn on speakerphone;
+ * <var>false</var> to turn it off
+ */
+ public void setSpeakerphoneOn(boolean on){
+ final IAudioService service = getService();
+ try {
+ service.setSpeakerphoneOn(on);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Checks whether the speakerphone is on or off.
+ *
+ * @return true if speakerphone is on, false if it's off
+ */
+ public boolean isSpeakerphoneOn() {
+ final IAudioService service = getService();
+ try {
+ return service.isSpeakerphoneOn();
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ //====================================================================
+ // Bluetooth SCO control
+ /**
+ * Sticky broadcast intent action indicating that the Bluetooth SCO audio
+     * connection state has changed. The intent contains one extra {@link #EXTRA_SCO_AUDIO_STATE}
+ * indicating the new state which is either {@link #SCO_AUDIO_STATE_DISCONNECTED}
+ * or {@link #SCO_AUDIO_STATE_CONNECTED}
+ *
+ * @see #startBluetoothSco()
+ * @deprecated Use {@link #ACTION_SCO_AUDIO_STATE_UPDATED} instead
+ */
+ @Deprecated
+ @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
+ public static final String ACTION_SCO_AUDIO_STATE_CHANGED =
+ "android.media.SCO_AUDIO_STATE_CHANGED";
+
    /**
     * Sticky broadcast intent action indicating that the Bluetooth SCO audio
     * connection state has been updated.
     * <p>This intent has two extras:
     * <ul>
     *   <li> {@link #EXTRA_SCO_AUDIO_STATE} - The new SCO audio state. </li>
     *   <li> {@link #EXTRA_SCO_AUDIO_PREVIOUS_STATE} - The previous SCO audio state. </li>
     * </ul>
     * <p> EXTRA_SCO_AUDIO_STATE or EXTRA_SCO_AUDIO_PREVIOUS_STATE can be any of:
     * <ul>
     *   <li> {@link #SCO_AUDIO_STATE_DISCONNECTED}, </li>
     *   <li> {@link #SCO_AUDIO_STATE_CONNECTING} or </li>
     *   <li> {@link #SCO_AUDIO_STATE_CONNECTED}, </li>
     * </ul>
     * @see #startBluetoothSco()
     */
    @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
    public static final String ACTION_SCO_AUDIO_STATE_UPDATED =
            "android.media.ACTION_SCO_AUDIO_STATE_UPDATED";
+
    /**
     * Extra for intent {@link #ACTION_SCO_AUDIO_STATE_CHANGED} or
     * {@link #ACTION_SCO_AUDIO_STATE_UPDATED} containing the new bluetooth SCO connection state.
     * Value is one of the {@code SCO_AUDIO_STATE_*} constants.
     */
    public static final String EXTRA_SCO_AUDIO_STATE =
            "android.media.extra.SCO_AUDIO_STATE";

    /**
     * Extra for intent {@link #ACTION_SCO_AUDIO_STATE_UPDATED} containing the previous
     * bluetooth SCO connection state. Value is one of the {@code SCO_AUDIO_STATE_*} constants.
     */
    public static final String EXTRA_SCO_AUDIO_PREVIOUS_STATE =
            "android.media.extra.SCO_AUDIO_PREVIOUS_STATE";
+
    /**
     * Value for extra {@link #EXTRA_SCO_AUDIO_STATE} or {@link #EXTRA_SCO_AUDIO_PREVIOUS_STATE}
     * indicating that the SCO audio channel is not established
     */
    public static final int SCO_AUDIO_STATE_DISCONNECTED = 0;
    /**
     * Value for extra {@link #EXTRA_SCO_AUDIO_STATE} or {@link #EXTRA_SCO_AUDIO_PREVIOUS_STATE}
     * indicating that the SCO audio channel is established
     */
    public static final int SCO_AUDIO_STATE_CONNECTED = 1;
    /**
     * Value for extra {@link #EXTRA_SCO_AUDIO_STATE} or {@link #EXTRA_SCO_AUDIO_PREVIOUS_STATE}
     * indicating that the SCO audio channel is being established
     */
    public static final int SCO_AUDIO_STATE_CONNECTING = 2;
    /**
     * Value for extra {@link #EXTRA_SCO_AUDIO_STATE} indicating that
     * there was an error trying to obtain the state
     */
    public static final int SCO_AUDIO_STATE_ERROR = -1;
+
+
+ /**
+ * Indicates if current platform supports use of SCO for off call use cases.
+ * Application wanted to use bluetooth SCO audio when the phone is not in call
+ * must first call this method to make sure that the platform supports this
+ * feature.
+ * @return true if bluetooth SCO can be used for audio when not in call
+ * false otherwise
+ * @see #startBluetoothSco()
+ */
+ public boolean isBluetoothScoAvailableOffCall() {
+ return getContext().getResources().getBoolean(
+ com.android.internal.R.bool.config_bluetooth_sco_off_call);
+ }
+
+ /**
+ * Start bluetooth SCO audio connection.
+ * <p>Requires Permission:
+ * {@link android.Manifest.permission#MODIFY_AUDIO_SETTINGS}.
+ * <p>This method can be used by applications wanting to send and received audio
+ * to/from a bluetooth SCO headset while the phone is not in call.
+ * <p>As the SCO connection establishment can take several seconds,
+ * applications should not rely on the connection to be available when the method
+ * returns but instead register to receive the intent {@link #ACTION_SCO_AUDIO_STATE_UPDATED}
+ * and wait for the state to be {@link #SCO_AUDIO_STATE_CONNECTED}.
+ * <p>As the ACTION_SCO_AUDIO_STATE_UPDATED intent is sticky, the application can check the SCO
+ * audio state before calling startBluetoothSco() by reading the intent returned by the receiver
+ * registration. If the state is already CONNECTED, no state change will be received via the
+ * intent after calling startBluetoothSco(). It is however useful to call startBluetoothSco()
+ * so that the connection stays active in case the current initiator stops the connection.
+ * <p>Unless the connection is already active as described above, the state will always
+ * transition from DISCONNECTED to CONNECTING and then either to CONNECTED if the connection
+ * succeeds or back to DISCONNECTED if the connection fails (e.g no headset is connected).
+ * <p>When finished with the SCO connection or if the establishment fails, the application must
+ * call {@link #stopBluetoothSco()} to clear the request and turn down the bluetooth connection.
+ * <p>Even if a SCO connection is established, the following restrictions apply on audio
+ * output streams so that they can be routed to SCO headset:
+ * <ul>
+ * <li> the stream type must be {@link #STREAM_VOICE_CALL} </li>
+ * <li> the format must be mono </li>
+ * <li> the sampling must be 16kHz or 8kHz </li>
+ * </ul>
+ * <p>The following restrictions apply on input streams:
+ * <ul>
+ * <li> the format must be mono </li>
+ * <li> the sampling must be 8kHz </li>
+ * </ul>
+ * <p>Note that the phone application always has the priority on the usage of the SCO
+ * connection for telephony. If this method is called while the phone is in call
+ * it will be ignored. Similarly, if a call is received or sent while an application
+ * is using the SCO connection, the connection will be lost for the application and NOT
+ * returned automatically when the call ends.
+ * <p>NOTE: up to and including API version
+ * {@link android.os.Build.VERSION_CODES#JELLY_BEAN_MR1}, this method initiates a virtual
+ * voice call to the bluetooth headset.
+ * After API version {@link android.os.Build.VERSION_CODES#JELLY_BEAN_MR2} only a raw SCO audio
+ * connection is established.
+ * @see #stopBluetoothSco()
+ * @see #ACTION_SCO_AUDIO_STATE_UPDATED
+ */
+ public void startBluetoothSco(){
+ final IAudioService service = getService();
+ try {
+ service.startBluetoothSco(mICallBack,
+ getContext().getApplicationInfo().targetSdkVersion);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * @hide
+ * Start bluetooth SCO audio connection in virtual call mode.
+ * <p>Requires Permission:
+ * {@link android.Manifest.permission#MODIFY_AUDIO_SETTINGS}.
+ * <p>Similar to {@link #startBluetoothSco()} with explicit selection of virtual call mode.
+ * Telephony and communication applications (VoIP, Video Chat) should preferably select
+ * virtual call mode.
+ * Applications using voice input for search or commands should first try raw audio connection
+ * with {@link #startBluetoothSco()} and fall back to startBluetoothScoVirtualCall() in case of
+ * failure.
+ * @see #startBluetoothSco()
+ * @see #stopBluetoothSco()
+ * @see #ACTION_SCO_AUDIO_STATE_UPDATED
+ */
+ public void startBluetoothScoVirtualCall() {
+ final IAudioService service = getService();
+ try {
+ service.startBluetoothScoVirtualCall(mICallBack);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Stop bluetooth SCO audio connection.
+ * <p>Requires Permission:
+ * {@link android.Manifest.permission#MODIFY_AUDIO_SETTINGS}.
+ * <p>This method must be called by applications having requested the use of
+ * bluetooth SCO audio with {@link #startBluetoothSco()} when finished with the SCO
+ * connection or if connection fails.
+ * @see #startBluetoothSco()
+ */
+ // Also used for connections started with {@link #startBluetoothScoVirtualCall()}
+ public void stopBluetoothSco(){
+ final IAudioService service = getService();
+ try {
+ service.stopBluetoothSco(mICallBack);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Request use of Bluetooth SCO headset for communications.
+ * <p>
+ * This method should only be used by applications that replace the platform-wide
+ * management of audio settings or the main telephony application.
+ *
+ * @param on set <var>true</var> to use bluetooth SCO for communications;
+ * <var>false</var> to not use bluetooth SCO for communications
+ */
+ public void setBluetoothScoOn(boolean on){
+ final IAudioService service = getService();
+ try {
+ service.setBluetoothScoOn(on);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Checks whether communications use Bluetooth SCO.
+ *
+ * @return true if SCO is used for communications;
+ * false if otherwise
+ */
+ public boolean isBluetoothScoOn() {
+ final IAudioService service = getService();
+ try {
+ return service.isBluetoothScoOn();
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
    /**
     * No-op: A2DP routing can no longer be set directly.
     *
     * @param on set <var>true</var> to route A2DP audio to/from Bluetooth
     *           headset; <var>false</var> disable A2DP audio
     * @deprecated Do not use.
     */
    @Deprecated public void setBluetoothA2dpOn(boolean on){
        // Intentionally empty: kept only for binary compatibility.
    }
+
+ /**
+ * Checks whether a Bluetooth A2DP audio peripheral is connected or not.
+ *
+ * @return true if a Bluetooth A2DP peripheral is connected
+ * false if otherwise
+ * @deprecated Use {@link AudioManager#getDevices(int)} instead to list available audio devices.
+ */
+ public boolean isBluetoothA2dpOn() {
+ if (AudioSystem.getDeviceConnectionState(DEVICE_OUT_BLUETOOTH_A2DP,"")
+ == AudioSystem.DEVICE_STATE_AVAILABLE) {
+ return true;
+ } else if (AudioSystem.getDeviceConnectionState(DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES,"")
+ == AudioSystem.DEVICE_STATE_AVAILABLE) {
+ return true;
+ } else if (AudioSystem.getDeviceConnectionState(DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER,"")
+ == AudioSystem.DEVICE_STATE_AVAILABLE) {
+ return true;
+ }
+ return false;
+ }
+
    /**
     * No-op: wired-headset routing can no longer be set directly.
     *
     * @param on set <var>true</var> to route audio to/from wired
     *           headset; <var>false</var> disable wired headset audio
     * @deprecated Do not use.
     */
    @Deprecated public void setWiredHeadsetOn(boolean on){
        // Intentionally empty: kept only for binary compatibility.
    }
+
+ /**
+ * Checks whether a wired headset is connected or not.
+ * <p>This is not a valid indication that audio playback is
+ * actually over the wired headset as audio routing depends on other conditions.
+ *
+ * @return true if a wired headset is connected.
+ * false if otherwise
+ * @deprecated Use {@link AudioManager#getDevices(int)} instead to list available audio devices.
+ */
+ public boolean isWiredHeadsetOn() {
+ if (AudioSystem.getDeviceConnectionState(DEVICE_OUT_WIRED_HEADSET,"")
+ == AudioSystem.DEVICE_STATE_UNAVAILABLE &&
+ AudioSystem.getDeviceConnectionState(DEVICE_OUT_WIRED_HEADPHONE,"")
+ == AudioSystem.DEVICE_STATE_UNAVAILABLE &&
+ AudioSystem.getDeviceConnectionState(DEVICE_OUT_USB_HEADSET, "")
+ == AudioSystem.DEVICE_STATE_UNAVAILABLE) {
+ return false;
+ } else {
+ return true;
+ }
+ }
+
+ /**
+ * Sets the microphone mute on or off.
+ * <p>
+ * This method should only be used by applications that replace the platform-wide
+ * management of audio settings or the main telephony application.
+ *
+ * @param on set <var>true</var> to mute the microphone;
+ * <var>false</var> to turn mute off
+ */
+ public void setMicrophoneMute(boolean on) {
+ final IAudioService service = getService();
+ try {
+ service.setMicrophoneMute(on, getContext().getOpPackageName(),
+ UserHandle.getCallingUserId());
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
    /**
     * Checks whether the microphone mute is on or off.
     * Queried directly from the native audio system rather than the audio service.
     *
     * @return true if microphone is muted, false if it's not
     */
    public boolean isMicrophoneMute() {
        return AudioSystem.isMicrophoneMuted();
    }
+
+ /**
+ * Sets the audio mode.
+ * <p>
+ * The audio mode encompasses audio routing AND the behavior of
+ * the telephony layer. Therefore this method should only be used by applications that
+ * replace the platform-wide management of audio settings or the main telephony application.
+ * In particular, the {@link #MODE_IN_CALL} mode should only be used by the telephony
+ * application when it places a phone call, as it will cause signals from the radio layer
+ * to feed the platform mixer.
+ *
+ * @param mode the requested audio mode ({@link #MODE_NORMAL}, {@link #MODE_RINGTONE},
+ * {@link #MODE_IN_CALL} or {@link #MODE_IN_COMMUNICATION}).
+ * Informs the HAL about the current audio state so that
+ * it can route the audio appropriately.
+ */
+ public void setMode(int mode) {
+ final IAudioService service = getService();
+ try {
+ service.setMode(mode, mICallBack, mApplicationContext.getOpPackageName());
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Returns the current audio mode.
+ *
+ * @return the current audio mode ({@link #MODE_NORMAL}, {@link #MODE_RINGTONE},
+ * {@link #MODE_IN_CALL} or {@link #MODE_IN_COMMUNICATION}).
+ * Returns the current current audio state from the HAL.
+ */
+ public int getMode() {
+ final IAudioService service = getService();
+ try {
+ return service.getMode();
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
    /* modes for setMode/getMode/setRoute/getRoute */
    /**
     * Audio hardware modes.
     */
    /**
     * Invalid audio mode.
     */
    public static final int MODE_INVALID = AudioSystem.MODE_INVALID;
    /**
     * Current audio mode. Used to apply audio routing to current mode.
     */
    public static final int MODE_CURRENT = AudioSystem.MODE_CURRENT;
    /**
     * Normal audio mode: not ringing and no call established.
     */
    public static final int MODE_NORMAL = AudioSystem.MODE_NORMAL;
    /**
     * Ringing audio mode. An incoming call is being signaled.
     */
    public static final int MODE_RINGTONE = AudioSystem.MODE_RINGTONE;
    /**
     * In call audio mode. A telephony call is established.
     */
    public static final int MODE_IN_CALL = AudioSystem.MODE_IN_CALL;
    /**
     * In communication audio mode. An audio/video chat or VoIP call is established.
     */
    public static final int MODE_IN_COMMUNICATION = AudioSystem.MODE_IN_COMMUNICATION;
+
    /* Routing bits for setRouting/getRouting API */
    /**
     * Routing audio output to earpiece
     * @deprecated Do not set audio routing directly, use setSpeakerphoneOn(),
     * setBluetoothScoOn() methods instead.
     */
    @Deprecated public static final int ROUTE_EARPIECE = AudioSystem.ROUTE_EARPIECE;
    /**
     * Routing audio output to speaker
     * @deprecated Do not set audio routing directly, use setSpeakerphoneOn(),
     * setBluetoothScoOn() methods instead.
     */
    @Deprecated public static final int ROUTE_SPEAKER = AudioSystem.ROUTE_SPEAKER;
    /**
     * Routing audio output to bluetooth SCO (legacy alias of {@link #ROUTE_BLUETOOTH_SCO})
     * @deprecated Do not set audio routing directly, use setSpeakerphoneOn(),
     * setBluetoothScoOn() methods instead.
     */
    @Deprecated public static final int ROUTE_BLUETOOTH = AudioSystem.ROUTE_BLUETOOTH_SCO;
    /**
     * Routing audio output to bluetooth SCO
     * @deprecated Do not set audio routing directly, use setSpeakerphoneOn(),
     * setBluetoothScoOn() methods instead.
     */
    @Deprecated public static final int ROUTE_BLUETOOTH_SCO = AudioSystem.ROUTE_BLUETOOTH_SCO;
    /**
     * Routing audio output to headset
     * @deprecated Do not set audio routing directly, use setSpeakerphoneOn(),
     * setBluetoothScoOn() methods instead.
     */
    @Deprecated public static final int ROUTE_HEADSET = AudioSystem.ROUTE_HEADSET;
    /**
     * Routing audio output to bluetooth A2DP
     * @deprecated Do not set audio routing directly, use setSpeakerphoneOn(),
     * setBluetoothScoOn() methods instead.
     */
    @Deprecated public static final int ROUTE_BLUETOOTH_A2DP = AudioSystem.ROUTE_BLUETOOTH_A2DP;
    /**
     * Used for mask parameter of {@link #setRouting(int,int,int)}.
     * @deprecated Do not set audio routing directly, use setSpeakerphoneOn(),
     * setBluetoothScoOn() methods instead.
     */
    @Deprecated public static final int ROUTE_ALL = AudioSystem.ROUTE_ALL;
+
    /**
     * Sets the audio routing for a specified mode. No-op since routing can no
     * longer be set directly.
     *
     * @param mode audio mode to change route. E.g., MODE_RINGTONE.
     * @param routes bit vector of routes requested, created from one or
     *               more of ROUTE_xxx types. Set bits indicate that route should be on
     * @param mask bit vector of routes to change, created from one or more of
     *             ROUTE_xxx types. Unset bits indicate the route should be left unchanged
     *
     * @deprecated Do not set audio routing directly, use setSpeakerphoneOn(),
     * setBluetoothScoOn() methods instead.
     */
    @Deprecated
    public void setRouting(int mode, int routes, int mask) {
        // Intentionally empty: kept only for binary compatibility.
    }
+
    /**
     * Returns the current audio routing bit vector for a specified mode.
     * Always returns -1 since routing can no longer be queried directly.
     *
     * @param mode audio mode to get route (e.g., MODE_RINGTONE)
     * @return an audio route bit vector that can be compared with ROUTE_xxx
     *         bits; always -1 in current implementations
     * @deprecated Do not query audio routing directly, use isSpeakerphoneOn(),
     * isBluetoothScoOn(), isBluetoothA2dpOn() and isWiredHeadsetOn() methods instead.
     */
    @Deprecated
    public int getRouting(int mode) {
        return -1;
    }
+
    /**
     * Checks whether any music is active.
     * Delegates to the native audio system's per-stream activity query.
     *
     * @return true if any music tracks are active.
     */
    public boolean isMusicActive() {
        return AudioSystem.isStreamActive(STREAM_MUSIC, 0);
    }
+
    /**
     * @hide
     * Checks whether any music or media is actively playing on a remote device (e.g. wireless
     * display). Note that BT audio sinks are not considered remote devices.
     * @return true if {@link AudioManager#STREAM_MUSIC} is active on a remote device
     */
    public boolean isMusicActiveRemotely() {
        return AudioSystem.isStreamActiveRemotely(STREAM_MUSIC, 0);
    }
+
+ /**
+ * @hide
+ * Checks whether the current audio focus is exclusive.
+ * @return true if the top of the audio focus stack requested focus
+ * with {@link #AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE}
+ */
+ public boolean isAudioFocusExclusive() {
+ final IAudioService service = getService();
+ try {
+ return service.getCurrentAudioFocus() == AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE;
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Return a new audio session identifier not associated with any player or effect.
+ * An audio session identifier is a system wide unique identifier for a set of audio streams
+ * (one or more mixed together).
+ * <p>The primary use of the audio session ID is to associate audio effects to audio players,
+ * such as {@link MediaPlayer} or {@link AudioTrack}: all audio effects sharing the same audio
+ * session ID will be applied to the mixed audio content of the players that share the same
+ * audio session.
+ * <p>This method can for instance be used when creating one of the
+ * {@link android.media.audiofx.AudioEffect} objects to define the audio session of the effect,
+ * or to specify a session for a speech synthesis utterance
+ * in {@link android.speech.tts.TextToSpeech.Engine}.
+ * @return a new unclaimed and unused audio session identifier, or {@link #ERROR} when the
+ * system failed to generate a new session, a condition in which audio playback or recording
+ * will subsequently fail as well.
+ */
+ public int generateAudioSessionId() {
+ int session = AudioSystem.newAudioSessionId();
+ if (session > 0) {
+ return session;
+ } else {
+ Log.e(TAG, "Failure to generate a new audio session ID");
+ return ERROR;
+ }
+ }
+
    /**
     * A special audio session ID to indicate that the audio session ID isn't known and the
     * framework should generate a new value. This can be used when building a new
     * {@link AudioTrack} instance with
     * {@link AudioTrack#AudioTrack(AudioAttributes, AudioFormat, int, int, int)}.
     */
    public static final int AUDIO_SESSION_ID_GENERATE = AudioSystem.AUDIO_SESSION_ALLOCATE;
+
+
    /*
     * Sets a generic audio configuration parameter. The use of these parameters
     * is platform dependent, see libaudio
     *
     * ** Temporary interface - DO NOT USE
     *
     * TODO: Replace with a more generic key:value get/set mechanism
     *
     * param key name of parameter to set. Must not be null.
     * param value value of parameter. Must not be null.
     */
    /**
     * @hide
     * @deprecated Use {@link #setParameters(String)} instead
     */
    @Deprecated public void setParameter(String key, String value) {
        // Legacy single-pair form, rewritten as a "key=value" string.
        setParameters(key+"="+value);
    }
+
    /**
     * Sets a variable number of parameter values to audio hardware.
     * Passed straight through to the native audio system.
     *
     * @param keyValuePairs list of parameters key value pairs in the form:
     *    key1=value1;key2=value2;...
     *
     */
    public void setParameters(String keyValuePairs) {
        AudioSystem.setParameters(keyValuePairs);
    }
+
    /**
     * Gets a variable number of parameter values from audio hardware.
     * Passed straight through to the native audio system.
     *
     * @param keys list of parameters
     * @return list of parameters key value pairs in the form:
     *    key1=value1;key2=value2;...
     */
    public String getParameters(String keys) {
        return AudioSystem.getParameters(keys);
    }
+
    /* Sound effect identifiers */
    /**
     * Keyboard and direction pad click sound
     * @see #playSoundEffect(int)
     */
    public static final int FX_KEY_CLICK = 0;
    /**
     * Focus has moved up
     * @see #playSoundEffect(int)
     */
    public static final int FX_FOCUS_NAVIGATION_UP = 1;
    /**
     * Focus has moved down
     * @see #playSoundEffect(int)
     */
    public static final int FX_FOCUS_NAVIGATION_DOWN = 2;
    /**
     * Focus has moved left
     * @see #playSoundEffect(int)
     */
    public static final int FX_FOCUS_NAVIGATION_LEFT = 3;
    /**
     * Focus has moved right
     * @see #playSoundEffect(int)
     */
    public static final int FX_FOCUS_NAVIGATION_RIGHT = 4;
    /**
     * IME standard keypress sound
     * @see #playSoundEffect(int)
     */
    public static final int FX_KEYPRESS_STANDARD = 5;
    /**
     * IME spacebar keypress sound
     * @see #playSoundEffect(int)
     */
    public static final int FX_KEYPRESS_SPACEBAR = 6;
    /**
     * IME delete keypress sound
     * @see #playSoundEffect(int)
     */
    public static final int FX_KEYPRESS_DELETE = 7;
    /**
     * IME return keypress sound
     * @see #playSoundEffect(int)
     */
    public static final int FX_KEYPRESS_RETURN = 8;

    /**
     * Invalid keypress sound
     * @see #playSoundEffect(int)
     */
    public static final int FX_KEYPRESS_INVALID = 9;
    /**
     * @hide Number of sound effects
     */
    public static final int NUM_SOUND_EFFECTS = 10;
+
+ /**
+ * Plays a sound effect (Key clicks, lid open/close...)
+ * @param effectType The type of sound effect. One of
+ * {@link #FX_KEY_CLICK},
+ * {@link #FX_FOCUS_NAVIGATION_UP},
+ * {@link #FX_FOCUS_NAVIGATION_DOWN},
+ * {@link #FX_FOCUS_NAVIGATION_LEFT},
+ * {@link #FX_FOCUS_NAVIGATION_RIGHT},
+ * {@link #FX_KEYPRESS_STANDARD},
+ * {@link #FX_KEYPRESS_SPACEBAR},
+ * {@link #FX_KEYPRESS_DELETE},
+ * {@link #FX_KEYPRESS_RETURN},
+ * {@link #FX_KEYPRESS_INVALID},
+ * NOTE: This version uses the UI settings to determine
+ * whether sounds are heard or not.
+ */
+ public void playSoundEffect(int effectType) {
+ if (effectType < 0 || effectType >= NUM_SOUND_EFFECTS) {
+ return;
+ }
+
+ if (!querySoundEffectsEnabled(Process.myUserHandle().getIdentifier())) {
+ return;
+ }
+
+ final IAudioService service = getService();
+ try {
+ service.playSoundEffect(effectType);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Plays a sound effect (Key clicks, lid open/close...)
+ * @param effectType The type of sound effect. One of
+ * {@link #FX_KEY_CLICK},
+ * {@link #FX_FOCUS_NAVIGATION_UP},
+ * {@link #FX_FOCUS_NAVIGATION_DOWN},
+ * {@link #FX_FOCUS_NAVIGATION_LEFT},
+ * {@link #FX_FOCUS_NAVIGATION_RIGHT},
+ * {@link #FX_KEYPRESS_STANDARD},
+ * {@link #FX_KEYPRESS_SPACEBAR},
+ * {@link #FX_KEYPRESS_DELETE},
+ * {@link #FX_KEYPRESS_RETURN},
+ * {@link #FX_KEYPRESS_INVALID},
+ * @param userId The current user to pull sound settings from
+ * NOTE: This version uses the UI settings to determine
+ * whether sounds are heard or not.
+ * @hide
+ */
+ public void playSoundEffect(int effectType, int userId) {
+ if (effectType < 0 || effectType >= NUM_SOUND_EFFECTS) {
+ return;
+ }
+
+ if (!querySoundEffectsEnabled(userId)) {
+ return;
+ }
+
+ final IAudioService service = getService();
+ try {
+ service.playSoundEffect(effectType);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Plays a sound effect (Key clicks, lid open/close...)
+ * @param effectType The type of sound effect. One of
+ * {@link #FX_KEY_CLICK},
+ * {@link #FX_FOCUS_NAVIGATION_UP},
+ * {@link #FX_FOCUS_NAVIGATION_DOWN},
+ * {@link #FX_FOCUS_NAVIGATION_LEFT},
+ * {@link #FX_FOCUS_NAVIGATION_RIGHT},
+ * {@link #FX_KEYPRESS_STANDARD},
+ * {@link #FX_KEYPRESS_SPACEBAR},
+ * {@link #FX_KEYPRESS_DELETE},
+ * {@link #FX_KEYPRESS_RETURN},
+ * {@link #FX_KEYPRESS_INVALID},
+ * @param volume Sound effect volume.
+ * The volume value is a raw scalar so UI controls should be scaled logarithmically.
+ * If a volume of -1 is specified, the AudioManager.STREAM_MUSIC stream volume minus 3dB will be used.
+ * NOTE: This version is for applications that have their own
+ * settings panel for enabling and controlling volume.
+ */
+ public void playSoundEffect(int effectType, float volume) {
+ if (effectType < 0 || effectType >= NUM_SOUND_EFFECTS) {
+ return;
+ }
+
+ final IAudioService service = getService();
+ try {
+ service.playSoundEffectVolume(effectType, volume);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Settings has an in memory cache, so this is fast.
+ */
+ private boolean querySoundEffectsEnabled(int user) {
+ return Settings.System.getIntForUser(getContext().getContentResolver(),
+ Settings.System.SOUND_EFFECTS_ENABLED, 0, user) != 0;
+ }
+
+
+ /**
+ * Load Sound effects.
+ * This method must be called when sound effects are enabled.
+ */
+ public void loadSoundEffects() {
+ final IAudioService service = getService();
+ try {
+ service.loadSoundEffects();
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Unload Sound effects.
+ * This method can be called to free some memory when
+ * sound effects are disabled.
+ */
+ public void unloadSoundEffects() {
+ final IAudioService service = getService();
+ try {
+ service.unloadSoundEffects();
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
    /**
     * Used to indicate that no audio focus has been gained, lost, or requested.
     */
    public static final int AUDIOFOCUS_NONE = 0;

    /**
     * Used to indicate a gain of audio focus, or a request of audio focus, of unknown duration.
     * @see OnAudioFocusChangeListener#onAudioFocusChange(int)
     * @see #requestAudioFocus(OnAudioFocusChangeListener, int, int)
     */
    public static final int AUDIOFOCUS_GAIN = 1;
    /**
     * Used to indicate a temporary gain or request of audio focus, anticipated to last a short
     * amount of time. Examples of temporary changes are the playback of driving directions, or an
     * event notification.
     * @see OnAudioFocusChangeListener#onAudioFocusChange(int)
     * @see #requestAudioFocus(OnAudioFocusChangeListener, int, int)
     */
    public static final int AUDIOFOCUS_GAIN_TRANSIENT = 2;
    /**
     * Used to indicate a temporary request of audio focus, anticipated to last a short
     * amount of time, and where it is acceptable for other audio applications to keep playing
     * after having lowered their output level (also referred to as "ducking").
     * Examples of temporary changes are the playback of driving directions where playback of music
     * in the background is acceptable.
     * @see OnAudioFocusChangeListener#onAudioFocusChange(int)
     * @see #requestAudioFocus(OnAudioFocusChangeListener, int, int)
     */
    public static final int AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK = 3;
    /**
     * Used to indicate a temporary request of audio focus, anticipated to last a short
     * amount of time, during which no other applications, or system components, should play
     * anything. Examples of exclusive and transient audio focus requests are voice
     * memo recording and speech recognition, during which the system shouldn't play any
     * notifications, and media playback should have paused.
     * @see #requestAudioFocus(OnAudioFocusChangeListener, int, int)
     */
    public static final int AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE = 4;
    /**
     * Used to indicate a loss of audio focus of unknown duration.
     * @see OnAudioFocusChangeListener#onAudioFocusChange(int)
     */
    public static final int AUDIOFOCUS_LOSS = -1 * AUDIOFOCUS_GAIN;
    /**
     * Used to indicate a transient loss of audio focus.
     * @see OnAudioFocusChangeListener#onAudioFocusChange(int)
     */
    public static final int AUDIOFOCUS_LOSS_TRANSIENT = -1 * AUDIOFOCUS_GAIN_TRANSIENT;
    /**
     * Used to indicate a transient loss of audio focus where the loser of the audio focus can
     * lower its output volume if it wants to continue playing (also referred to as "ducking"), as
     * the new focus owner doesn't require others to be silent.
     * @see OnAudioFocusChangeListener#onAudioFocusChange(int)
     */
    // Loss values are defined as the negation of the corresponding gain values.
    public static final int AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK =
            -1 * AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK;
+
    /**
     * Interface definition for a callback to be invoked when the audio focus of the system is
     * updated.
     */
    public interface OnAudioFocusChangeListener {
        /**
         * Called on the listener to notify it the audio focus for this listener has been changed.
         * The focusChange value indicates whether the focus was gained,
         * whether the focus was lost, and whether that loss is transient, or whether the new focus
         * holder will hold it for an unknown amount of time.
         * When losing focus, listeners can use the focus change information to decide what
         * behavior to adopt when losing focus. A music player could for instance elect to lower
         * the volume of its music stream (duck) for transient focus losses, and pause otherwise.
         * @param focusChange the type of focus change, one of {@link AudioManager#AUDIOFOCUS_GAIN},
         *   {@link AudioManager#AUDIOFOCUS_LOSS}, {@link AudioManager#AUDIOFOCUS_LOSS_TRANSIENT}
         *   and {@link AudioManager#AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK}.
         */
        public void onAudioFocusChange(int focusChange);
    }
+
    /**
     * Internal immutable holder pairing an AudioFocusRequest with the Handler
     * on which its focus-change callback should be dispatched (null Handler
     * means the shared service event handler delegate is used).
     */
    private static class FocusRequestInfo {
        // The focus request whose listener will receive focus-change events.
        @NonNull  final AudioFocusRequest mRequest;
        // Optional caller-supplied handler for the callback; may be null.
        @Nullable final Handler mHandler;
        FocusRequestInfo(@NonNull AudioFocusRequest afr, @Nullable Handler handler) {
            mRequest = afr;
            mHandler = handler;
        }
    }
+
    /**
     * Map to convert focus event listener IDs, as used in the AudioService audio focus stack,
     * to actual listener objects. Concurrent because it is read from binder
     * callbacks and written from API calls.
     */
    private final ConcurrentHashMap<String, FocusRequestInfo> mAudioFocusIdListenerMap =
            new ConcurrentHashMap<String, FocusRequestInfo>();

    // Looks up the focus request registered under the given focus-stack id;
    // returns null when no request with that id is registered.
    private FocusRequestInfo findFocusRequestInfo(String id) {
        return mAudioFocusIdListenerMap.get(id);
    }
+
    /**
     * Handler for events (audio focus change, recording config change) coming from the
     * audio service. Built with a null Handler, so it dispatches on the caller's
     * looper or, failing that, the main looper.
     */
    private final ServiceEventHandlerDelegate mServiceEventHandlerDelegate =
            new ServiceEventHandlerDelegate(null);

    /**
     * Event types dispatched through ServiceEventHandlerDelegate's Handler.
     */
    private final static int MSSG_FOCUS_CHANGE = 0;
    private final static int MSSG_RECORDING_CONFIG_CHANGE = 1;
    private final static int MSSG_PLAYBACK_CONFIG_CHANGE = 2;
+
    /**
     * Helper class to handle the forwarding of audio service events to the appropriate listener.
     * Wraps a Handler bound to either the caller-supplied handler's looper, the
     * constructing thread's looper, or the main looper (in that order of preference).
     */
    private class ServiceEventHandlerDelegate {
        private final Handler mHandler;

        ServiceEventHandlerDelegate(Handler handler) {
            Looper looper;
            if (handler == null) {
                // No handler given: prefer the current thread's looper, fall
                // back to the process main looper.
                if ((looper = Looper.myLooper()) == null) {
                    looper = Looper.getMainLooper();
                }
            } else {
                looper = handler.getLooper();
            }

            if (looper != null) {
                // implement the event handler delegate to receive events from audio service
                mHandler = new Handler(looper) {
                    @Override
                    public void handleMessage(Message msg) {
                        switch (msg.what) {
                            // Focus change: msg.obj is the focus-listener id string,
                            // msg.arg1 carries the AUDIOFOCUS_* change value.
                            case MSSG_FOCUS_CHANGE: {
                                final FocusRequestInfo fri = findFocusRequestInfo((String)msg.obj);
                                if (fri != null)  {
                                    final OnAudioFocusChangeListener listener =
                                            fri.mRequest.getOnAudioFocusChangeListener();
                                    if (listener != null) {
                                        Log.d(TAG, "dispatching onAudioFocusChange("
                                                + msg.arg1 + ") to " + msg.obj);
                                        listener.onAudioFocusChange(msg.arg1);
                                    }
                                }
                            } break;
                            // Recording config change: msg.obj bundles callback + configs.
                            case MSSG_RECORDING_CONFIG_CHANGE: {
                                final RecordConfigChangeCallbackData cbData =
                                        (RecordConfigChangeCallbackData) msg.obj;
                                if (cbData.mCb != null) {
                                    cbData.mCb.onRecordingConfigChanged(cbData.mConfigs);
                                }
                            } break;
                            // Playback config change: msg.obj bundles callback + configs.
                            case MSSG_PLAYBACK_CONFIG_CHANGE: {
                                final PlaybackConfigChangeCallbackData cbData =
                                        (PlaybackConfigChangeCallbackData) msg.obj;
                                if (cbData.mCb != null) {
                                    if (DEBUG) {
                                        Log.d(TAG, "dispatching onPlaybackConfigChanged()");
                                    }
                                    cbData.mCb.onPlaybackConfigChanged(cbData.mConfigs);
                                }
                            } break;
                            default:
                                Log.e(TAG, "Unknown event " + msg.what);
                        }
                    }
                };
            } else {
                // No looper available at all: events cannot be dispatched.
                mHandler = null;
            }
        }

        Handler getHandler() {
            return mHandler;
        }
    }
+
    /**
     * Binder stub through which AudioService dispatches audio focus change events back to this
     * process. Events are re-posted as MSSG_FOCUS_CHANGE messages on the Handler associated
     * with the focus request, or on the shared service event handler when the request
     * specified none.
     */
    private final IAudioFocusDispatcher mAudioFocusDispatcher = new IAudioFocusDispatcher.Stub() {
        @Override
        public void dispatchAudioFocusChange(int focusChange, String id) {
            // ignore events for requests that were already unregistered locally
            final FocusRequestInfo fri = findFocusRequestInfo(id);
            if (fri != null) {
                final OnAudioFocusChangeListener listener =
                        fri.mRequest.getOnAudioFocusChangeListener();
                if (listener != null) {
                    // use the request's own handler when available, else the shared delegate
                    final Handler h = (fri.mHandler == null) ?
                            mServiceEventHandlerDelegate.getHandler() : fri.mHandler;
                    final Message m = h.obtainMessage(
                            MSSG_FOCUS_CHANGE/*what*/, focusChange/*arg1*/, 0/*arg2 ignored*/,
                            id/*obj*/);
                    h.sendMessage(m);
                }
            }
        }
    };
+
+ private String getIdForAudioFocusListener(OnAudioFocusChangeListener l) {
+ if (l == null) {
+ return new String(this.toString());
+ } else {
+ return new String(this.toString() + l.toString());
+ }
+ }
+
+ /**
+ * @hide
+ * Registers a listener to be called when audio focus changes and keeps track of the associated
+ * focus request (including Handler to use for the listener).
+ * @param afr the full request parameters
+ */
+ public void registerAudioFocusRequest(@NonNull AudioFocusRequest afr) {
+ final Handler h = afr.getOnAudioFocusChangeListenerHandler();
+ final FocusRequestInfo fri = new FocusRequestInfo(afr, (h == null) ? null :
+ new ServiceEventHandlerDelegate(h).getHandler());
+ final String key = getIdForAudioFocusListener(afr.getOnAudioFocusChangeListener());
+ mAudioFocusIdListenerMap.put(key, fri);
+ }
+
+ /**
+ * @hide
+ * Causes the specified listener to not be called anymore when focus is gained or lost.
+ * @param l the listener to unregister.
+ */
+ public void unregisterAudioFocusRequest(OnAudioFocusChangeListener l) {
+ // remove locally
+ mAudioFocusIdListenerMap.remove(getIdForAudioFocusListener(l));
+ }
+
+
+ /**
+ * A failed focus change request.
+ */
+ public static final int AUDIOFOCUS_REQUEST_FAILED = 0;
+ /**
+ * A successful focus change request.
+ */
+ public static final int AUDIOFOCUS_REQUEST_GRANTED = 1;
+ /**
+ * A focus change request whose granting is delayed: the request was successful, but the
+ * requester will only be granted audio focus once the condition that prevented immediate
+ * granting has ended.
+ * See {@link #requestAudioFocus(AudioFocusRequest)} and
+ * {@link AudioFocusRequest.Builder#setAcceptsDelayedFocusGain(boolean)}
+ */
+ public static final int AUDIOFOCUS_REQUEST_DELAYED = 2;
+
+
+ /**
+ * Request audio focus.
+ * Send a request to obtain the audio focus
+ * @param l the listener to be notified of audio focus changes
+ * @param streamType the main audio stream type affected by the focus request
+ * @param durationHint use {@link #AUDIOFOCUS_GAIN_TRANSIENT} to indicate this focus request
+ * is temporary, and focus will be abandonned shortly. Examples of transient requests are
+ * for the playback of driving directions, or notifications sounds.
+ * Use {@link #AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK} to indicate also that it's ok for
+ * the previous focus owner to keep playing if it ducks its audio output.
+ * Alternatively use {@link #AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE} for a temporary request
+ * that benefits from the system not playing disruptive sounds like notifications, for
+ * usecases such as voice memo recording, or speech recognition.
+ * Use {@link #AUDIOFOCUS_GAIN} for a focus request of unknown duration such
+ * as the playback of a song or a video.
+ * @return {@link #AUDIOFOCUS_REQUEST_FAILED} or {@link #AUDIOFOCUS_REQUEST_GRANTED}
+ * @deprecated use {@link #requestAudioFocus(AudioFocusRequest)}
+ */
+ public int requestAudioFocus(OnAudioFocusChangeListener l, int streamType, int durationHint) {
+ PlayerBase.deprecateStreamTypeForPlayback(streamType,
+ "AudioManager", "requestAudioFocus()");
+ int status = AUDIOFOCUS_REQUEST_FAILED;
+
+ try {
+ // status is guaranteed to be either AUDIOFOCUS_REQUEST_FAILED or
+ // AUDIOFOCUS_REQUEST_GRANTED as focus is requested without the
+ // AUDIOFOCUS_FLAG_DELAY_OK flag
+ status = requestAudioFocus(l,
+ new AudioAttributes.Builder()
+ .setInternalLegacyStreamType(streamType).build(),
+ durationHint,
+ 0 /* flags, legacy behavior */);
+ } catch (IllegalArgumentException e) {
+ Log.e(TAG, "Audio focus request denied due to ", e);
+ }
+
+ return status;
+ }
+
    // when adding new flags, add them to the relevant AUDIOFOCUS_FLAGS_APPS or SYSTEM masks
    /**
     * @hide
     * Use this flag when requesting audio focus to indicate it is ok for the requester to not be
     * granted audio focus immediately (as indicated by {@link #AUDIOFOCUS_REQUEST_DELAYED}) when
     * the system is in a state where focus cannot change, but be granted focus later when
     * this condition ends.
     */
    @SystemApi
    public static final int AUDIOFOCUS_FLAG_DELAY_OK = 0x1 << 0;
    /**
     * @hide
     * Use this flag when requesting audio focus to indicate that the requester
     * will pause its media playback (if applicable) when losing audio focus with
     * {@link #AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK}, rather than ducking.
     * <br>On some platforms, the ducking may be handled without the application being aware of it
     * (i.e. it will not transiently lose focus). For applications that for instance play spoken
     * content, such as audio book or podcast players, ducking may never be acceptable, and will
     * thus always pause. This flag enables them to be declared as such whenever they request focus.
     */
    @SystemApi
    public static final int AUDIOFOCUS_FLAG_PAUSES_ON_DUCKABLE_LOSS = 0x1 << 1;
    /**
     * @hide
     * Use this flag to lock audio focus so granting is temporarily disabled.
     * <br>This flag can only be used by owners of a registered
     * {@link android.media.audiopolicy.AudioPolicy} in
     * {@link #requestAudioFocus(OnAudioFocusChangeListener, AudioAttributes, int, int, AudioPolicy)}
     */
    @SystemApi
    public static final int AUDIOFOCUS_FLAG_LOCK = 0x1 << 2;
    /** @hide Mask of the flags regular applications are allowed to pass. */
    public static final int AUDIOFOCUS_FLAGS_APPS = AUDIOFOCUS_FLAG_DELAY_OK
            | AUDIOFOCUS_FLAG_PAUSES_ON_DUCKABLE_LOSS;
    /** @hide Mask of the flags system components (AudioPolicy owners) are allowed to pass. */
    public static final int AUDIOFOCUS_FLAGS_SYSTEM = AUDIOFOCUS_FLAG_DELAY_OK
            | AUDIOFOCUS_FLAG_PAUSES_ON_DUCKABLE_LOSS | AUDIOFOCUS_FLAG_LOCK;
+
+ /**
+ * Request audio focus.
+ * See the {@link AudioFocusRequest} for information about the options available to configure
+ * your request, and notification of focus gain and loss.
+ * @param focusRequest a {@link AudioFocusRequest} instance used to configure how focus is
+ * requested.
+ * @return {@link #AUDIOFOCUS_REQUEST_FAILED}, {@link #AUDIOFOCUS_REQUEST_GRANTED}
+ * or {@link #AUDIOFOCUS_REQUEST_DELAYED}.
+ * <br>Note that the return value is never {@link #AUDIOFOCUS_REQUEST_DELAYED} when focus
+ * is requested without building the {@link AudioFocusRequest} with
+ * {@link AudioFocusRequest.Builder#setAcceptsDelayedFocusGain(boolean)} set to
+ * {@code true}.
+ * @throws NullPointerException if passed a null argument
+ */
+ public int requestAudioFocus(@NonNull AudioFocusRequest focusRequest) {
+ return requestAudioFocus(focusRequest, null /* no AudioPolicy*/);
+ }
+
+ /**
+ * Abandon audio focus. Causes the previous focus owner, if any, to receive focus.
+ * @param focusRequest the {@link AudioFocusRequest} that was used when requesting focus
+ * with {@link #requestAudioFocus(AudioFocusRequest)}.
+ * @return {@link #AUDIOFOCUS_REQUEST_FAILED} or {@link #AUDIOFOCUS_REQUEST_GRANTED}
+ * @throws IllegalArgumentException if passed a null argument
+ */
+ public int abandonAudioFocusRequest(@NonNull AudioFocusRequest focusRequest) {
+ if (focusRequest == null) {
+ throw new IllegalArgumentException("Illegal null AudioFocusRequest");
+ }
+ return abandonAudioFocus(focusRequest.getOnAudioFocusChangeListener(),
+ focusRequest.getAudioAttributes());
+ }
+
+ /**
+ * @hide
+ * Request audio focus.
+ * Send a request to obtain the audio focus. This method differs from
+ * {@link #requestAudioFocus(OnAudioFocusChangeListener, int, int)} in that it can express
+ * that the requester accepts delayed grants of audio focus.
+ * @param l the listener to be notified of audio focus changes. It is not allowed to be null
+ * when the request is flagged with {@link #AUDIOFOCUS_FLAG_DELAY_OK}.
+ * @param requestAttributes non null {@link AudioAttributes} describing the main reason for
+ * requesting audio focus.
+ * @param durationHint use {@link #AUDIOFOCUS_GAIN_TRANSIENT} to indicate this focus request
+ * is temporary, and focus will be abandonned shortly. Examples of transient requests are
+ * for the playback of driving directions, or notifications sounds.
+ * Use {@link #AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK} to indicate also that it's ok for
+ * the previous focus owner to keep playing if it ducks its audio output.
+ * Alternatively use {@link #AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE} for a temporary request
+ * that benefits from the system not playing disruptive sounds like notifications, for
+ * usecases such as voice memo recording, or speech recognition.
+ * Use {@link #AUDIOFOCUS_GAIN} for a focus request of unknown duration such
+ * as the playback of a song or a video.
+ * @param flags 0 or a combination of {link #AUDIOFOCUS_FLAG_DELAY_OK},
+ * {@link #AUDIOFOCUS_FLAG_PAUSES_ON_DUCKABLE_LOSS} and {@link #AUDIOFOCUS_FLAG_LOCK}.
+ * <br>Use 0 when not using any flags for the request, which behaves like
+ * {@link #requestAudioFocus(OnAudioFocusChangeListener, int, int)}, where either audio
+ * focus is granted immediately, or the grant request fails because the system is in a
+ * state where focus cannot change (e.g. a phone call).
+ * @return {@link #AUDIOFOCUS_REQUEST_FAILED}, {@link #AUDIOFOCUS_REQUEST_GRANTED}
+ * or {@link #AUDIOFOCUS_REQUEST_DELAYED}.
+ * The return value is never {@link #AUDIOFOCUS_REQUEST_DELAYED} when focus is requested
+ * without the {@link #AUDIOFOCUS_FLAG_DELAY_OK} flag.
+ * @throws IllegalArgumentException
+ */
+ @SystemApi
+ @RequiresPermission(android.Manifest.permission.MODIFY_PHONE_STATE)
+ public int requestAudioFocus(OnAudioFocusChangeListener l,
+ @NonNull AudioAttributes requestAttributes,
+ int durationHint,
+ int flags) throws IllegalArgumentException {
+ if (flags != (flags & AUDIOFOCUS_FLAGS_APPS)) {
+ throw new IllegalArgumentException("Invalid flags 0x"
+ + Integer.toHexString(flags).toUpperCase());
+ }
+ return requestAudioFocus(l, requestAttributes, durationHint,
+ flags & AUDIOFOCUS_FLAGS_APPS,
+ null /* no AudioPolicy*/);
+ }
+
+ /**
+ * @hide
+ * Request or lock audio focus.
+ * This method is to be used by system components that have registered an
+ * {@link android.media.audiopolicy.AudioPolicy} to request audio focus, but also to "lock" it
+ * so focus granting is temporarily disabled.
+ * @param l see the description of the same parameter in
+ * {@link #requestAudioFocus(OnAudioFocusChangeListener, AudioAttributes, int, int)}
+ * @param requestAttributes non null {@link AudioAttributes} describing the main reason for
+ * requesting audio focus.
+ * @param durationHint see the description of the same parameter in
+ * {@link #requestAudioFocus(OnAudioFocusChangeListener, AudioAttributes, int, int)}
+ * @param flags 0 or a combination of {link #AUDIOFOCUS_FLAG_DELAY_OK},
+ * {@link #AUDIOFOCUS_FLAG_PAUSES_ON_DUCKABLE_LOSS}, and {@link #AUDIOFOCUS_FLAG_LOCK}.
+ * <br>Use 0 when not using any flags for the request, which behaves like
+ * {@link #requestAudioFocus(OnAudioFocusChangeListener, int, int)}, where either audio
+ * focus is granted immediately, or the grant request fails because the system is in a
+ * state where focus cannot change (e.g. a phone call).
+ * @param ap a registered {@link android.media.audiopolicy.AudioPolicy} instance when locking
+ * focus, or null.
+ * @return see the description of the same return value in
+ * {@link #requestAudioFocus(OnAudioFocusChangeListener, AudioAttributes, int, int)}
+ * @throws IllegalArgumentException
+ * @deprecated use {@link #requestAudioFocus(AudioFocusRequest, AudioPolicy)}
+ */
+ @SystemApi
+ @RequiresPermission(anyOf= {
+ android.Manifest.permission.MODIFY_PHONE_STATE,
+ android.Manifest.permission.MODIFY_AUDIO_ROUTING
+ })
+ public int requestAudioFocus(OnAudioFocusChangeListener l,
+ @NonNull AudioAttributes requestAttributes,
+ int durationHint,
+ int flags,
+ AudioPolicy ap) throws IllegalArgumentException {
+ // parameter checking
+ if (requestAttributes == null) {
+ throw new IllegalArgumentException("Illegal null AudioAttributes argument");
+ }
+ if (!AudioFocusRequest.isValidFocusGain(durationHint)) {
+ throw new IllegalArgumentException("Invalid duration hint");
+ }
+ if (flags != (flags & AUDIOFOCUS_FLAGS_SYSTEM)) {
+ throw new IllegalArgumentException("Illegal flags 0x"
+ + Integer.toHexString(flags).toUpperCase());
+ }
+ if (((flags & AUDIOFOCUS_FLAG_DELAY_OK) == AUDIOFOCUS_FLAG_DELAY_OK) && (l == null)) {
+ throw new IllegalArgumentException(
+ "Illegal null focus listener when flagged as accepting delayed focus grant");
+ }
+ if (((flags & AUDIOFOCUS_FLAG_PAUSES_ON_DUCKABLE_LOSS)
+ == AUDIOFOCUS_FLAG_PAUSES_ON_DUCKABLE_LOSS) && (l == null)) {
+ throw new IllegalArgumentException(
+ "Illegal null focus listener when flagged as pausing instead of ducking");
+ }
+ if (((flags & AUDIOFOCUS_FLAG_LOCK) == AUDIOFOCUS_FLAG_LOCK) && (ap == null)) {
+ throw new IllegalArgumentException(
+ "Illegal null audio policy when locking audio focus");
+ }
+
+ final AudioFocusRequest afr = new AudioFocusRequest.Builder(durationHint)
+ .setOnAudioFocusChangeListenerInt(l, null /* no Handler for this legacy API */)
+ .setAudioAttributes(requestAttributes)
+ .setAcceptsDelayedFocusGain((flags & AUDIOFOCUS_FLAG_DELAY_OK)
+ == AUDIOFOCUS_FLAG_DELAY_OK)
+ .setWillPauseWhenDucked((flags & AUDIOFOCUS_FLAG_PAUSES_ON_DUCKABLE_LOSS)
+ == AUDIOFOCUS_FLAG_PAUSES_ON_DUCKABLE_LOSS)
+ .setLocksFocus((flags & AUDIOFOCUS_FLAG_LOCK) == AUDIOFOCUS_FLAG_LOCK)
+ .build();
+ return requestAudioFocus(afr, ap);
+ }
+
+ /**
+ * @hide
+ * Request or lock audio focus.
+ * This method is to be used by system components that have registered an
+ * {@link android.media.audiopolicy.AudioPolicy} to request audio focus, but also to "lock" it
+ * so focus granting is temporarily disabled.
+ * @param afr see the description of the same parameter in
+ * {@link #requestAudioFocus(AudioFocusRequest)}
+ * @param ap a registered {@link android.media.audiopolicy.AudioPolicy} instance when locking
+ * focus, or null.
+ * @return {@link #AUDIOFOCUS_REQUEST_FAILED}, {@link #AUDIOFOCUS_REQUEST_GRANTED}
+ * or {@link #AUDIOFOCUS_REQUEST_DELAYED}.
+ * @throws NullPointerException if the AudioFocusRequest is null
+ * @throws IllegalArgumentException when trying to lock focus without an AudioPolicy
+ */
+ @SystemApi
+ @RequiresPermission(android.Manifest.permission.MODIFY_AUDIO_ROUTING)
+ public int requestAudioFocus(@NonNull AudioFocusRequest afr, @Nullable AudioPolicy ap) {
+ if (afr == null) {
+ throw new NullPointerException("Illegal null AudioFocusRequest");
+ }
+ // this can only be checked now, not during the creation of the AudioFocusRequest instance
+ if (afr.locksFocus() && ap == null) {
+ throw new IllegalArgumentException(
+ "Illegal null audio policy when locking audio focus");
+ }
+ registerAudioFocusRequest(afr);
+ final IAudioService service = getService();
+ final int status;
+ int sdk;
+ try {
+ sdk = getContext().getApplicationInfo().targetSdkVersion;
+ } catch (NullPointerException e) {
+ // some tests don't have a Context
+ sdk = Build.VERSION.SDK_INT;
+ }
+ try {
+ status = service.requestAudioFocus(afr.getAudioAttributes(),
+ afr.getFocusGain(), mICallBack,
+ mAudioFocusDispatcher,
+ getIdForAudioFocusListener(afr.getOnAudioFocusChangeListener()),
+ getContext().getOpPackageName() /* package name */, afr.getFlags(),
+ ap != null ? ap.cb() : null,
+ sdk);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ return status;
+ }
+
+ /**
+ * @hide
+ * Used internally by telephony package to request audio focus. Will cause the focus request
+ * to be associated with the "voice communication" identifier only used in AudioService
+ * to identify this use case.
+ * @param streamType use STREAM_RING for focus requests when ringing, VOICE_CALL for
+ * the establishment of the call
+ * @param durationHint the type of focus request. AUDIOFOCUS_GAIN_TRANSIENT is recommended so
+ * media applications resume after a call
+ */
+ public void requestAudioFocusForCall(int streamType, int durationHint) {
+ final IAudioService service = getService();
+ try {
+ service.requestAudioFocus(new AudioAttributes.Builder()
+ .setInternalLegacyStreamType(streamType).build(),
+ durationHint, mICallBack, null,
+ AudioSystem.IN_VOICE_COMM_FOCUS_ID,
+ getContext().getOpPackageName(),
+ AUDIOFOCUS_FLAG_LOCK,
+ null /* policy token */, 0 /* sdk n/a here*/);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * @hide
+ * Return the volume ramping time for a sound to be played after the given focus request,
+ * and to play a sound of the given attributes
+ * @param focusGain
+ * @param attr
+ * @return
+ */
+ public int getFocusRampTimeMs(int focusGain, AudioAttributes attr) {
+ final IAudioService service = getService();
+ try {
+ return service.getFocusRampTimeMs(focusGain, attr);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * @hide
+ * Notifies an application with a focus listener of gain or loss of audio focus.
+ * This method can only be used by owners of an {@link AudioPolicy} configured with
+ * {@link AudioPolicy.Builder#setIsAudioFocusPolicy(boolean)} set to true.
+ * @param afi the recipient of the focus change, that has previously requested audio focus, and
+ * that was received by the {@code AudioPolicy} through
+ * {@link AudioPolicy.AudioPolicyFocusListener#onAudioFocusRequest(AudioFocusInfo, int)}.
+ * @param focusChange one of focus gain types ({@link #AUDIOFOCUS_GAIN},
+ * {@link #AUDIOFOCUS_GAIN_TRANSIENT}, {@link #AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK} or
+ * {@link #AUDIOFOCUS_GAIN_TRANSIENT_EXCLUSIVE})
+ * or one of the focus loss types ({@link AudioManager#AUDIOFOCUS_LOSS},
+ * {@link AudioManager#AUDIOFOCUS_LOSS_TRANSIENT},
+ * or {@link AudioManager#AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK}).
+ * <br>For the focus gain, the change type should be the same as the app requested.
+ * @param ap a valid registered {@link AudioPolicy} configured as a focus policy.
+ * @return {@link #AUDIOFOCUS_REQUEST_GRANTED} if the dispatch was successfully sent, or
+ * {@link #AUDIOFOCUS_REQUEST_FAILED} if the focus client didn't have a listener, or
+ * if there was an error sending the request.
+ * @throws NullPointerException if the {@link AudioFocusInfo} or {@link AudioPolicy} are null.
+ */
+ @SystemApi
+ @RequiresPermission(android.Manifest.permission.MODIFY_AUDIO_ROUTING)
+ public int dispatchAudioFocusChange(@NonNull AudioFocusInfo afi, int focusChange,
+ @NonNull AudioPolicy ap) {
+ if (afi == null) {
+ throw new NullPointerException("Illegal null AudioFocusInfo");
+ }
+ if (ap == null) {
+ throw new NullPointerException("Illegal null AudioPolicy");
+ }
+ final IAudioService service = getService();
+ try {
+ return service.dispatchFocusChange(afi, focusChange, ap.cb());
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * @hide
+ * Used internally by telephony package to abandon audio focus, typically after a call or
+ * when ringing ends and the call is rejected or not answered.
+ * Should match one or more calls to {@link #requestAudioFocusForCall(int, int)}.
+ */
+ public void abandonAudioFocusForCall() {
+ final IAudioService service = getService();
+ try {
+ service.abandonAudioFocus(null, AudioSystem.IN_VOICE_COMM_FOCUS_ID,
+ null /*AudioAttributes, legacy behavior*/, getContext().getOpPackageName());
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Abandon audio focus. Causes the previous focus owner, if any, to receive focus.
+ * @param l the listener with which focus was requested.
+ * @return {@link #AUDIOFOCUS_REQUEST_FAILED} or {@link #AUDIOFOCUS_REQUEST_GRANTED}
+ * @deprecated use {@link #abandonAudioFocusRequest(AudioFocusRequest)}
+ */
+ public int abandonAudioFocus(OnAudioFocusChangeListener l) {
+ return abandonAudioFocus(l, null /*AudioAttributes, legacy behavior*/);
+ }
+
+ /**
+ * @hide
+ * Abandon audio focus. Causes the previous focus owner, if any, to receive focus.
+ * @param l the listener with which focus was requested.
+ * @param aa the {@link AudioAttributes} with which audio focus was requested
+ * @return {@link #AUDIOFOCUS_REQUEST_FAILED} or {@link #AUDIOFOCUS_REQUEST_GRANTED}
+ * @deprecated use {@link #abandonAudioFocusRequest(AudioFocusRequest)}
+ */
+ @SystemApi
+ @SuppressLint("Doclava125") // no permission enforcement, but only "undoes" what would have been
+ // done by a matching requestAudioFocus
+ public int abandonAudioFocus(OnAudioFocusChangeListener l, AudioAttributes aa) {
+ int status = AUDIOFOCUS_REQUEST_FAILED;
+ unregisterAudioFocusRequest(l);
+ final IAudioService service = getService();
+ try {
+ status = service.abandonAudioFocus(mAudioFocusDispatcher,
+ getIdForAudioFocusListener(l), aa, getContext().getOpPackageName());
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ return status;
+ }
+
+ //====================================================================
+ // Remote Control
+ /**
+ * Register a component to be the sole receiver of MEDIA_BUTTON intents.
+ * @param eventReceiver identifier of a {@link android.content.BroadcastReceiver}
+ * that will receive the media button intent. This broadcast receiver must be declared
+ * in the application manifest. The package of the component must match that of
+ * the context you're registering from.
+ * @deprecated Use {@link MediaSession#setMediaButtonReceiver(PendingIntent)} instead.
+ */
+ @Deprecated
+ public void registerMediaButtonEventReceiver(ComponentName eventReceiver) {
+ if (eventReceiver == null) {
+ return;
+ }
+ if (!eventReceiver.getPackageName().equals(getContext().getPackageName())) {
+ Log.e(TAG, "registerMediaButtonEventReceiver() error: " +
+ "receiver and context package names don't match");
+ return;
+ }
+ // construct a PendingIntent for the media button and register it
+ Intent mediaButtonIntent = new Intent(Intent.ACTION_MEDIA_BUTTON);
+ // the associated intent will be handled by the component being registered
+ mediaButtonIntent.setComponent(eventReceiver);
+ PendingIntent pi = PendingIntent.getBroadcast(getContext(),
+ 0/*requestCode, ignored*/, mediaButtonIntent, 0/*flags*/);
+ registerMediaButtonIntent(pi, eventReceiver);
+ }
+
+ /**
+ * Register a component to be the sole receiver of MEDIA_BUTTON intents. This is like
+ * {@link #registerMediaButtonEventReceiver(android.content.ComponentName)}, but allows
+ * the buttons to go to any PendingIntent. Note that you should only use this form if
+ * you know you will continue running for the full time until unregistering the
+ * PendingIntent.
+ * @param eventReceiver target that will receive media button intents. The PendingIntent
+ * will be sent an {@link Intent#ACTION_MEDIA_BUTTON} event when a media button action
+ * occurs, with {@link Intent#EXTRA_KEY_EVENT} added and holding the key code of the
+ * media button that was pressed.
+ * @deprecated Use {@link MediaSession#setMediaButtonReceiver(PendingIntent)} instead.
+ */
+ @Deprecated
+ public void registerMediaButtonEventReceiver(PendingIntent eventReceiver) {
+ if (eventReceiver == null) {
+ return;
+ }
+ registerMediaButtonIntent(eventReceiver, null);
+ }
+
+ /**
+ * @hide
+ * no-op if (pi == null) or (eventReceiver == null)
+ */
+ public void registerMediaButtonIntent(PendingIntent pi, ComponentName eventReceiver) {
+ if (pi == null) {
+ Log.e(TAG, "Cannot call registerMediaButtonIntent() with a null parameter");
+ return;
+ }
+ MediaSessionLegacyHelper helper = MediaSessionLegacyHelper.getHelper(getContext());
+ helper.addMediaButtonListener(pi, eventReceiver, getContext());
+ }
+
+ /**
+ * Unregister the receiver of MEDIA_BUTTON intents.
+ * @param eventReceiver identifier of a {@link android.content.BroadcastReceiver}
+ * that was registered with {@link #registerMediaButtonEventReceiver(ComponentName)}.
+ * @deprecated Use {@link MediaSession} instead.
+ */
+ @Deprecated
+ public void unregisterMediaButtonEventReceiver(ComponentName eventReceiver) {
+ if (eventReceiver == null) {
+ return;
+ }
+ // construct a PendingIntent for the media button and unregister it
+ Intent mediaButtonIntent = new Intent(Intent.ACTION_MEDIA_BUTTON);
+ // the associated intent will be handled by the component being registered
+ mediaButtonIntent.setComponent(eventReceiver);
+ PendingIntent pi = PendingIntent.getBroadcast(getContext(),
+ 0/*requestCode, ignored*/, mediaButtonIntent, 0/*flags*/);
+ unregisterMediaButtonIntent(pi);
+ }
+
+ /**
+ * Unregister the receiver of MEDIA_BUTTON intents.
+ * @param eventReceiver same PendingIntent that was registed with
+ * {@link #registerMediaButtonEventReceiver(PendingIntent)}.
+ * @deprecated Use {@link MediaSession} instead.
+ */
+ @Deprecated
+ public void unregisterMediaButtonEventReceiver(PendingIntent eventReceiver) {
+ if (eventReceiver == null) {
+ return;
+ }
+ unregisterMediaButtonIntent(eventReceiver);
+ }
+
+ /**
+ * @hide
+ */
+ public void unregisterMediaButtonIntent(PendingIntent pi) {
+ MediaSessionLegacyHelper helper = MediaSessionLegacyHelper.getHelper(getContext());
+ helper.removeMediaButtonListener(pi);
+ }
+
+ /**
+ * Registers the remote control client for providing information to display on the remote
+ * controls.
+ * @param rcClient The remote control client from which remote controls will receive
+ * information to display.
+ * @see RemoteControlClient
+ * @deprecated Use {@link MediaSession} instead.
+ */
+ @Deprecated
+ public void registerRemoteControlClient(RemoteControlClient rcClient) {
+ if ((rcClient == null) || (rcClient.getRcMediaIntent() == null)) {
+ return;
+ }
+ rcClient.registerWithSession(MediaSessionLegacyHelper.getHelper(getContext()));
+ }
+
+ /**
+ * Unregisters the remote control client that was providing information to display on the
+ * remote controls.
+ * @param rcClient The remote control client to unregister.
+ * @see #registerRemoteControlClient(RemoteControlClient)
+ * @deprecated Use {@link MediaSession} instead.
+ */
+ @Deprecated
+ public void unregisterRemoteControlClient(RemoteControlClient rcClient) {
+ if ((rcClient == null) || (rcClient.getRcMediaIntent() == null)) {
+ return;
+ }
+ rcClient.unregisterWithSession(MediaSessionLegacyHelper.getHelper(getContext()));
+ }
+
+ /**
+ * Registers a {@link RemoteController} instance for it to receive media
+ * metadata updates and playback state information from applications using
+ * {@link RemoteControlClient}, and control their playback.
+ * <p>
+ * Registration requires the {@link RemoteController.OnClientUpdateListener} listener to be
+ * one of the enabled notification listeners (see
+ * {@link android.service.notification.NotificationListenerService}).
+ *
+ * @param rctlr the object to register.
+ * @return true if the {@link RemoteController} was successfully registered,
+ * false if an error occurred, due to an internal system error, or
+ * insufficient permissions.
+ * @deprecated Use
+ * {@link MediaSessionManager#addOnActiveSessionsChangedListener(android.media.session.MediaSessionManager.OnActiveSessionsChangedListener, ComponentName)}
+ * and {@link MediaController} instead.
+ */
+ @Deprecated
+ public boolean registerRemoteController(RemoteController rctlr) {
+ if (rctlr == null) {
+ return false;
+ }
+ rctlr.startListeningToSessions();
+ return true;
+ }
+
+ /**
+ * Unregisters a {@link RemoteController}, causing it to no longer receive
+ * media metadata and playback state information, and no longer be capable
+ * of controlling playback.
+ *
+ * @param rctlr the object to unregister.
+ * @deprecated Use
+ * {@link MediaSessionManager#removeOnActiveSessionsChangedListener(android.media.session.MediaSessionManager.OnActiveSessionsChangedListener)}
+ * instead.
+ */
+ @Deprecated
+ public void unregisterRemoteController(RemoteController rctlr) {
+ if (rctlr == null) {
+ return;
+ }
+ rctlr.stopListeningToSessions();
+ }
+
+
+ //====================================================================
+ // Audio policy
+ /**
+ * @hide
+ * Register the given {@link AudioPolicy}.
+ * This call is synchronous and blocks until the registration process successfully completed
+ * or failed to complete.
+ * @param policy the non-null {@link AudioPolicy} to register.
+ * @return {@link #ERROR} if there was an error communicating with the registration service
+ * or if the user doesn't have the required
+ * {@link android.Manifest.permission#MODIFY_AUDIO_ROUTING} permission,
+ * {@link #SUCCESS} otherwise.
+ */
+ @SystemApi
+ @RequiresPermission(android.Manifest.permission.MODIFY_AUDIO_ROUTING)
+ public int registerAudioPolicy(@NonNull AudioPolicy policy) {
+ if (policy == null) {
+ throw new IllegalArgumentException("Illegal null AudioPolicy argument");
+ }
+ final IAudioService service = getService();
+ try {
+ String regId = service.registerAudioPolicy(policy.getConfig(), policy.cb(),
+ policy.hasFocusListener(), policy.isFocusPolicy());
+ if (regId == null) {
+ return ERROR;
+ } else {
+ policy.setRegistration(regId);
+ }
+ // successful registration
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ return SUCCESS;
+ }
+
+ /**
+ * @hide
+ * @param policy the non-null {@link AudioPolicy} to unregister.
+ */
+ @SystemApi
+ @RequiresPermission(android.Manifest.permission.MODIFY_AUDIO_ROUTING)
+ public void unregisterAudioPolicyAsync(@NonNull AudioPolicy policy) {
+ if (policy == null) {
+ throw new IllegalArgumentException("Illegal null AudioPolicy argument");
+ }
+ final IAudioService service = getService();
+ try {
+ service.unregisterAudioPolicyAsync(policy.cb());
+ policy.setRegistration(null);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ //====================================================================
+ // Notification of playback activity & playback configuration
+ /**
+ * Interface for receiving update notifications about the playback activity on the system.
+ * Extend this abstract class and register it with
+ * {@link AudioManager#registerAudioPlaybackCallback(AudioPlaybackCallback, Handler)}
+ * to be notified.
+ * Use {@link AudioManager#getActivePlaybackConfigurations()} to query the current
+ * configuration.
+ * @see AudioPlaybackConfiguration
+ */
+ public static abstract class AudioPlaybackCallback {
+ /**
+ * Called whenever the playback activity and configuration has changed.
+ * @param configs list containing the results of
+ * {@link AudioManager#getActivePlaybackConfigurations()}.
+ */
+ public void onPlaybackConfigChanged(List<AudioPlaybackConfiguration> configs) {}
+ }
+
+ private static class AudioPlaybackCallbackInfo {
+ final AudioPlaybackCallback mCb;
+ final Handler mHandler;
+ AudioPlaybackCallbackInfo(AudioPlaybackCallback cb, Handler handler) {
+ mCb = cb;
+ mHandler = handler;
+ }
+ }
+
+    /**
+     * Message payload used when posting a playback-configuration change to a callback's
+     * handler: the callback to invoke and the configuration list to pass to it.
+     */
+    private final static class PlaybackConfigChangeCallbackData {
+        final AudioPlaybackCallback mCb;
+        final List<AudioPlaybackConfiguration> mConfigs;
+
+        PlaybackConfigChangeCallbackData(AudioPlaybackCallback cb,
+                List<AudioPlaybackConfiguration> configs) {
+            mCb = cb;
+            mConfigs = configs;
+        }
+    }
+
+    /**
+     * Register a callback to be notified of audio playback changes through
+     * {@link AudioPlaybackCallback}
+     * @param cb non-null callback to register
+     * @param handler the {@link Handler} object for the thread on which to execute
+     * the callback. If <code>null</code>, the {@link Handler} associated with the main
+     * {@link Looper} will be used.
+     * @throws IllegalArgumentException if {@code cb} is null
+     */
+    public void registerAudioPlaybackCallback(@NonNull AudioPlaybackCallback cb, Handler handler)
+    {
+        if (cb == null) {
+            throw new IllegalArgumentException("Illegal null AudioPlaybackCallback argument");
+        }
+
+        synchronized(mPlaybackCallbackLock) {
+            // lazy initialization of the list of playback callbacks
+            if (mPlaybackCallbackList == null) {
+                mPlaybackCallbackList = new ArrayList<AudioPlaybackCallbackInfo>();
+            }
+            final int oldCbCount = mPlaybackCallbackList.size();
+            if (!hasPlaybackCallback_sync(cb)) {
+                mPlaybackCallbackList.add(new AudioPlaybackCallbackInfo(cb,
+                        new ServiceEventHandlerDelegate(handler).getHandler()));
+                final int newCbCount = mPlaybackCallbackList.size();
+                if ((oldCbCount == 0) && (newCbCount > 0)) {
+                    // first callback added: register binder for callbacks from the service
+                    try {
+                        getService().registerPlaybackCallback(mPlayCb);
+                    } catch (RemoteException e) {
+                        throw e.rethrowFromSystemServer();
+                    }
+                }
+            } else {
+                // fixed missing space: was "previously" + "registered" which logged
+                // "previouslyregistered"
+                Log.w(TAG, "attempt to call registerAudioPlaybackCallback() on a previously"
+                        + " registered callback");
+            }
+        }
+    }
+
+    /**
+     * Unregister an audio playback callback previously registered with
+     * {@link #registerAudioPlaybackCallback(AudioPlaybackCallback, Handler)}.
+     * @param cb non-null callback to unregister
+     * @throws IllegalArgumentException if {@code cb} is null
+     */
+    public void unregisterAudioPlaybackCallback(@NonNull AudioPlaybackCallback cb) {
+        if (cb == null) {
+            throw new IllegalArgumentException("Illegal null AudioPlaybackCallback argument");
+        }
+        synchronized(mPlaybackCallbackLock) {
+            if (mPlaybackCallbackList == null) {
+                Log.w(TAG, "attempt to call unregisterAudioPlaybackCallback() on a callback"
+                        + " that was never registered");
+                return;
+            }
+            final boolean hadCallbacks = !mPlaybackCallbackList.isEmpty();
+            if (!removePlaybackCallback_sync(cb)) {
+                Log.w(TAG, "attempt to call unregisterAudioPlaybackCallback() on a callback"
+                        + " already unregistered or never registered");
+                return;
+            }
+            if (hadCallbacks && mPlaybackCallbackList.isEmpty()) {
+                // last callback removed: stop receiving binder callbacks from the service
+                try {
+                    getService().unregisterPlaybackCallback(mPlayCb);
+                } catch (RemoteException e) {
+                    throw e.rethrowFromSystemServer();
+                }
+            }
+        }
+    }
+
+    /**
+     * Returns the current active audio playback configurations of the device
+     * @return a non-null list of playback configurations. An empty list indicates there is no
+     *     playback active when queried.
+     * @see AudioPlaybackConfiguration
+     */
+    public @NonNull List<AudioPlaybackConfiguration> getActivePlaybackConfigurations() {
+        try {
+            // straight pass-through to the audio service
+            return getService().getActivePlaybackConfigurations();
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+ /**
+ * All operations on this list are sync'd on mPlaybackCallbackLock.
+ * List is lazy-initialized in
+ * {@link #registerAudioPlaybackCallback(AudioPlaybackCallback, Handler)}.
+ * List can be null.
+ */
+ private List<AudioPlaybackCallbackInfo> mPlaybackCallbackList;
+ private final Object mPlaybackCallbackLock = new Object();
+
+    /**
+     * Returns true if the given callback is already present in mPlaybackCallbackList.
+     * Must be called synchronized on mPlaybackCallbackLock.
+     */
+    private boolean hasPlaybackCallback_sync(@NonNull AudioPlaybackCallback cb) {
+        if (mPlaybackCallbackList == null) {
+            return false;
+        }
+        for (AudioPlaybackCallbackInfo info : mPlaybackCallbackList) {
+            if (cb.equals(info.mCb)) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+ /**
+ * Must be called synchronized on mPlaybackCallbackLock
+ */
+ private boolean removePlaybackCallback_sync(@NonNull AudioPlaybackCallback cb) {
+ if (mPlaybackCallbackList != null) {
+ for (int i=0 ; i < mPlaybackCallbackList.size() ; i++) {
+ if (cb.equals(mPlaybackCallbackList.get(i).mCb)) {
+ mPlaybackCallbackList.remove(i);
+ return true;
+ }
+ }
+ }
+ return false;
+ }
+
+    /**
+     * Binder stub registered with the audio service when the first playback callback is added.
+     * Fans the service's notification out to every registered callback by posting a message
+     * to that callback's handler (dispatch never runs the app callback on the binder thread).
+     */
+    private final IPlaybackConfigDispatcher mPlayCb = new IPlaybackConfigDispatcher.Stub() {
+        @Override
+        public void dispatchPlaybackConfigChange(List<AudioPlaybackConfiguration> configs) {
+            synchronized(mPlaybackCallbackLock) {
+                if (mPlaybackCallbackList != null) {
+                    for (int i=0 ; i < mPlaybackCallbackList.size() ; i++) {
+                        final AudioPlaybackCallbackInfo arci = mPlaybackCallbackList.get(i);
+                        if (arci.mHandler != null) {
+                            // post the callback + configs as the message payload
+                            final Message m = arci.mHandler.obtainMessage(
+                                    MSSG_PLAYBACK_CONFIG_CHANGE/*what*/,
+                                    new PlaybackConfigChangeCallbackData(arci.mCb, configs)/*obj*/);
+                            arci.mHandler.sendMessage(m);
+                        }
+                    }
+                }
+            }
+        }
+
+    };
+
+ //====================================================================
+ // Notification of recording activity & recording configuration
+    /**
+     * Interface for receiving update notifications about the recording configuration. Extend
+     * this abstract class and register it with
+     * {@link AudioManager#registerAudioRecordingCallback(AudioRecordingCallback, Handler)}
+     * to be notified.
+     * Use {@link AudioManager#getActiveRecordingConfigurations()} to query the current
+     * configuration.
+     * Callbacks are dispatched on the {@link Handler} supplied at registration time (or the
+     * main looper's handler if none was given).
+     * @see AudioRecordingConfiguration
+     */
+    public static abstract class AudioRecordingCallback {
+        /**
+         * Called whenever the device recording configuration has changed.
+         * @param configs list containing the results of
+         *      {@link AudioManager#getActiveRecordingConfigurations()}.
+         */
+        public void onRecordingConfigChanged(List<AudioRecordingConfiguration> configs) {}
+    }
+
+    /**
+     * Pairs a registered {@link AudioRecordingCallback} with the {@link Handler} on which its
+     * notifications must be dispatched. Immutable; stored in mRecordCallbackList.
+     */
+    private static class AudioRecordingCallbackInfo {
+        final AudioRecordingCallback mCb;
+        final Handler mHandler;
+        AudioRecordingCallbackInfo(AudioRecordingCallback cb, Handler handler) {
+            mCb = cb;
+            mHandler = handler;
+        }
+    }
+
+    /**
+     * Message payload used when posting a recording-configuration change to a callback's
+     * handler: the callback to invoke and the configuration list to pass to it.
+     */
+    private final static class RecordConfigChangeCallbackData {
+        final AudioRecordingCallback mCb;
+        final List<AudioRecordingConfiguration> mConfigs;
+
+        RecordConfigChangeCallbackData(AudioRecordingCallback cb,
+                List<AudioRecordingConfiguration> configs) {
+            mCb = cb;
+            mConfigs = configs;
+        }
+    }
+
+    /**
+     * Register a callback to be notified of audio recording changes through
+     * {@link AudioRecordingCallback}
+     * @param cb non-null callback to register
+     * @param handler the {@link Handler} object for the thread on which to execute
+     * the callback. If <code>null</code>, the {@link Handler} associated with the main
+     * {@link Looper} will be used.
+     * @throws IllegalArgumentException if {@code cb} is null
+     */
+    public void registerAudioRecordingCallback(@NonNull AudioRecordingCallback cb, Handler handler)
+    {
+        if (cb == null) {
+            throw new IllegalArgumentException("Illegal null AudioRecordingCallback argument");
+        }
+
+        synchronized(mRecordCallbackLock) {
+            // lazy initialization of the list of recording callbacks
+            if (mRecordCallbackList == null) {
+                mRecordCallbackList = new ArrayList<AudioRecordingCallbackInfo>();
+            }
+            final int oldCbCount = mRecordCallbackList.size();
+            if (!hasRecordCallback_sync(cb)) {
+                mRecordCallbackList.add(new AudioRecordingCallbackInfo(cb,
+                        new ServiceEventHandlerDelegate(handler).getHandler()));
+                final int newCbCount = mRecordCallbackList.size();
+                if ((oldCbCount == 0) && (newCbCount > 0)) {
+                    // first callback added: register binder for callbacks from the service
+                    final IAudioService service = getService();
+                    try {
+                        service.registerRecordingCallback(mRecCb);
+                    } catch (RemoteException e) {
+                        throw e.rethrowFromSystemServer();
+                    }
+                }
+            } else {
+                // fixed missing space: was "previously" + "registered" which logged
+                // "previouslyregistered"
+                Log.w(TAG, "attempt to call registerAudioRecordingCallback() on a previously"
+                        + " registered callback");
+            }
+        }
+    }
+
+    /**
+     * Unregister an audio recording callback previously registered with
+     * {@link #registerAudioRecordingCallback(AudioRecordingCallback, Handler)}.
+     * @param cb non-null callback to unregister
+     * @throws IllegalArgumentException if {@code cb} is null
+     */
+    public void unregisterAudioRecordingCallback(@NonNull AudioRecordingCallback cb) {
+        if (cb == null) {
+            throw new IllegalArgumentException("Illegal null AudioRecordingCallback argument");
+        }
+        synchronized(mRecordCallbackLock) {
+            if (mRecordCallbackList == null) {
+                // consistency fix: warn like unregisterAudioPlaybackCallback() does instead of
+                // returning silently
+                Log.w(TAG, "attempt to call unregisterAudioRecordingCallback() on a callback"
+                        + " that was never registered");
+                return;
+            }
+            final int oldCbCount = mRecordCallbackList.size();
+            if (removeRecordCallback_sync(cb)) {
+                final int newCbCount = mRecordCallbackList.size();
+                if ((oldCbCount > 0) && (newCbCount == 0)) {
+                    // last callback removed: unregister binder for callbacks
+                    final IAudioService service = getService();
+                    try {
+                        service.unregisterRecordingCallback(mRecCb);
+                    } catch (RemoteException e) {
+                        throw e.rethrowFromSystemServer();
+                    }
+                }
+            } else {
+                Log.w(TAG, "attempt to call unregisterAudioRecordingCallback() on a callback"
+                        + " already unregistered or never registered");
+            }
+        }
+    }
+
+    /**
+     * Returns the current active audio recording configurations of the device.
+     * @return a non-null list of recording configurations. An empty list indicates there is
+     *     no recording active when queried.
+     * @see AudioRecordingConfiguration
+     */
+    public @NonNull List<AudioRecordingConfiguration> getActiveRecordingConfigurations() {
+        try {
+            // straight pass-through to the audio service
+            return getService().getActiveRecordingConfigurations();
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+ /**
+ * constants for the recording events, to keep in sync
+ * with frameworks/av/include/media/AudioPolicy.h
+ */
+ /** @hide */
+ public final static int RECORD_CONFIG_EVENT_START = 1;
+ /** @hide */
+ public final static int RECORD_CONFIG_EVENT_STOP = 0;
+
+ /**
+ * All operations on this list are sync'd on mRecordCallbackLock.
+ * List is lazy-initialized in
+ * {@link #registerAudioRecordingCallback(AudioRecordingCallback, Handler)}.
+ * List can be null.
+ */
+ private List<AudioRecordingCallbackInfo> mRecordCallbackList;
+ private final Object mRecordCallbackLock = new Object();
+
+    /**
+     * Returns true if the given callback is already present in mRecordCallbackList.
+     * Must be called synchronized on mRecordCallbackLock.
+     */
+    private boolean hasRecordCallback_sync(@NonNull AudioRecordingCallback cb) {
+        if (mRecordCallbackList == null) {
+            return false;
+        }
+        for (AudioRecordingCallbackInfo info : mRecordCallbackList) {
+            if (cb.equals(info.mCb)) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+ /**
+ * Must be called synchronized on mRecordCallbackLock
+ */
+ private boolean removeRecordCallback_sync(@NonNull AudioRecordingCallback cb) {
+ if (mRecordCallbackList != null) {
+ for (int i=0 ; i < mRecordCallbackList.size() ; i++) {
+ if (cb.equals(mRecordCallbackList.get(i).mCb)) {
+ mRecordCallbackList.remove(i);
+ return true;
+ }
+ }
+ }
+ return false;
+ }
+
+    /**
+     * Binder stub registered with the audio service when the first recording callback is added.
+     * Fans the service's notification out to every registered callback by posting a message
+     * to that callback's handler (dispatch never runs the app callback on the binder thread).
+     */
+    private final IRecordingConfigDispatcher mRecCb = new IRecordingConfigDispatcher.Stub() {
+        @Override
+        public void dispatchRecordingConfigChange(List<AudioRecordingConfiguration> configs) {
+            synchronized(mRecordCallbackLock) {
+                if (mRecordCallbackList != null) {
+                    for (int i=0 ; i < mRecordCallbackList.size() ; i++) {
+                        final AudioRecordingCallbackInfo arci = mRecordCallbackList.get(i);
+                        if (arci.mHandler != null) {
+                            // post the callback + configs as the message payload
+                            final Message m = arci.mHandler.obtainMessage(
+                                    MSSG_RECORDING_CONFIG_CHANGE/*what*/,
+                                    new RecordConfigChangeCallbackData(arci.mCb, configs)/*obj*/);
+                            arci.mHandler.sendMessage(m);
+                        }
+                    }
+                }
+            }
+        }
+
+    };
+
+ //=====================================================================
+
+    /**
+     * @hide
+     * Reload audio settings. This method is called by Settings backup
+     * agent when audio settings are restored and causes the AudioService
+     * to read and apply restored settings.
+     */
+    public void reloadAudioSettings() {
+        try {
+            getService().reloadAudioSettings();
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+    /**
+     * @hide
+     * Notifies AudioService that it is connected to an A2DP device that supports absolute volume,
+     * so that AudioService can send volume change events to the A2DP device, rather than handling
+     * them.
+     * @param address address of the A2DP device
+     * @param support true if the device supports absolute volume
+     */
+    public void avrcpSupportsAbsoluteVolume(String address, boolean support) {
+        try {
+            getService().avrcpSupportsAbsoluteVolume(address, support);
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+ /**
+ * {@hide}
+ */
+ private final IBinder mICallBack = new Binder();
+
+    /**
+     * Checks whether the phone is in silent mode, with or without vibrate.
+     *
+     * @return true if phone is in silent mode, with or without vibrate.
+     *
+     * @see #getRingerMode()
+     *
+     * @hide pending API Council approval
+     */
+    public boolean isSilentMode() {
+        final int mode = getRingerMode();
+        // silent covers both fully-silent and vibrate-only ringer modes
+        return (mode == RINGER_MODE_SILENT) || (mode == RINGER_MODE_VIBRATE);
+    }
+
+ // This section re-defines new output device constants from AudioSystem, because the AudioSystem
+ // class is not used by other parts of the framework, which instead use definitions and methods
+ // from AudioManager. AudioSystem is an internal class used by AudioManager and AudioService.
+
+ /** @hide
+ * The audio device code for representing "no device." */
+ public static final int DEVICE_NONE = AudioSystem.DEVICE_NONE;
+ /** @hide
+ * The audio output device code for the small speaker at the front of the device used
+ * when placing calls. Does not refer to an in-ear headphone without attached microphone,
+ * such as earbuds, earphones, or in-ear monitors (IEM). Those would be handled as a
+ * {@link #DEVICE_OUT_WIRED_HEADPHONE}.
+ */
+ public static final int DEVICE_OUT_EARPIECE = AudioSystem.DEVICE_OUT_EARPIECE;
+ /** @hide
+ * The audio output device code for the built-in speaker */
+ public static final int DEVICE_OUT_SPEAKER = AudioSystem.DEVICE_OUT_SPEAKER;
+ /** @hide
+ * The audio output device code for a wired headset with attached microphone */
+ public static final int DEVICE_OUT_WIRED_HEADSET = AudioSystem.DEVICE_OUT_WIRED_HEADSET;
+ /** @hide
+ * The audio output device code for a wired headphone without attached microphone */
+ public static final int DEVICE_OUT_WIRED_HEADPHONE = AudioSystem.DEVICE_OUT_WIRED_HEADPHONE;
+ /** @hide
+ * The audio output device code for a USB headphone with attached microphone */
+ public static final int DEVICE_OUT_USB_HEADSET = AudioSystem.DEVICE_OUT_USB_HEADSET;
+ /** @hide
+ * The audio output device code for generic Bluetooth SCO, for voice */
+ public static final int DEVICE_OUT_BLUETOOTH_SCO = AudioSystem.DEVICE_OUT_BLUETOOTH_SCO;
+ /** @hide
+ * The audio output device code for Bluetooth SCO Headset Profile (HSP) and
+ * Hands-Free Profile (HFP), for voice
+ */
+ public static final int DEVICE_OUT_BLUETOOTH_SCO_HEADSET =
+ AudioSystem.DEVICE_OUT_BLUETOOTH_SCO_HEADSET;
+ /** @hide
+ * The audio output device code for Bluetooth SCO car audio, for voice */
+ public static final int DEVICE_OUT_BLUETOOTH_SCO_CARKIT =
+ AudioSystem.DEVICE_OUT_BLUETOOTH_SCO_CARKIT;
+ /** @hide
+ * The audio output device code for generic Bluetooth A2DP, for music */
+ public static final int DEVICE_OUT_BLUETOOTH_A2DP = AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP;
+ /** @hide
+ * The audio output device code for Bluetooth A2DP headphones, for music */
+ public static final int DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES =
+ AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES;
+ /** @hide
+ * The audio output device code for Bluetooth A2DP external speaker, for music */
+ public static final int DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER =
+ AudioSystem.DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER;
+ /** @hide
+ * The audio output device code for S/PDIF (legacy) or HDMI
+ * Deprecated: replaced by {@link #DEVICE_OUT_HDMI} */
+ public static final int DEVICE_OUT_AUX_DIGITAL = AudioSystem.DEVICE_OUT_AUX_DIGITAL;
+ /** @hide
+ * The audio output device code for HDMI */
+ public static final int DEVICE_OUT_HDMI = AudioSystem.DEVICE_OUT_HDMI;
+ /** @hide
+ * The audio output device code for an analog wired headset attached via a
+ * docking station
+ */
+ public static final int DEVICE_OUT_ANLG_DOCK_HEADSET = AudioSystem.DEVICE_OUT_ANLG_DOCK_HEADSET;
+ /** @hide
+ * The audio output device code for a digital wired headset attached via a
+ * docking station
+ */
+ public static final int DEVICE_OUT_DGTL_DOCK_HEADSET = AudioSystem.DEVICE_OUT_DGTL_DOCK_HEADSET;
+ /** @hide
+ * The audio output device code for a USB audio accessory. The accessory is in USB host
+ * mode and the Android device in USB device mode
+ */
+ public static final int DEVICE_OUT_USB_ACCESSORY = AudioSystem.DEVICE_OUT_USB_ACCESSORY;
+ /** @hide
+ * The audio output device code for a USB audio device. The device is in USB device
+ * mode and the Android device in USB host mode
+ */
+ public static final int DEVICE_OUT_USB_DEVICE = AudioSystem.DEVICE_OUT_USB_DEVICE;
+ /** @hide
+ * The audio output device code for projection output.
+ */
+ public static final int DEVICE_OUT_REMOTE_SUBMIX = AudioSystem.DEVICE_OUT_REMOTE_SUBMIX;
+ /** @hide
+     * The audio output device code for the telephony voice TX path.
+ */
+ public static final int DEVICE_OUT_TELEPHONY_TX = AudioSystem.DEVICE_OUT_TELEPHONY_TX;
+ /** @hide
+ * The audio output device code for an analog jack with line impedance detected.
+ */
+ public static final int DEVICE_OUT_LINE = AudioSystem.DEVICE_OUT_LINE;
+ /** @hide
+ * The audio output device code for HDMI Audio Return Channel.
+ */
+ public static final int DEVICE_OUT_HDMI_ARC = AudioSystem.DEVICE_OUT_HDMI_ARC;
+ /** @hide
+ * The audio output device code for S/PDIF digital connection.
+ */
+ public static final int DEVICE_OUT_SPDIF = AudioSystem.DEVICE_OUT_SPDIF;
+ /** @hide
+ * The audio output device code for built-in FM transmitter.
+ */
+ public static final int DEVICE_OUT_FM = AudioSystem.DEVICE_OUT_FM;
+ /** @hide
+ * This is not used as a returned value from {@link #getDevicesForStream}, but could be
+ * used in the future in a set method to select whatever default device is chosen by the
+ * platform-specific implementation.
+ */
+ public static final int DEVICE_OUT_DEFAULT = AudioSystem.DEVICE_OUT_DEFAULT;
+
+ /** @hide
+ * The audio input device code for default built-in microphone
+ */
+ public static final int DEVICE_IN_BUILTIN_MIC = AudioSystem.DEVICE_IN_BUILTIN_MIC;
+ /** @hide
+ * The audio input device code for a Bluetooth SCO headset
+ */
+ public static final int DEVICE_IN_BLUETOOTH_SCO_HEADSET =
+ AudioSystem.DEVICE_IN_BLUETOOTH_SCO_HEADSET;
+ /** @hide
+ * The audio input device code for wired headset microphone
+ */
+ public static final int DEVICE_IN_WIRED_HEADSET =
+ AudioSystem.DEVICE_IN_WIRED_HEADSET;
+ /** @hide
+ * The audio input device code for HDMI
+ */
+ public static final int DEVICE_IN_HDMI =
+ AudioSystem.DEVICE_IN_HDMI;
+ /** @hide
+ * The audio input device code for telephony voice RX path
+ */
+ public static final int DEVICE_IN_TELEPHONY_RX =
+ AudioSystem.DEVICE_IN_TELEPHONY_RX;
+ /** @hide
+ * The audio input device code for built-in microphone pointing to the back
+ */
+ public static final int DEVICE_IN_BACK_MIC =
+ AudioSystem.DEVICE_IN_BACK_MIC;
+ /** @hide
+ * The audio input device code for analog from a docking station
+ */
+ public static final int DEVICE_IN_ANLG_DOCK_HEADSET =
+ AudioSystem.DEVICE_IN_ANLG_DOCK_HEADSET;
+ /** @hide
+ * The audio input device code for digital from a docking station
+ */
+ public static final int DEVICE_IN_DGTL_DOCK_HEADSET =
+ AudioSystem.DEVICE_IN_DGTL_DOCK_HEADSET;
+ /** @hide
+ * The audio input device code for a USB audio accessory. The accessory is in USB host
+ * mode and the Android device in USB device mode
+ */
+ public static final int DEVICE_IN_USB_ACCESSORY =
+ AudioSystem.DEVICE_IN_USB_ACCESSORY;
+ /** @hide
+ * The audio input device code for a USB audio device. The device is in USB device
+ * mode and the Android device in USB host mode
+ */
+ public static final int DEVICE_IN_USB_DEVICE =
+ AudioSystem.DEVICE_IN_USB_DEVICE;
+ /** @hide
+ * The audio input device code for a FM radio tuner
+ */
+ public static final int DEVICE_IN_FM_TUNER = AudioSystem.DEVICE_IN_FM_TUNER;
+ /** @hide
+ * The audio input device code for a TV tuner
+ */
+ public static final int DEVICE_IN_TV_TUNER = AudioSystem.DEVICE_IN_TV_TUNER;
+ /** @hide
+ * The audio input device code for an analog jack with line impedance detected
+ */
+ public static final int DEVICE_IN_LINE = AudioSystem.DEVICE_IN_LINE;
+ /** @hide
+ * The audio input device code for a S/PDIF digital connection
+ */
+ public static final int DEVICE_IN_SPDIF = AudioSystem.DEVICE_IN_SPDIF;
+ /** @hide
+ * The audio input device code for audio loopback
+ */
+ public static final int DEVICE_IN_LOOPBACK = AudioSystem.DEVICE_IN_LOOPBACK;
+
+    /**
+     * Return true if the device code corresponds to an output device,
+     * i.e. its input-direction bit is not set.
+     * @hide
+     */
+    public static boolean isOutputDevice(int device)
+    {
+        final int directionBit = device & AudioSystem.DEVICE_BIT_IN;
+        return directionBit == 0;
+    }
+
+    /**
+     * Return true if the device code corresponds to an input device,
+     * i.e. its input-direction bit is set.
+     * @hide
+     */
+    public static boolean isInputDevice(int device)
+    {
+        final int directionBit = device & AudioSystem.DEVICE_BIT_IN;
+        return directionBit == AudioSystem.DEVICE_BIT_IN;
+    }
+
+
+ /**
+ * Return the enabled devices for the specified output stream type.
+ *
+ * @param streamType The stream type to query. One of
+ * {@link #STREAM_VOICE_CALL},
+ * {@link #STREAM_SYSTEM},
+ * {@link #STREAM_RING},
+ * {@link #STREAM_MUSIC},
+ * {@link #STREAM_ALARM},
+ * {@link #STREAM_NOTIFICATION},
+ * {@link #STREAM_DTMF},
+ * {@link #STREAM_ACCESSIBILITY}.
+ *
+ * @return The bit-mask "or" of audio output device codes for all enabled devices on this
+ * stream. Zero or more of
+ * {@link #DEVICE_OUT_EARPIECE},
+ * {@link #DEVICE_OUT_SPEAKER},
+ * {@link #DEVICE_OUT_WIRED_HEADSET},
+ * {@link #DEVICE_OUT_WIRED_HEADPHONE},
+ * {@link #DEVICE_OUT_BLUETOOTH_SCO},
+ * {@link #DEVICE_OUT_BLUETOOTH_SCO_HEADSET},
+ * {@link #DEVICE_OUT_BLUETOOTH_SCO_CARKIT},
+ * {@link #DEVICE_OUT_BLUETOOTH_A2DP},
+ * {@link #DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES},
+ * {@link #DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER},
+ * {@link #DEVICE_OUT_HDMI},
+ * {@link #DEVICE_OUT_ANLG_DOCK_HEADSET},
+     * {@link #DEVICE_OUT_DGTL_DOCK_HEADSET},
+     * {@link #DEVICE_OUT_USB_ACCESSORY},
+     * {@link #DEVICE_OUT_USB_DEVICE},
+     * {@link #DEVICE_OUT_REMOTE_SUBMIX},
+     * {@link #DEVICE_OUT_TELEPHONY_TX},
+     * {@link #DEVICE_OUT_LINE},
+     * {@link #DEVICE_OUT_HDMI_ARC},
+     * {@link #DEVICE_OUT_SPDIF},
+     * {@link #DEVICE_OUT_FM}.
+ * {@link #DEVICE_OUT_DEFAULT} is not used here.
+ *
+ * The implementation may support additional device codes beyond those listed, so
+ * the application should ignore any bits which it does not recognize.
+ * Note that the information may be imprecise when the implementation
+ * cannot distinguish whether a particular device is enabled.
+ *
+ * {@hide}
+ */
+    public int getDevicesForStream(int streamType) {
+        switch (streamType) {
+        // only the public stream types are forwarded to AudioSystem
+        case STREAM_VOICE_CALL:
+        case STREAM_SYSTEM:
+        case STREAM_RING:
+        case STREAM_MUSIC:
+        case STREAM_ALARM:
+        case STREAM_NOTIFICATION:
+        case STREAM_DTMF:
+        case STREAM_ACCESSIBILITY:
+            return AudioSystem.getDevicesForStream(streamType);
+        default:
+            // unknown/internal stream types report no devices
+            return 0;
+        }
+    }
+
+    /**
+     * Indicate wired accessory connection state change.
+     * @param type type of device connected/disconnected (AudioManager.DEVICE_OUT_xxx)
+     * @param state new connection state: 1 connected, 0 disconnected
+     * @param address device address (passed through to AudioService unvalidated)
+     * @param name device name
+     * {@hide}
+     */
+    public void setWiredDeviceConnectionState(int type, int state, String address, String name) {
+        final IAudioService service = getService();
+        try {
+            service.setWiredDeviceConnectionState(type, state, address, name,
+                    mApplicationContext.getOpPackageName());
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+    /**
+     * Indicate A2DP source or sink connection state change.
+     * @param device Bluetooth device connected/disconnected
+     * @param state new connection state (BluetoothProfile.STATE_xxx)
+     * @param profile profile for the A2DP device
+     * (either {@link android.bluetooth.BluetoothProfile.A2DP} or
+     * {@link android.bluetooth.BluetoothProfile.A2DP_SINK})
+     * @return a delay in ms that the caller should wait before broadcasting
+     * BluetoothA2dp.ACTION_CONNECTION_STATE_CHANGED intent.
+     * {@hide}
+     */
+    public int setBluetoothA2dpDeviceConnectionState(BluetoothDevice device, int state,
+            int profile) {
+        try {
+            // return the service-computed delay directly
+            return getService().setBluetoothA2dpDeviceConnectionState(device, state, profile);
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+    /**
+     * Indicate A2DP device configuration has changed.
+     * @param device Bluetooth device whose configuration has changed.
+     * {@hide}
+     */
+    public void handleBluetoothA2dpDeviceConfigChange(BluetoothDevice device) {
+        try {
+            getService().handleBluetoothA2dpDeviceConfigChange(device);
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+    /** Returns the system ringtone player interface. {@hide} */
+    public IRingtonePlayer getRingtonePlayer() {
+        final IAudioService service = getService();
+        try {
+            return service.getRingtonePlayer();
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+ /**
+ * Used as a key for {@link #getProperty} to request the native or optimal output sample rate
+ * for this device's low latency output stream, in decimal Hz. Latency-sensitive apps
+ * should use this value as a default, and offer the user the option to override it.
+ * The low latency output stream is typically either the device's primary output stream,
+ * or another output stream with smaller buffers.
+ */
+ // FIXME Deprecate
+ public static final String PROPERTY_OUTPUT_SAMPLE_RATE =
+ "android.media.property.OUTPUT_SAMPLE_RATE";
+
+ /**
+ * Used as a key for {@link #getProperty} to request the native or optimal output buffer size
+ * for this device's low latency output stream, in decimal PCM frames. Latency-sensitive apps
+ * should use this value as a minimum, and offer the user the option to override it.
+ * The low latency output stream is typically either the device's primary output stream,
+ * or another output stream with smaller buffers.
+ */
+ // FIXME Deprecate
+ public static final String PROPERTY_OUTPUT_FRAMES_PER_BUFFER =
+ "android.media.property.OUTPUT_FRAMES_PER_BUFFER";
+
+ /**
+ * Used as a key for {@link #getProperty} to determine if the default microphone audio source
+ * supports near-ultrasound frequencies (range of 18 - 21 kHz).
+ */
+ public static final String PROPERTY_SUPPORT_MIC_NEAR_ULTRASOUND =
+ "android.media.property.SUPPORT_MIC_NEAR_ULTRASOUND";
+
+ /**
+ * Used as a key for {@link #getProperty} to determine if the default speaker audio path
+ * supports near-ultrasound frequencies (range of 18 - 21 kHz).
+ */
+ public static final String PROPERTY_SUPPORT_SPEAKER_NEAR_ULTRASOUND =
+ "android.media.property.SUPPORT_SPEAKER_NEAR_ULTRASOUND";
+
+ /**
+ * Used as a key for {@link #getProperty} to determine if the unprocessed audio source is
+ * available and supported with the expected frequency range and level response.
+ */
+ public static final String PROPERTY_SUPPORT_AUDIO_SOURCE_UNPROCESSED =
+ "android.media.property.SUPPORT_AUDIO_SOURCE_UNPROCESSED";
+    /**
+     * Returns the value of the property with the specified key.
+     * @param key One of the strings corresponding to a property key: either
+     *     {@link #PROPERTY_OUTPUT_SAMPLE_RATE},
+     *     {@link #PROPERTY_OUTPUT_FRAMES_PER_BUFFER},
+     *     {@link #PROPERTY_SUPPORT_MIC_NEAR_ULTRASOUND},
+     *     {@link #PROPERTY_SUPPORT_SPEAKER_NEAR_ULTRASOUND}, or
+     *     {@link #PROPERTY_SUPPORT_AUDIO_SOURCE_UNPROCESSED}.
+     * @return A string representing the associated value for that property key,
+     *     or null if there is no value for that key.
+     */
+    public String getProperty(String key) {
+        // note: CONSTANT.equals(key) keeps each comparison null-safe for a null key
+        if (PROPERTY_OUTPUT_SAMPLE_RATE.equals(key)) {
+            int outputSampleRate = AudioSystem.getPrimaryOutputSamplingRate();
+            // non-positive rates indicate the value is unavailable
+            return outputSampleRate > 0 ? Integer.toString(outputSampleRate) : null;
+        } else if (PROPERTY_OUTPUT_FRAMES_PER_BUFFER.equals(key)) {
+            int outputFramesPerBuffer = AudioSystem.getPrimaryOutputFrameCount();
+            return outputFramesPerBuffer > 0 ? Integer.toString(outputFramesPerBuffer) : null;
+        } else if (PROPERTY_SUPPORT_MIC_NEAR_ULTRASOUND.equals(key)) {
+            // Will throw a RuntimeException Resources.NotFoundException if this config value is
+            // not found.
+            return String.valueOf(getContext().getResources().getBoolean(
+                    com.android.internal.R.bool.config_supportMicNearUltrasound));
+        } else if (PROPERTY_SUPPORT_SPEAKER_NEAR_ULTRASOUND.equals(key)) {
+            return String.valueOf(getContext().getResources().getBoolean(
+                    com.android.internal.R.bool.config_supportSpeakerNearUltrasound));
+        } else if (PROPERTY_SUPPORT_AUDIO_SOURCE_UNPROCESSED.equals(key)) {
+            return String.valueOf(getContext().getResources().getBoolean(
+                    com.android.internal.R.bool.config_supportAudioSourceUnprocessed));
+        } else {
+            // null or unknown key
+            return null;
+        }
+    }
+
+    /**
+     * Returns the estimated latency for the given stream type in milliseconds.
+     *
+     * DO NOT UNHIDE. The existing approach for doing A/V sync has too many problems. We need
+     * a better solution.
+     * @param streamType the stream type whose output latency is queried
+     * @hide
+     */
+    public int getOutputLatency(int streamType) {
+        // direct native query; no binder round-trip
+        return AudioSystem.getOutputLatency(streamType);
+    }
+
+    /**
+     * Registers a global volume controller interface. Currently limited to SystemUI.
+     *
+     * @hide
+     */
+    public void setVolumeController(IVolumeController controller) {
+        final IAudioService service = getService();
+        try {
+            service.setVolumeController(controller);
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+    /**
+     * Notify audio manager about volume controller visibility changes.
+     * Currently limited to SystemUI.
+     *
+     * @hide
+     */
+    public void notifyVolumeControllerVisible(IVolumeController controller, boolean visible) {
+        final IAudioService service = getService();
+        try {
+            service.notifyVolumeControllerVisible(controller, visible);
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+    /**
+     * Only useful for volume controllers.
+     * @hide
+     */
+    public boolean isStreamAffectedByRingerMode(int streamType) {
+        final IAudioService service = getService();
+        try {
+            return service.isStreamAffectedByRingerMode(streamType);
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+    /**
+     * Only useful for volume controllers.
+     * @hide
+     */
+    public boolean isStreamAffectedByMute(int streamType) {
+        final IAudioService service = getService();
+        try {
+            return service.isStreamAffectedByMute(streamType);
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+    /**
+     * Only useful for volume controllers.
+     * @hide
+     */
+    public void disableSafeMediaVolume() {
+        final IAudioService service = getService();
+        try {
+            service.disableSafeMediaVolume(mApplicationContext.getOpPackageName());
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+    /**
+     * Only useful for volume controllers.
+     * @hide
+     */
+    public void setRingerModeInternal(int ringerMode) {
+        final IAudioService service = getService();
+        try {
+            service.setRingerModeInternal(ringerMode, getContext().getOpPackageName());
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+    /**
+     * Only useful for volume controllers.
+     * @hide
+     */
+    public int getRingerModeInternal() {
+        final IAudioService service = getService();
+        try {
+            return service.getRingerModeInternal();
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+    /**
+     * Only useful for volume controllers.
+     * @hide
+     */
+    public void setVolumePolicy(VolumePolicy policy) {
+        final IAudioService service = getService();
+        try {
+            service.setVolumePolicy(policy);
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+    /**
+     * Set Hdmi Cec system audio mode.
+     *
+     * @param on whether to be on system audio mode
+     * @return output device type. 0 (DEVICE_NONE) if failed to set device.
+     * @hide
+     */
+    public int setHdmiSystemAudioSupported(boolean on) {
+        final IAudioService service = getService();
+        try {
+            return service.setHdmiSystemAudioSupported(on);
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+    /**
+     * Returns true if Hdmi Cec system audio mode is supported.
+     *
+     * @hide
+     */
+    @SystemApi
+    @SuppressLint("Doclava125") // FIXME is this still used?
+    public boolean isHdmiSystemAudioSupported() {
+        final IAudioService service = getService();
+        try {
+            return service.isHdmiSystemAudioSupported();
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+    /**
+     * Return codes for listAudioPorts(), createAudioPatch() ...
+     * These mirror the corresponding AudioSystem native status codes.
+     */
+
+    /** @hide
+     * CANDIDATE FOR PUBLIC API
+     */
+    public static final int SUCCESS = AudioSystem.SUCCESS;
+    /**
+     * A default error code.
+     */
+    public static final int ERROR = AudioSystem.ERROR;
+    /** @hide
+     * CANDIDATE FOR PUBLIC API
+     */
+    public static final int ERROR_BAD_VALUE = AudioSystem.BAD_VALUE;
+    /** @hide
+     * An operation was attempted that is not supported in the current state.
+     */
+    public static final int ERROR_INVALID_OPERATION = AudioSystem.INVALID_OPERATION;
+    /** @hide
+     * The caller lacks the permission required for the operation.
+     */
+    public static final int ERROR_PERMISSION_DENIED = AudioSystem.PERMISSION_DENIED;
+    /** @hide
+     * The native layer was not initialized.
+     */
+    public static final int ERROR_NO_INIT = AudioSystem.NO_INIT;
+    /**
+     * An error code indicating that the object reporting it is no longer valid and needs to
+     * be recreated.
+     */
+    public static final int ERROR_DEAD_OBJECT = AudioSystem.DEAD_OBJECT;
+
+    /**
+     * Returns a list of descriptors for all audio ports managed by the audio framework.
+     * Audio ports are nodes in the audio framework or audio hardware that can be configured
+     * or connected and disconnected with createAudioPatch() or releaseAudioPatch().
+     * See AudioPort for a list of attributes of each audio port.
+     * @param ports An AudioPort ArrayList where the list will be returned.
+     * @return {@link #SUCCESS} or an ERROR_* code from the port cache update.
+     * @hide
+     */
+    public static int listAudioPorts(ArrayList<AudioPort> ports) {
+        return updateAudioPortCache(ports, null, null);
+    }
+
+    /**
+     * Returns a list of descriptors for all audio ports managed by the audio framework as
+     * it was before the last update callback.
+     * @param ports An AudioPort ArrayList where the list will be returned.
+     * @return {@link #SUCCESS} or an ERROR_* code from the port cache update.
+     * @hide
+     */
+    public static int listPreviousAudioPorts(ArrayList<AudioPort> ports) {
+        return updateAudioPortCache(null, null, ports);
+    }
+
+    /**
+     * Specialized version of listAudioPorts() listing only audio devices (AudioDevicePort).
+     * @param devices receives the device ports; must not be null.
+     * @return {@link #SUCCESS}, or {@link #ERROR_BAD_VALUE} when devices is null, or the
+     *         error reported by the port cache update.
+     * @see listAudioPorts(ArrayList<AudioPort>)
+     * @hide
+     */
+    public static int listAudioDevicePorts(ArrayList<AudioDevicePort> devices) {
+        if (devices == null) {
+            return ERROR_BAD_VALUE;
+        }
+        final ArrayList<AudioPort> allPorts = new ArrayList<AudioPort>();
+        final int result = updateAudioPortCache(allPorts, null, null);
+        if (result != SUCCESS) {
+            return result;
+        }
+        filterDevicePorts(allPorts, devices);
+        return result;
+    }
+
+    /**
+     * Specialized version of listPreviousAudioPorts() listing only audio devices
+     * (AudioDevicePort).
+     * @param devices receives the device ports as of the previous update; must not be null.
+     * @return {@link #SUCCESS}, or {@link #ERROR_BAD_VALUE} when devices is null, or the
+     *         error reported by the port cache update.
+     * @see listPreviousAudioPorts(ArrayList<AudioPort>)
+     * @hide
+     */
+    public static int listPreviousAudioDevicePorts(ArrayList<AudioDevicePort> devices) {
+        if (devices == null) {
+            return ERROR_BAD_VALUE;
+        }
+        final ArrayList<AudioPort> previousPorts = new ArrayList<AudioPort>();
+        final int result = updateAudioPortCache(null, null, previousPorts);
+        if (result != SUCCESS) {
+            return result;
+        }
+        filterDevicePorts(previousPorts, devices);
+        return result;
+    }
+
+    /**
+     * Copies the AudioDevicePort entries of {@code ports} into {@code devices},
+     * discarding any previous contents of {@code devices}.
+     */
+    private static void filterDevicePorts(ArrayList<AudioPort> ports,
+                                          ArrayList<AudioDevicePort> devices) {
+        devices.clear();
+        for (AudioPort port : ports) {
+            if (port instanceof AudioDevicePort) {
+                devices.add((AudioDevicePort) port);
+            }
+        }
+    }
+
+    /**
+     * Create a connection between two or more devices. The framework will reject the request if
+     * device types are not compatible or the implementation does not support the requested
+     * configuration.
+     * NOTE: current implementation is limited to one source and one sink per patch.
+     * @param patch AudioPatch array where the newly created patch will be returned.
+     *              As input, if patch[0] is not null, the specified patch will be replaced by the
+     *              new patch created. This avoids calling releaseAudioPatch() when modifying a
+     *              patch and allows the implementation to optimize transitions.
+     * @param sources List of source audio ports. All must be AudioPort.ROLE_SOURCE.
+     * @param sinks   List of sink audio ports. All must be AudioPort.ROLE_SINK.
+     *
+     * @return - {@link #SUCCESS} if connection is successful.
+     *         - {@link #ERROR_BAD_VALUE} if incompatible device types are passed.
+     *         - {@link #ERROR_INVALID_OPERATION} if the requested connection is not supported.
+     *         - {@link #ERROR_PERMISSION_DENIED} if the client does not have permission to create
+     *         a patch.
+     *         - {@link #ERROR_DEAD_OBJECT} if the server process is dead
+     *         - {@link #ERROR} if patch cannot be connected for any other reason.
+     *
+     *         patch[0] contains the newly created patch
+     * @hide
+     */
+    public static int createAudioPatch(AudioPatch[] patch,
+                                       AudioPortConfig[] sources,
+                                       AudioPortConfig[] sinks) {
+        // Thin delegation: all validation and patching happens in the native framework.
+        return AudioSystem.createAudioPatch(patch, sources, sinks);
+    }
+
+    /**
+     * Releases an existing audio patch connection.
+     * @param patch The audio patch to disconnect.
+     * @return - {@link #SUCCESS} if disconnection is successful.
+     *         - {@link #ERROR_BAD_VALUE} if the specified patch does not exist.
+     *         - {@link #ERROR_PERMISSION_DENIED} if the client does not have permission to release
+     *         a patch.
+     *         - {@link #ERROR_DEAD_OBJECT} if the server process is dead
+     *         - {@link #ERROR} if patch cannot be released for any other reason.
+     * @hide
+     */
+    public static int releaseAudioPatch(AudioPatch patch) {
+        // Thin delegation to the native framework.
+        return AudioSystem.releaseAudioPatch(patch);
+    }
+
+    /**
+     * List all existing connections between audio ports.
+     * @param patches An AudioPatch array where the list will be returned.
+     * @return {@link #SUCCESS} or an ERROR_* code from the port cache update.
+     * @hide
+     */
+    public static int listAudioPatches(ArrayList<AudioPatch> patches) {
+        return updateAudioPortCache(null, patches, null);
+    }
+
+    /**
+     * Set the gain on the specified AudioPort. The AudioGainConfig config is built by
+     * AudioGain.buildConfig().
+     * @param port the port whose gain to change; must not be null.
+     * @param gain the gain configuration to apply; must not be null.
+     * @return {@link #SUCCESS} or an ERROR_* code ({@link #ERROR_BAD_VALUE} on null args).
+     * @hide
+     */
+    public static int setAudioPortGain(AudioPort port, AudioGainConfig gain) {
+        if (port == null || gain == null) {
+            return ERROR_BAD_VALUE;
+        }
+        // Build a config that keeps the port's currently active parameters and only
+        // swaps in the requested gain.
+        final AudioPortConfig current = port.activeConfig();
+        final AudioPortConfig updated = new AudioPortConfig(port, current.samplingRate(),
+                current.channelMask(), current.format(), gain);
+        updated.mConfigMask = AudioPortConfig.GAIN;
+        return AudioSystem.setAudioPortConfig(updated);
+    }
+
+    /**
+     * Listener registered by client to be notified upon new audio port connections,
+     * disconnections or attributes update.
+     * @hide
+     */
+    public interface OnAudioPortUpdateListener {
+        /**
+         * Callback method called upon audio port list update.
+         * @param portList the updated list of audio ports
+         */
+        public void onAudioPortListUpdate(AudioPort[] portList);
+
+        /**
+         * Callback method called upon audio patch list update.
+         * @param patchList the updated list of audio patches
+         */
+        public void onAudioPatchListUpdate(AudioPatch[] patchList);
+
+        /**
+         * Callback method called when the mediaserver dies.
+         */
+        public void onServiceDied();
+    }
+
+    /**
+     * Register an audio port list update listener.
+     * @param l the listener to register
+     * @hide
+     */
+    public void registerAudioPortUpdateListener(OnAudioPortUpdateListener l) {
+        // Lazily initializes the shared event handler before the first registration.
+        sAudioPortEventHandler.init();
+        sAudioPortEventHandler.registerListener(l);
+    }
+
+    /**
+     * Unregister an audio port list update listener.
+     * @param l the previously registered listener
+     * @hide
+     */
+    public void unregisterAudioPortUpdateListener(OnAudioPortUpdateListener l) {
+        sAudioPortEventHandler.unregisterListener(l);
+    }
+
+    //
+    // AudioPort implementation
+    //
+
+    // Generation value meaning the cached port/patch lists below are stale.
+    static final int AUDIOPORT_GENERATION_INIT = 0;
+    // NOTE(review): this boxed Integer doubles as the lock guarding the caches below, but
+    // it is reassigned while held (see updateAudioPortCache / resetAudioPortGeneration), so
+    // two threads may end up synchronizing on different instances — confirm this is intended.
+    static Integer sAudioPortGeneration = new Integer(AUDIOPORT_GENERATION_INIT);
+    static ArrayList<AudioPort> sAudioPortsCached = new ArrayList<AudioPort>();
+    static ArrayList<AudioPort> sPreviousAudioPortsCached = new ArrayList<AudioPort>();
+    static ArrayList<AudioPatch> sAudioPatchesCached = new ArrayList<AudioPatch>();
+
+    /**
+     * Marks the audio port cache stale and returns the generation that was current
+     * before the reset, forcing the next updateAudioPortCache() call to re-query.
+     */
+    static int resetAudioPortGeneration() {
+        int generation;
+        synchronized (sAudioPortGeneration) {
+            generation = sAudioPortGeneration;
+            // Reassigns the field that is also used as the lock; see NOTE at the declaration.
+            sAudioPortGeneration = AUDIOPORT_GENERATION_INIT;
+        }
+        return generation;
+    }
+
+    /**
+     * Refreshes the static port/patch caches from the native framework when stale, then
+     * copies the requested views into the caller-supplied lists.
+     * Any of the three list parameters may be null when that view is not wanted.
+     * @return {@link #SUCCESS} or the error returned by the native list queries.
+     */
+    static int updateAudioPortCache(ArrayList<AudioPort> ports, ArrayList<AudioPatch> patches,
+                                    ArrayList<AudioPort> previousPorts) {
+        sAudioPortEventHandler.init();
+        synchronized (sAudioPortGeneration) {
+
+            if (sAudioPortGeneration == AUDIOPORT_GENERATION_INIT) {
+                int[] patchGeneration = new int[1];
+                int[] portGeneration = new int[1];
+                int status;
+                ArrayList<AudioPort> newPorts = new ArrayList<AudioPort>();
+                ArrayList<AudioPatch> newPatches = new ArrayList<AudioPatch>();
+
+                // Retry until ports and patches were listed against the same native
+                // generation, i.e. no change slipped in between the two queries.
+                do {
+                    newPorts.clear();
+                    status = AudioSystem.listAudioPorts(newPorts, portGeneration);
+                    if (status != SUCCESS) {
+                        Log.w(TAG, "updateAudioPortCache: listAudioPorts failed");
+                        return status;
+                    }
+                    newPatches.clear();
+                    status = AudioSystem.listAudioPatches(newPatches, patchGeneration);
+                    if (status != SUCCESS) {
+                        Log.w(TAG, "updateAudioPortCache: listAudioPatches failed");
+                        return status;
+                    }
+                } while (patchGeneration[0] != portGeneration[0]);
+
+                // Rebind every patch endpoint to the matching port object from the fresh
+                // list (the JNI-returned configs reference ports of the wrong subclass).
+                for (int i = 0; i < newPatches.size(); i++) {
+                    for (int j = 0; j < newPatches.get(i).sources().length; j++) {
+                        AudioPortConfig portCfg = updatePortConfig(newPatches.get(i).sources()[j],
+                                                                   newPorts);
+                        newPatches.get(i).sources()[j] = portCfg;
+                    }
+                    for (int j = 0; j < newPatches.get(i).sinks().length; j++) {
+                        AudioPortConfig portCfg = updatePortConfig(newPatches.get(i).sinks()[j],
+                                                                   newPorts);
+                        newPatches.get(i).sinks()[j] = portCfg;
+                    }
+                }
+                for (Iterator<AudioPatch> i = newPatches.iterator(); i.hasNext(); ) {
+                    AudioPatch newPatch = i.next();
+                    boolean hasInvalidPort = false;
+                    for (AudioPortConfig portCfg : newPatch.sources()) {
+                        if (portCfg == null) {
+                            hasInvalidPort = true;
+                            break;
+                        }
+                    }
+                    for (AudioPortConfig portCfg : newPatch.sinks()) {
+                        if (portCfg == null) {
+                            hasInvalidPort = true;
+                            break;
+                        }
+                    }
+                    if (hasInvalidPort) {
+                        // Temporarily remove patches with invalid ports. One who created the patch
+                        // is responsible for dealing with the port change.
+                        i.remove();
+                    }
+                }
+
+                sPreviousAudioPortsCached = sAudioPortsCached;
+                sAudioPortsCached = newPorts;
+                sAudioPatchesCached = newPatches;
+                // NOTE(review): reassigns the lock object; see NOTE at the declaration.
+                sAudioPortGeneration = portGeneration[0];
+            }
+            if (ports != null) {
+                ports.clear();
+                ports.addAll(sAudioPortsCached);
+            }
+            if (patches != null) {
+                patches.clear();
+                patches.addAll(sAudioPatchesCached);
+            }
+            if (previousPorts != null) {
+                previousPorts.clear();
+                previousPorts.addAll(sPreviousAudioPortsCached);
+            }
+        }
+        return SUCCESS;
+    }
+
+    /**
+     * Rebuilds the given port configuration against the matching port object found in
+     * {@code ports}, so the config references the correctly-typed port instance.
+     * @return the rebuilt config, or null if the config's port is not in {@code ports}.
+     */
+    static AudioPortConfig updatePortConfig(AudioPortConfig portCfg, ArrayList<AudioPort> ports) {
+        AudioPort port = portCfg.port();
+        int k;
+        for (k = 0; k < ports.size(); k++) {
+            // compare handles because the port returned by JNI is not of the correct
+            // subclass
+            if (ports.get(k).handle().equals(port.handle())) {
+                port = ports.get(k);
+                break;
+            }
+        }
+        if (k == ports.size()) {
+            // this should never happen
+            Log.e(TAG, "updatePortConfig port not found for handle: "+port.handle().id());
+            return null;
+        }
+        // Rebuild the gain config (if any) against the matched port's gain controller.
+        AudioGainConfig gainCfg = portCfg.gain();
+        if (gainCfg != null) {
+            AudioGain gain = port.gain(gainCfg.index());
+            gainCfg = gain.buildConfig(gainCfg.mode(),
+                                       gainCfg.channelMask(),
+                                       gainCfg.values(),
+                                       gainCfg.rampDurationMs());
+        }
+        return port.buildConfig(portCfg.samplingRate(),
+                                portCfg.channelMask(),
+                                portCfg.format(),
+                                gainCfg);
+    }
+
+    // Port-update listener installed while at least one AudioDeviceCallback is registered.
+    private OnAmPortUpdateListener mPortListener = null;
+
+    /**
+     * The message sent to apps when the contents of the device list changes if they provide
+     * a {@link Handler} object to registerAudioDeviceCallback().
+     */
+    private final static int MSG_DEVICES_CALLBACK_REGISTERED = 0;
+    private final static int MSG_DEVICES_DEVICES_ADDED = 1;
+    private final static int MSG_DEVICES_DEVICES_REMOVED = 2;
+
+    /**
+     * The list of {@link AudioDeviceCallback} objects to receive add/remove notifications.
+     */
+    private ArrayMap<AudioDeviceCallback, NativeEventHandlerDelegate>
+        mDeviceCallbacks =
+            new ArrayMap<AudioDeviceCallback, NativeEventHandlerDelegate>();
+
+    /**
+     * The following are flags to allow users of {@link AudioManager#getDevices(int)} to filter
+     * the results list to only those device types they are interested in.
+     */
+    /**
+     * Specifies to the {@link AudioManager#getDevices(int)} method to include
+     * source (i.e. input) audio devices.
+     */
+    public static final int GET_DEVICES_INPUTS    = 0x0001;
+
+    /**
+     * Specifies to the {@link AudioManager#getDevices(int)} method to include
+     * sink (i.e. output) audio devices.
+     */
+    public static final int GET_DEVICES_OUTPUTS   = 0x0002;
+
+    /**
+     * Specifies to the {@link AudioManager#getDevices(int)} method to include both
+     * source and sink devices.
+     */
+    public static final int GET_DEVICES_ALL = GET_DEVICES_OUTPUTS | GET_DEVICES_INPUTS;
+
+ /**
+ * Determines if a given AudioDevicePort meets the specified filter criteria.
+ * @param port The port to test.
+ * @param flags A set of bitflags specifying the criteria to test.
+ * @see {@link GET_DEVICES_OUTPUTS} and {@link GET_DEVICES_INPUTS}
+ **/
+ private static boolean checkFlags(AudioDevicePort port, int flags) {
+ return port.role() == AudioPort.ROLE_SINK && (flags & GET_DEVICES_OUTPUTS) != 0 ||
+ port.role() == AudioPort.ROLE_SOURCE && (flags & GET_DEVICES_INPUTS) != 0;
+ }
+
+ private static boolean checkTypes(AudioDevicePort port) {
+ return AudioDeviceInfo.convertInternalDeviceToDeviceType(port.type()) !=
+ AudioDeviceInfo.TYPE_UNKNOWN &&
+ port.type() != AudioSystem.DEVICE_IN_BACK_MIC;
+ }
+
+    /**
+     * Returns an array of {@link AudioDeviceInfo} objects corresponding to the audio devices
+     * currently connected to the system and meeting the criteria specified in the
+     * <code>flags</code> parameter.
+     * @param flags A set of bitflags specifying the criteria to test.
+     * @see #GET_DEVICES_OUTPUTS
+     * @see #GET_DEVICES_INPUTS
+     * @see #GET_DEVICES_ALL
+     * @return A (possibly zero-length) array of AudioDeviceInfo objects.
+     */
+    public AudioDeviceInfo[] getDevices(int flags) {
+        return getDevicesStatic(flags);
+    }
+
+ /**
+ * Does the actual computation to generate an array of (externally-visible) AudioDeviceInfo
+ * objects from the current (internal) AudioDevicePort list.
+ */
+ private static AudioDeviceInfo[]
+ infoListFromPortList(ArrayList<AudioDevicePort> ports, int flags) {
+
+ // figure out how many AudioDeviceInfo we need space for...
+ int numRecs = 0;
+ for (AudioDevicePort port : ports) {
+ if (checkTypes(port) && checkFlags(port, flags)) {
+ numRecs++;
+ }
+ }
+
+ // Now load them up...
+ AudioDeviceInfo[] deviceList = new AudioDeviceInfo[numRecs];
+ int slot = 0;
+ for (AudioDevicePort port : ports) {
+ if (checkTypes(port) && checkFlags(port, flags)) {
+ deviceList[slot++] = new AudioDeviceInfo(port);
+ }
+ }
+
+ return deviceList;
+ }
+
+ /*
+ * Calculate the list of ports that are in ports_B, but not in ports_A. This is used by
+ * the add/remove callback mechanism to provide a list of the newly added or removed devices
+ * rather than the whole list and make the app figure it out.
+ * Note that calling this method with:
+ * ports_A == PREVIOUS_ports and ports_B == CURRENT_ports will calculated ADDED ports.
+ * ports_A == CURRENT_ports and ports_B == PREVIOUS_ports will calculated REMOVED ports.
+ */
+ private static AudioDeviceInfo[] calcListDeltas(
+ ArrayList<AudioDevicePort> ports_A, ArrayList<AudioDevicePort> ports_B, int flags) {
+
+ ArrayList<AudioDevicePort> delta_ports = new ArrayList<AudioDevicePort>();
+
+ AudioDevicePort cur_port = null;
+ for (int cur_index = 0; cur_index < ports_B.size(); cur_index++) {
+ boolean cur_port_found = false;
+ cur_port = ports_B.get(cur_index);
+ for (int prev_index = 0;
+ prev_index < ports_A.size() && !cur_port_found;
+ prev_index++) {
+ cur_port_found = (cur_port.id() == ports_A.get(prev_index).id());
+ }
+
+ if (!cur_port_found) {
+ delta_ports.add(cur_port);
+ }
+ }
+
+ return infoListFromPortList(delta_ports, flags);
+ }
+
+    /**
+     * Generates a list of AudioDeviceInfo objects corresponding to the audio devices currently
+     * connected to the system and meeting the criteria specified in the <code>flags</code>
+     * parameter.
+     * This is an internal function. The public API front is getDevices(int).
+     * @param flags A set of bitflags specifying the criteria to test.
+     * @see #GET_DEVICES_OUTPUTS
+     * @see #GET_DEVICES_INPUTS
+     * @see #GET_DEVICES_ALL
+     * @return A (possibly zero-length) array of AudioDeviceInfo objects.
+     * @hide
+     */
+    public static AudioDeviceInfo[] getDevicesStatic(int flags) {
+        final ArrayList<AudioDevicePort> ports = new ArrayList<AudioDevicePort>();
+        if (AudioManager.listAudioDevicePorts(ports) != AudioManager.SUCCESS) {
+            // Query failed: still honor the "always return an array" contract.
+            return new AudioDeviceInfo[0];
+        }
+        return infoListFromPortList(ports, flags);
+    }
+
+    /**
+     * Registers an {@link AudioDeviceCallback} object to receive notifications of changes
+     * to the set of connected audio devices.
+     * @param callback The {@link AudioDeviceCallback} object to receive connect/disconnect
+     * notifications.
+     * @param handler Specifies the {@link Handler} object for the thread on which to execute
+     * the callback. If <code>null</code>, the {@link Handler} associated with the main
+     * {@link Looper} will be used.
+     */
+    public void registerAudioDeviceCallback(AudioDeviceCallback callback,
+            android.os.Handler handler) {
+        synchronized (mDeviceCallbacks) {
+            if (callback != null && !mDeviceCallbacks.containsKey(callback)) {
+                // First registration: start listening for port updates so callbacks fire.
+                if (mDeviceCallbacks.size() == 0) {
+                    if (mPortListener == null) {
+                        mPortListener = new OnAmPortUpdateListener();
+                    }
+                    registerAudioPortUpdateListener(mPortListener);
+                }
+                NativeEventHandlerDelegate delegate =
+                        new NativeEventHandlerDelegate(callback, handler);
+                mDeviceCallbacks.put(callback, delegate);
+                // Deliver the current device list to the newly registered callback.
+                broadcastDeviceListChange(delegate.getHandler());
+            }
+        }
+    }
+
+    /**
+     * Unregisters an {@link AudioDeviceCallback} object which has been previously registered
+     * to receive notifications of changes to the set of connected audio devices.
+     * @param callback The {@link AudioDeviceCallback} object that was previously registered
+     * with {@link AudioManager#registerAudioDeviceCallback} to be unregistered.
+     */
+    public void unregisterAudioDeviceCallback(AudioDeviceCallback callback) {
+        synchronized (mDeviceCallbacks) {
+            // remove() returns the delegate only when the callback was registered; once the
+            // last callback is gone, stop listening for port updates.
+            if (mDeviceCallbacks.remove(callback) != null && mDeviceCallbacks.size() == 0) {
+                unregisterAudioPortUpdateListener(mPortListener);
+            }
+        }
+    }
+
+    // Since we need to calculate the changes since THE LAST NOTIFICATION, and not since the
+    // (unpredictable) last time updateAudioPortCache() was called by someone, keep a list
+    // of the ports that exist at the time of the last notification.
+    // Updated at the end of each broadcastDeviceListChange() pass.
+    private ArrayList<AudioDevicePort> mPreviousPorts = new ArrayList<AudioDevicePort>();
+
+    /**
+     * Internal method to compute and generate add/remove messages and then send to any
+     * registered callbacks.
+     * @param handler when non-null, this is a registration confirmation: only that handler
+     *                receives the full current list; when null, add/remove deltas relative
+     *                to the last notification are broadcast to all registered callbacks.
+     */
+    private void broadcastDeviceListChange(Handler handler) {
+        int status;
+
+        // Get the new current set of ports
+        ArrayList<AudioDevicePort> current_ports = new ArrayList<AudioDevicePort>();
+        status = AudioManager.listAudioDevicePorts(current_ports);
+        if (status != AudioManager.SUCCESS) {
+            // Best-effort: silently skip the notification if the port query failed.
+            return;
+        }
+
+        if (handler != null) {
+            // This is the callback for the registration, so send the current list
+            AudioDeviceInfo[] deviceList =
+                    infoListFromPortList(current_ports, GET_DEVICES_ALL);
+            handler.sendMessage(
+                    Message.obtain(handler, MSG_DEVICES_CALLBACK_REGISTERED, deviceList));
+        } else {
+            AudioDeviceInfo[] added_devices =
+                    calcListDeltas(mPreviousPorts, current_ports, GET_DEVICES_ALL);
+            AudioDeviceInfo[] removed_devices =
+                    calcListDeltas(current_ports, mPreviousPorts, GET_DEVICES_ALL);
+
+            if (added_devices.length != 0 || removed_devices.length != 0) {
+                synchronized (mDeviceCallbacks) {
+                    for (int i = 0; i < mDeviceCallbacks.size(); i++) {
+                        handler = mDeviceCallbacks.valueAt(i).getHandler();
+                        if (handler != null) {
+                            if (added_devices.length != 0) {
+                                handler.sendMessage(Message.obtain(handler,
+                                                                   MSG_DEVICES_DEVICES_ADDED,
+                                                                   added_devices));
+                            }
+                            if (removed_devices.length != 0) {
+                                handler.sendMessage(Message.obtain(handler,
+                                                                   MSG_DEVICES_DEVICES_REMOVED,
+                                                                   removed_devices));
+                            }
+                        }
+                    }
+                }
+            }
+        }
+
+        // Remember this snapshot so the next broadcast can compute deltas against it.
+        mPreviousPorts = current_ports;
+    }
+
+    /**
+     * Handles Port list update notifications from the AudioManager.
+     * Translates port-list changes (and mediaserver death) into device add/remove
+     * broadcasts to the registered {@link AudioDeviceCallback}s.
+     */
+    private class OnAmPortUpdateListener implements AudioManager.OnAudioPortUpdateListener {
+        static final String TAG = "OnAmPortUpdateListener";
+        public void onAudioPortListUpdate(AudioPort[] portList) {
+            // null handler: broadcast deltas to every registered callback.
+            broadcastDeviceListChange(null);
+        }
+
+        /**
+         * Callback method called upon audio patch list update.
+         * Note: We don't do anything with Patches at this time, so ignore this notification.
+         * @param patchList the updated list of audio patches.
+         */
+        public void onAudioPatchListUpdate(AudioPatch[] patchList) {}
+
+        /**
+         * Callback method called when the mediaserver dies
+         */
+        public void onServiceDied() {
+            broadcastDeviceListChange(null);
+        }
+    }
+
+    //---------------------------------------------------------
+    // Inner classes
+    //--------------------
+    /**
+     * Helper class to handle the forwarding of native events to the appropriate listener
+     * (potentially) handled in a different thread.
+     */
+    private class NativeEventHandlerDelegate {
+        // Handler bound to the requested (or main) looper; null if no looper was available.
+        private final Handler mHandler;
+
+        NativeEventHandlerDelegate(final AudioDeviceCallback callback,
+                                   Handler handler) {
+            // find the looper for our new event handler
+            Looper looper;
+            if (handler != null) {
+                looper = handler.getLooper();
+            } else {
+                // no given handler, use the looper the addListener call was called in
+                looper = Looper.getMainLooper();
+            }
+
+            // construct the event handler with this looper
+            if (looper != null) {
+                // implement the event handler delegate
+                mHandler = new Handler(looper) {
+                    @Override
+                    public void handleMessage(Message msg) {
+                        switch(msg.what) {
+                        // Registration delivers the full list via the same "added" callback.
+                        case MSG_DEVICES_CALLBACK_REGISTERED:
+                        case MSG_DEVICES_DEVICES_ADDED:
+                            if (callback != null) {
+                                callback.onAudioDevicesAdded((AudioDeviceInfo[])msg.obj);
+                            }
+                            break;
+
+                        case MSG_DEVICES_DEVICES_REMOVED:
+                            if (callback != null) {
+                                callback.onAudioDevicesRemoved((AudioDeviceInfo[])msg.obj);
+                            }
+                            break;
+
+                        default:
+                            Log.e(TAG, "Unknown native event type: " + msg.what);
+                            break;
+                        }
+                    }
+                };
+            } else {
+                mHandler = null;
+            }
+        }
+
+        Handler getHandler() {
+            return mHandler;
+        }
+    }
+}
diff --git a/android/media/AudioManagerInternal.java b/android/media/AudioManagerInternal.java
new file mode 100644
index 00000000..0a1de33b
--- /dev/null
+++ b/android/media/AudioManagerInternal.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+import android.util.IntArray;
+import com.android.server.LocalServices;
+
+/**
+ * Class for system services to access extra AudioManager functionality. The
+ * AudioService is responsible for registering an implementation with
+ * {@link LocalServices}.
+ *
+ * @hide
+ */
+public abstract class AudioManagerInternal {
+
+    /** Adjusts the suggested stream's volume on behalf of the given uid. */
+    public abstract void adjustSuggestedStreamVolumeForUid(int streamType, int direction,
+            int flags, String callingPackage, int uid);
+
+    /** Adjusts the given stream's volume on behalf of the given uid. */
+    public abstract void adjustStreamVolumeForUid(int streamType, int direction, int flags,
+            String callingPackage, int uid);
+
+    // NOTE(review): the second parameter is named "direction" but, by analogy with
+    // AudioManager.setStreamVolume, presumably carries an index — confirm with AudioService.
+    public abstract void setStreamVolumeForUid(int streamType, int direction, int flags,
+            String callingPackage, int uid);
+
+    /** Installs the delegate consulted when ringer mode changes are evaluated. */
+    public abstract void setRingerModeDelegate(RingerModeDelegate delegate);
+
+    public abstract int getRingerModeInternal();
+
+    public abstract void setRingerModeInternal(int ringerMode, String caller);
+
+    public abstract void updateRingerModeAffectedStreamsInternal();
+
+    /** Publishes the uids of the currently enabled accessibility services. */
+    public abstract void setAccessibilityServiceUids(IntArray uids);
+
+    public interface RingerModeDelegate {
+        /** Called when external ringer mode is evaluated, returns the new internal ringer mode */
+        int onSetRingerModeExternal(int ringerModeOld, int ringerModeNew, String caller,
+                int ringerModeInternal, VolumePolicy policy);
+
+        /** Called when internal ringer mode is evaluated, returns the new external ringer mode */
+        int onSetRingerModeInternal(int ringerModeOld, int ringerModeNew, String caller,
+                int ringerModeExternal, VolumePolicy policy);
+
+        boolean canVolumeDownEnterSilent();
+
+        int getRingerModeAffectedStreams(int streams);
+    }
+}
diff --git a/android/media/AudioMixPort.java b/android/media/AudioMixPort.java
new file mode 100644
index 00000000..ba144bf4
--- /dev/null
+++ b/android/media/AudioMixPort.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * The AudioMixPort is a specialized type of AudioPort
+ * describing an audio mix or stream at an input or output stream of the audio
+ * framework.
+ * In addition to base audio port attributes, the mix descriptor contains:
+ * - the unique audio I/O handle assigned by AudioFlinger to this mix.
+ * @see AudioPort
+ * @hide
+ */
+
+public class AudioMixPort extends AudioPort {
+
+    // Unique audio I/O handle assigned by AudioFlinger to this mix.
+    private final int mIoHandle;
+
+    AudioMixPort(AudioHandle handle, int ioHandle, int role, String deviceName,
+            int[] samplingRates, int[] channelMasks, int[] channelIndexMasks,
+            int[] formats, AudioGain[] gains) {
+        super(handle, role, deviceName, samplingRates, channelMasks, channelIndexMasks,
+                formats, gains);
+        mIoHandle = ioHandle;
+    }
+
+    /**
+     * Build a specific configuration of this audio mix port for use by methods
+     * like AudioManager.connectAudioPatch().
+     */
+    public AudioMixPortConfig buildConfig(int samplingRate, int channelMask, int format,
+            AudioGainConfig gain) {
+        return new AudioMixPortConfig(this, samplingRate, channelMask, format, gain);
+    }
+
+    /**
+     * Get the audio I/O handle assigned by AudioFlinger to this mix.
+     * (Previous doc wrongly described this as returning a device type.)
+     */
+    public int ioHandle() {
+        return mIoHandle;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (o == null || !(o instanceof AudioMixPort)) {
+            return false;
+        }
+        AudioMixPort other = (AudioMixPort)o;
+        if (mIoHandle != other.ioHandle()) {
+            return false;
+        }
+
+        return super.equals(o);
+    }
+
+    /**
+     * equals() compares mIoHandle in addition to the superclass state, so hashCode()
+     * must incorporate it as well to preserve the equals/hashCode contract.
+     */
+    @Override
+    public int hashCode() {
+        return 31 * super.hashCode() + mIoHandle;
+    }
+}
diff --git a/android/media/AudioMixPortConfig.java b/android/media/AudioMixPortConfig.java
new file mode 100644
index 00000000..8eb9ef46
--- /dev/null
+++ b/android/media/AudioMixPortConfig.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * An AudioMixPortConfig describes a possible configuration of an output or input mixer.
+ * It is used to specify a sink or source when creating a connection with
+ * AudioManager.connectAudioPatch().
+ * An AudioMixPortConfig is obtained from AudioMixPort.buildConfig().
+ * @hide
+ */
+
+public class AudioMixPortConfig extends AudioPortConfig {
+
+    AudioMixPortConfig(AudioMixPort mixPort, int samplingRate, int channelMask, int format,
+            AudioGainConfig gain) {
+        // The upcast to AudioPort is implicit; no explicit cast needed.
+        super(mixPort, samplingRate, channelMask, format, gain);
+    }
+
+    /**
+     * Returns the audio mix port this AudioMixPortConfig was built from.
+     */
+    public AudioMixPort port() {
+        return (AudioMixPort) mPort;
+    }
+}
+
diff --git a/android/media/AudioPatch.java b/android/media/AudioPatch.java
new file mode 100644
index 00000000..6c70213a
--- /dev/null
+++ b/android/media/AudioPatch.java
@@ -0,0 +1,83 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+
+/**
+ * An AudioPatch describes a connection between audio sources and audio sinks.
+ * An audio source can be an output mix (playback AudioBus) or an input device (microphone).
+ * An audio sink can be an output device (speaker) or an input mix (capture AudioBus).
+ * An AudioPatch is created by AudioManager.createAudioPatch() and released by
+ * AudioManager.releaseAudioPatch()
+ * It contains the list of source and sink AudioPortConfig showing audio port configurations
+ * being connected.
+ * @hide
+ */
+public class AudioPatch {
+
+    // Unique patch handle assigned by the audio framework.
+    private final AudioHandle mHandle;
+    private final AudioPortConfig[] mSources;
+    private final AudioPortConfig[] mSinks;
+
+    AudioPatch(AudioHandle patchHandle, AudioPortConfig[] sources, AudioPortConfig[] sinks) {
+        mHandle = patchHandle;
+        mSources = sources;
+        mSinks = sinks;
+    }
+
+    /**
+     * Retrieve the list of sources of this audio patch.
+     */
+    public AudioPortConfig[] sources() {
+        return mSources;
+    }
+
+    /**
+     * Retrieve the list of sinks of this audio patch.
+     */
+    public AudioPortConfig[] sinks() {
+        return mSinks;
+    }
+
+    /**
+     * Get the system unique patch ID.
+     */
+    public int id() {
+        return mHandle.id();
+    }
+
+    @Override
+    public String toString() {
+        StringBuilder s = new StringBuilder();
+        s.append("mHandle: ");
+        s.append(mHandle.toString());
+
+        s.append(" mSources: {");
+        for (AudioPortConfig source : mSources) {
+            s.append(source.toString());
+            s.append(", ");
+        }
+        s.append("} mSinks: {");
+        for (AudioPortConfig sink : mSinks) {
+            s.append(sink.toString());
+            s.append(", ");
+        }
+        s.append("}");
+
+        return s.toString();
+    }
+}
diff --git a/android/media/AudioPlaybackConfiguration.java b/android/media/AudioPlaybackConfiguration.java
new file mode 100644
index 00000000..14bc5551
--- /dev/null
+++ b/android/media/AudioPlaybackConfiguration.java
@@ -0,0 +1,552 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.SystemApi;
+import android.os.Binder;
+import android.os.IBinder;
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.os.RemoteException;
+import android.util.Log;
+
+import java.io.PrintWriter;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.util.Objects;
+
+/**
+ * The AudioPlaybackConfiguration class collects the information describing an audio playback
+ * session.
+ */
+public final class AudioPlaybackConfiguration implements Parcelable {
+ private static final String TAG = new String("AudioPlaybackConfiguration");
+
+ private static final boolean DEBUG = false;
+
+ /** @hide */
+ public static final int PLAYER_PIID_INVALID = -1;
+ /** @hide */
+ public static final int PLAYER_UPID_INVALID = -1;
+
+ // information about the implementation
+ /**
+ * @hide
+ * An unknown type of player
+ */
+ @SystemApi
+ public static final int PLAYER_TYPE_UNKNOWN = -1;
+ /**
+ * @hide
+ * Player backed by a java android.media.AudioTrack player
+ */
+ @SystemApi
+ public static final int PLAYER_TYPE_JAM_AUDIOTRACK = 1;
+ /**
+ * @hide
+ * Player backed by a java android.media.MediaPlayer player
+ */
+ @SystemApi
+ public static final int PLAYER_TYPE_JAM_MEDIAPLAYER = 2;
+ /**
+ * @hide
+ * Player backed by a java android.media.SoundPool player
+ */
+ @SystemApi
+ public static final int PLAYER_TYPE_JAM_SOUNDPOOL = 3;
+ /**
+ * @hide
+ * Player backed by a C OpenSL ES AudioPlayer player with a BufferQueue source
+ */
+ @SystemApi
+ public static final int PLAYER_TYPE_SLES_AUDIOPLAYER_BUFFERQUEUE = 11;
+ /**
+ * @hide
+ * Player backed by a C OpenSL ES AudioPlayer player with a URI or FD source
+ */
+ @SystemApi
+ public static final int PLAYER_TYPE_SLES_AUDIOPLAYER_URI_FD = 12;
+
+ /**
+ * @hide
+ * Player backed an AAudio player.
+ * Note this type is not in System API so it will not be returned in public API calls
+ */
+ // TODO unhide for SystemApi, update getPlayerType()
+ public static final int PLAYER_TYPE_AAUDIO = 13;
+
+ /**
+ * @hide
+ * Player backed a hardware source, whose state is visible in the Android audio policy manager.
+ * Note this type is not in System API so it will not be returned in public API calls
+ */
+ // TODO unhide for SystemApi, update getPlayerType()
+ public static final int PLAYER_TYPE_HW_SOURCE = 14;
+
+ /**
+ * @hide
+ * Player is a proxy for an audio player whose audio and state doesn't go through the Android
+ * audio framework.
+ * Note this type is not in System API so it will not be returned in public API calls
+ */
+ // TODO unhide for SystemApi, update getPlayerType()
+ public static final int PLAYER_TYPE_EXTERNAL_PROXY = 15;
+
+ /** @hide */
+ @IntDef({
+ PLAYER_TYPE_UNKNOWN,
+ PLAYER_TYPE_JAM_AUDIOTRACK,
+ PLAYER_TYPE_JAM_MEDIAPLAYER,
+ PLAYER_TYPE_JAM_SOUNDPOOL,
+ PLAYER_TYPE_SLES_AUDIOPLAYER_BUFFERQUEUE,
+ PLAYER_TYPE_SLES_AUDIOPLAYER_URI_FD,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface PlayerType {}
+
+ /**
+ * @hide
+ * An unknown player state
+ */
+ @SystemApi
+ public static final int PLAYER_STATE_UNKNOWN = -1;
+ /**
+ * @hide
+ * The resources of the player have been released, it cannot play anymore
+ */
+ @SystemApi
+ public static final int PLAYER_STATE_RELEASED = 0;
+ /**
+ * @hide
+ * The state of a player when it's created
+ */
+ @SystemApi
+ public static final int PLAYER_STATE_IDLE = 1;
+ /**
+ * @hide
+ * The state of a player that is actively playing
+ */
+ @SystemApi
+ public static final int PLAYER_STATE_STARTED = 2;
+ /**
+ * @hide
+ * The state of a player where playback is paused
+ */
+ @SystemApi
+ public static final int PLAYER_STATE_PAUSED = 3;
+ /**
+ * @hide
+ * The state of a player where playback is stopped
+ */
+ @SystemApi
+ public static final int PLAYER_STATE_STOPPED = 4;
+
+ /** @hide */
+ @IntDef({
+ PLAYER_STATE_UNKNOWN,
+ PLAYER_STATE_RELEASED,
+ PLAYER_STATE_IDLE,
+ PLAYER_STATE_STARTED,
+ PLAYER_STATE_PAUSED,
+ PLAYER_STATE_STOPPED
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface PlayerState {}
+
+ // immutable data
+ private final int mPlayerIId;
+
+ // not final due to anonymization step
+ private int mPlayerType;
+ private int mClientUid;
+ private int mClientPid;
+ // the IPlayer reference and death monitor
+ private IPlayerShell mIPlayerShell;
+
+ private int mPlayerState;
+ private AudioAttributes mPlayerAttr; // never null
+
+ /**
+ * Never use without initializing parameters afterwards
+ */
+ private AudioPlaybackConfiguration(int piid) {
+ mPlayerIId = piid;
+ mIPlayerShell = null;
+ }
+
+ /**
+ * @hide
+ */
+ public AudioPlaybackConfiguration(PlayerBase.PlayerIdCard pic, int piid, int uid, int pid) {
+ if (DEBUG) { Log.d(TAG, "new: piid=" + piid + " iplayer=" + pic.mIPlayer); }
+ mPlayerIId = piid;
+ mPlayerType = pic.mPlayerType;
+ mClientUid = uid;
+ mClientPid = pid;
+ mPlayerState = PLAYER_STATE_IDLE;
+ mPlayerAttr = pic.mAttributes;
+ if ((sPlayerDeathMonitor != null) && (pic.mIPlayer != null)) {
+ mIPlayerShell = new IPlayerShell(this, pic.mIPlayer);
+ } else {
+ mIPlayerShell = null;
+ }
+ }
+
+ /**
+ * @hide
+ */
+ public void init() {
+ if (mIPlayerShell != null) {
+ mIPlayerShell.monitorDeath();
+ }
+ }
+
+ // Note that this method is called server side, so no "privileged" information is ever sent
+ // to a client that is not supposed to have access to it.
+ /**
+ * @hide
+ * Creates a copy of the playback configuration that is stripped of any data enabling
+ * identification of which application it is associated with ("anonymized").
+ * @param toSanitize
+ */
+ public static AudioPlaybackConfiguration anonymizedCopy(AudioPlaybackConfiguration in) {
+ final AudioPlaybackConfiguration anonymCopy = new AudioPlaybackConfiguration(in.mPlayerIId);
+ anonymCopy.mPlayerState = in.mPlayerState;
+ // do not reuse the full attributes: only usage, content type and public flags are allowed
+ anonymCopy.mPlayerAttr = new AudioAttributes.Builder()
+ .setUsage(in.mPlayerAttr.getUsage())
+ .setContentType(in.mPlayerAttr.getContentType())
+ .setFlags(in.mPlayerAttr.getFlags())
+ .build();
+ // anonymized data
+ anonymCopy.mPlayerType = PLAYER_TYPE_UNKNOWN;
+ anonymCopy.mClientUid = PLAYER_UPID_INVALID;
+ anonymCopy.mClientPid = PLAYER_UPID_INVALID;
+ anonymCopy.mIPlayerShell = null;
+ return anonymCopy;
+ }
+
+ /**
+ * Return the {@link AudioAttributes} of the corresponding player.
+ * @return the audio attributes of the player
+ */
+ public AudioAttributes getAudioAttributes() {
+ return mPlayerAttr;
+ }
+
+ /**
+ * @hide
+ * Return the uid of the client application that created this player.
+ * @return the uid of the client
+ */
+ @SystemApi
+ public int getClientUid() {
+ return mClientUid;
+ }
+
+ /**
+ * @hide
+ * Return the pid of the client application that created this player.
+ * @return the pid of the client
+ */
+ @SystemApi
+ public int getClientPid() {
+ return mClientPid;
+ }
+
+ /**
+ * @hide
+ * Return the type of player linked to this configuration. The return value is one of
+ * {@link #PLAYER_TYPE_JAM_AUDIOTRACK}, {@link #PLAYER_TYPE_JAM_MEDIAPLAYER},
+ * {@link #PLAYER_TYPE_JAM_SOUNDPOOL}, {@link #PLAYER_TYPE_SLES_AUDIOPLAYER_BUFFERQUEUE},
+ * {@link #PLAYER_TYPE_SLES_AUDIOPLAYER_URI_FD}, or {@link #PLAYER_TYPE_UNKNOWN}.
+ * <br>Note that player types not exposed in the system API will be represented as
+ * {@link #PLAYER_TYPE_UNKNOWN}.
+ * @return the type of the player.
+ */
+ @SystemApi
+ public @PlayerType int getPlayerType() {
+ switch (mPlayerType) {
+ case PLAYER_TYPE_AAUDIO:
+ case PLAYER_TYPE_HW_SOURCE:
+ case PLAYER_TYPE_EXTERNAL_PROXY:
+ return PLAYER_TYPE_UNKNOWN;
+ default:
+ return mPlayerType;
+ }
+ }
+
+ /**
+ * @hide
+ * Return the current state of the player linked to this configuration. The return value is one
+ * of {@link #PLAYER_STATE_IDLE}, {@link #PLAYER_STATE_PAUSED}, {@link #PLAYER_STATE_STARTED},
+ * {@link #PLAYER_STATE_STOPPED}, {@link #PLAYER_STATE_RELEASED} or
+ * {@link #PLAYER_STATE_UNKNOWN}.
+ * @return the state of the player.
+ */
+ @SystemApi
+ public @PlayerState int getPlayerState() {
+ return mPlayerState;
+ }
+
+ /**
+ * @hide
+ * Return an identifier unique for the lifetime of the player.
+ * @return a player interface identifier
+ */
+ @SystemApi
+ public int getPlayerInterfaceId() {
+ return mPlayerIId;
+ }
+
+ /**
+ * @hide
+ * Return a proxy for the player associated with this playback configuration
+ * @return a proxy player
+ */
+ @SystemApi
+ public PlayerProxy getPlayerProxy() {
+ return mIPlayerShell == null ? null : new PlayerProxy(this);
+ }
+
+ /**
+ * @hide
+ * @return the IPlayer interface for the associated player
+ */
+ IPlayer getIPlayer() {
+ return mIPlayerShell == null ? null : mIPlayerShell.getIPlayer();
+ }
+
+ /**
+ * @hide
+ * Handle a change of audio attributes
+ * @param attr
+ */
+ public boolean handleAudioAttributesEvent(@NonNull AudioAttributes attr) {
+ final boolean changed = !attr.equals(mPlayerAttr);
+ mPlayerAttr = attr;
+ return changed;
+ }
+
+ /**
+ * @hide
+ * Handle a player state change
+ * @param event
+ * @return true if the state changed, false otherwise
+ */
+ public boolean handleStateEvent(int event) {
+ final boolean changed = (mPlayerState != event);
+ mPlayerState = event;
+ if ((event == PLAYER_STATE_RELEASED) && (mIPlayerShell != null)) {
+ mIPlayerShell.release();
+ }
+ return changed;
+ }
+
+ // To report IPlayer death from death recipient
+ /** @hide */
+ public interface PlayerDeathMonitor {
+ public void playerDeath(int piid);
+ }
+ /** @hide */
+ public static PlayerDeathMonitor sPlayerDeathMonitor;
+
+ private void playerDied() {
+ if (sPlayerDeathMonitor != null) {
+ sPlayerDeathMonitor.playerDeath(mPlayerIId);
+ }
+ }
+
+ /**
+ * @hide
+ * Returns true if the player is considered "active", i.e. actively playing, and thus
+ * in a state that should make it considered for the list public (sanitized) active playback
+ * configurations
+ * @return true if active
+ */
+ public boolean isActive() {
+ switch (mPlayerState) {
+ case PLAYER_STATE_STARTED:
+ return true;
+ case PLAYER_STATE_UNKNOWN:
+ case PLAYER_STATE_RELEASED:
+ case PLAYER_STATE_IDLE:
+ case PLAYER_STATE_PAUSED:
+ case PLAYER_STATE_STOPPED:
+ default:
+ return false;
+ }
+ }
+
+ /**
+ * @hide
+ * For AudioService dump
+ * @param pw
+ */
+ public void dump(PrintWriter pw) {
+ pw.println(" " + toLogFriendlyString(this));
+ }
+
+ /**
+ * @hide
+ */
+ public static String toLogFriendlyString(AudioPlaybackConfiguration apc) {
+ return new String("ID:" + apc.mPlayerIId
+ + " -- type:" + toLogFriendlyPlayerType(apc.mPlayerType)
+ + " -- u/pid:" + apc.mClientUid +"/" + apc.mClientPid
+ + " -- state:" + toLogFriendlyPlayerState(apc.mPlayerState)
+ + " -- attr:" + apc.mPlayerAttr);
+ }
+
+ public static final Parcelable.Creator<AudioPlaybackConfiguration> CREATOR
+ = new Parcelable.Creator<AudioPlaybackConfiguration>() {
+ /**
+ * Rebuilds an AudioPlaybackConfiguration previously stored with writeToParcel().
+ * @param p Parcel object to read the AudioPlaybackConfiguration from
+ * @return a new AudioPlaybackConfiguration created from the data in the parcel
+ */
+ public AudioPlaybackConfiguration createFromParcel(Parcel p) {
+ return new AudioPlaybackConfiguration(p);
+ }
+ public AudioPlaybackConfiguration[] newArray(int size) {
+ return new AudioPlaybackConfiguration[size];
+ }
+ };
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(mPlayerIId, mPlayerType, mClientUid, mClientPid);
+ }
+
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ @Override
+ public void writeToParcel(Parcel dest, int flags) {
+ dest.writeInt(mPlayerIId);
+ dest.writeInt(mPlayerType);
+ dest.writeInt(mClientUid);
+ dest.writeInt(mClientPid);
+ dest.writeInt(mPlayerState);
+ mPlayerAttr.writeToParcel(dest, 0);
+ dest.writeStrongInterface(mIPlayerShell == null ? null : mIPlayerShell.getIPlayer());
+ }
+
+ private AudioPlaybackConfiguration(Parcel in) {
+ mPlayerIId = in.readInt();
+ mPlayerType = in.readInt();
+ mClientUid = in.readInt();
+ mClientPid = in.readInt();
+ mPlayerState = in.readInt();
+ mPlayerAttr = AudioAttributes.CREATOR.createFromParcel(in);
+ final IPlayer p = IPlayer.Stub.asInterface(in.readStrongBinder());
+ mIPlayerShell = (p == null) ? null : new IPlayerShell(null, p);
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || !(o instanceof AudioPlaybackConfiguration)) return false;
+
+ AudioPlaybackConfiguration that = (AudioPlaybackConfiguration) o;
+
+ return ((mPlayerIId == that.mPlayerIId)
+ && (mPlayerType == that.mPlayerType)
+ && (mClientUid == that.mClientUid)
+ && (mClientPid == that.mClientPid));
+ }
+
+ //=====================================================================
+ // Inner class for corresponding IPlayer and its death monitoring
+ static final class IPlayerShell implements IBinder.DeathRecipient {
+
+ final AudioPlaybackConfiguration mMonitor; // never null
+ private IPlayer mIPlayer;
+
+ IPlayerShell(@NonNull AudioPlaybackConfiguration monitor, @NonNull IPlayer iplayer) {
+ mMonitor = monitor;
+ mIPlayer = iplayer;
+ }
+
+ void monitorDeath() {
+ try {
+ mIPlayer.asBinder().linkToDeath(this, 0);
+ } catch (RemoteException e) {
+ if (mMonitor != null) {
+ Log.w(TAG, "Could not link to client death for piid=" + mMonitor.mPlayerIId, e);
+ } else {
+ Log.w(TAG, "Could not link to client death", e);
+ }
+ }
+ }
+
+ IPlayer getIPlayer() {
+ return mIPlayer;
+ }
+
+ public void binderDied() {
+ if (mMonitor != null) {
+ if (DEBUG) { Log.i(TAG, "IPlayerShell binderDied for piid=" + mMonitor.mPlayerIId);}
+ mMonitor.playerDied();
+ } else if (DEBUG) { Log.i(TAG, "IPlayerShell binderDied"); }
+ }
+
+ void release() {
+ mIPlayer.asBinder().unlinkToDeath(this, 0);
+ }
+ }
+
+ //=====================================================================
+ // Utilities
+
+ /** @hide */
+ public static String toLogFriendlyPlayerType(int type) {
+ switch (type) {
+ case PLAYER_TYPE_UNKNOWN: return "unknown";
+ case PLAYER_TYPE_JAM_AUDIOTRACK: return "android.media.AudioTrack";
+ case PLAYER_TYPE_JAM_MEDIAPLAYER: return "android.media.MediaPlayer";
+ case PLAYER_TYPE_JAM_SOUNDPOOL: return "android.media.SoundPool";
+ case PLAYER_TYPE_SLES_AUDIOPLAYER_BUFFERQUEUE:
+ return "OpenSL ES AudioPlayer (Buffer Queue)";
+ case PLAYER_TYPE_SLES_AUDIOPLAYER_URI_FD:
+ return "OpenSL ES AudioPlayer (URI/FD)";
+ case PLAYER_TYPE_AAUDIO: return "AAudio";
+ case PLAYER_TYPE_HW_SOURCE: return "hardware source";
+ case PLAYER_TYPE_EXTERNAL_PROXY: return "external proxy";
+ default:
+ return "unknown player type - FIXME";
+ }
+ }
+
+ /** @hide */
+ public static String toLogFriendlyPlayerState(int state) {
+ switch (state) {
+ case PLAYER_STATE_UNKNOWN: return "unknown";
+ case PLAYER_STATE_RELEASED: return "released";
+ case PLAYER_STATE_IDLE: return "idle";
+ case PLAYER_STATE_STARTED: return "started";
+ case PLAYER_STATE_PAUSED: return "paused";
+ case PLAYER_STATE_STOPPED: return "stopped";
+ default:
+ return "unknown player state - FIXME";
+ }
+ }
+}
diff --git a/android/media/AudioPort.java b/android/media/AudioPort.java
new file mode 100644
index 00000000..19bf51d9
--- /dev/null
+++ b/android/media/AudioPort.java
@@ -0,0 +1,226 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * An audio port is a node of the audio framework or hardware that can be connected to or
+ * disconnected from another audio node to create a specific audio routing configuration.
+ * Examples of audio ports are an output device (speaker) or an output mix (see AudioMixPort).
+ * All attributes that are relevant for applications to make routing selection are described
+ * in an AudioPort, in particular:
+ * - possible channel mask configurations.
+ * - audio format (PCM 16bit, PCM 24bit...)
+ * - gain: a port can be associated with one or more gain controllers (see AudioGain).
+ *
+ * This object is always created by the framework and read only by applications.
+ * A list of all audio port descriptors currently available for applications to control
+ * is obtained by AudioManager.listAudioPorts().
+ * An application can obtain an AudioPortConfig for a valid configuration of this port
+ * by calling AudioPort.buildConfig() and use this configuration
+ * to create a connection between audio sinks and sources with AudioManager.connectAudioPatch()
+ *
+ * @hide
+ */
+public class AudioPort {
+ private static final String TAG = "AudioPort";
+
+ /**
+ * For use by the audio framework.
+ */
+ public static final int ROLE_NONE = 0;
+ /**
+ * The audio port is a source (produces audio)
+ */
+ public static final int ROLE_SOURCE = 1;
+ /**
+ * The audio port is a sink (consumes audio)
+ */
+ public static final int ROLE_SINK = 2;
+
+ /**
+ * audio port type for use by audio framework implementation
+ */
+ public static final int TYPE_NONE = 0;
+ /**
+ */
+ public static final int TYPE_DEVICE = 1;
+ /**
+ */
+ public static final int TYPE_SUBMIX = 2;
+ /**
+ */
+ public static final int TYPE_SESSION = 3;
+
+
+ AudioHandle mHandle;
+ protected final int mRole;
+ private final String mName;
+ private final int[] mSamplingRates;
+ private final int[] mChannelMasks;
+ private final int[] mChannelIndexMasks;
+ private final int[] mFormats;
+ private final AudioGain[] mGains;
+ private AudioPortConfig mActiveConfig;
+
+ AudioPort(AudioHandle handle, int role, String name,
+ int[] samplingRates, int[] channelMasks, int[] channelIndexMasks,
+ int[] formats, AudioGain[] gains) {
+
+ mHandle = handle;
+ mRole = role;
+ mName = name;
+ mSamplingRates = samplingRates;
+ mChannelMasks = channelMasks;
+ mChannelIndexMasks = channelIndexMasks;
+ mFormats = formats;
+ mGains = gains;
+ }
+
+ AudioHandle handle() {
+ return mHandle;
+ }
+
+ /**
+ * Get the system unique device ID.
+ */
+ public int id() {
+ return mHandle.id();
+ }
+
+
+ /**
+ * Get the audio port role
+ */
+ public int role() {
+ return mRole;
+ }
+
+ /**
+ * Get the human-readable name of this port. Perhaps an internal
+ * designation or an physical device.
+ */
+ public String name() {
+ return mName;
+ }
+
+ /**
+ * Get the list of supported sampling rates
+ * Empty array if sampling rate is not relevant for this audio port
+ */
+ public int[] samplingRates() {
+ return mSamplingRates;
+ }
+
+ /**
+ * Get the list of supported channel mask configurations
+ * (e.g AudioFormat.CHANNEL_OUT_STEREO)
+ * Empty array if channel mask is not relevant for this audio port
+ */
+ public int[] channelMasks() {
+ return mChannelMasks;
+ }
+
+ /**
+ * Get the list of supported channel index mask configurations
+ * (e.g 0x0003 means 2 channel, 0x000F means 4 channel....)
+ * Empty array if channel index mask is not relevant for this audio port
+ */
+ public int[] channelIndexMasks() {
+ return mChannelIndexMasks;
+ }
+
+ /**
+ * Get the list of supported audio format configurations
+ * (e.g AudioFormat.ENCODING_PCM_16BIT)
+ * Empty array if format is not relevant for this audio port
+ */
+ public int[] formats() {
+ return mFormats;
+ }
+
+ /**
+ * Get the list of gain descriptors
+ * Empty array if this port does not have gain control
+ */
+ public AudioGain[] gains() {
+ return mGains;
+ }
+
+ /**
+ * Get the gain descriptor at a given index
+ */
+ AudioGain gain(int index) {
+ if (index < 0 || index >= mGains.length) {
+ return null;
+ }
+ return mGains[index];
+ }
+
+ /**
+ * Build a specific configuration of this audio port for use by methods
+ * like AudioManager.connectAudioPatch().
+ * @param channelMask The desired channel mask. AudioFormat.CHANNEL_OUT_DEFAULT if no change
+ * from active configuration requested.
+ * @param format The desired audio format. AudioFormat.ENCODING_DEFAULT if no change
+ * from active configuration requested.
+ * @param gain The desired gain. null if no gain changed requested.
+ */
+ public AudioPortConfig buildConfig(int samplingRate, int channelMask, int format,
+ AudioGainConfig gain) {
+ return new AudioPortConfig(this, samplingRate, channelMask, format, gain);
+ }
+
+ /**
+ * Get currently active configuration of this audio port.
+ */
+ public AudioPortConfig activeConfig() {
+ return mActiveConfig;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o == null || !(o instanceof AudioPort)) {
+ return false;
+ }
+ AudioPort ap = (AudioPort)o;
+ return mHandle.equals(ap.handle());
+ }
+
+ @Override
+ public int hashCode() {
+ return mHandle.hashCode();
+ }
+
+ @Override
+ public String toString() {
+ String role = Integer.toString(mRole);
+ switch (mRole) {
+ case ROLE_NONE:
+ role = "NONE";
+ break;
+ case ROLE_SOURCE:
+ role = "SOURCE";
+ break;
+ case ROLE_SINK:
+ role = "SINK";
+ break;
+ }
+ return "{mHandle: " + mHandle
+ + ", mRole: " + role
+ + "}";
+ }
+}
diff --git a/android/media/AudioPortConfig.java b/android/media/AudioPortConfig.java
new file mode 100644
index 00000000..f937cc29
--- /dev/null
+++ b/android/media/AudioPortConfig.java
@@ -0,0 +1,103 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * An AudioPortConfig contains a possible configuration of an audio port chosen
+ * among all possible attributes described by an AudioPort.
+ * An AudioPortConfig is created by AudioPort.buildConfig().
+ * AudioPorts are used to specify the sources and sinks of a patch created
+ * with AudioManager.connectAudioPatch().
+ * Several specialized versions of AudioPortConfig exist to handle different categories of
+ * audio ports and their specific attributes:
+ * - AudioDevicePortConfig for input (e.g microphone) and output devices (e.g speaker)
+ * - AudioMixPortConfig for input or output streams of the audio framework.
+ * @hide
+ */
+
+public class AudioPortConfig {
+ final AudioPort mPort;
+ private final int mSamplingRate;
+ private final int mChannelMask;
+ private final int mFormat;
+ private final AudioGainConfig mGain;
+
+ // mConfigMask indicates which fields in this configuration should be
+ // taken into account. Used with AudioSystem.setAudioPortConfig()
+ // framework use only.
+ static final int SAMPLE_RATE = 0x1;
+ static final int CHANNEL_MASK = 0x2;
+ static final int FORMAT = 0x4;
+ static final int GAIN = 0x8;
+ int mConfigMask;
+
+ AudioPortConfig(AudioPort port, int samplingRate, int channelMask, int format,
+ AudioGainConfig gain) {
+ mPort = port;
+ mSamplingRate = samplingRate;
+ mChannelMask = channelMask;
+ mFormat = format;
+ mGain = gain;
+ mConfigMask = 0;
+ }
+
+ /**
+ * Returns the audio port this AudioPortConfig is issued from.
+ */
+ public AudioPort port() {
+ return mPort;
+ }
+
+ /**
+ * Sampling rate configured for this AudioPortConfig.
+ */
+ public int samplingRate() {
+ return mSamplingRate;
+ }
+
+ /**
+ * Channel mask configuration (e.g AudioFormat.CHANNEL_CONFIGURATION_STEREO).
+ */
+ public int channelMask() {
+ return mChannelMask;
+ }
+
+ /**
+ * Audio format configuration (e.g AudioFormat.ENCODING_PCM_16BIT).
+ */
+ public int format() {
+ return mFormat;
+ }
+
+ /**
+ * The gain configuration if this port supports gain control, null otherwise
+ * @see AudioGainConfig.
+ */
+ public AudioGainConfig gain() {
+ return mGain;
+ }
+
+ @Override
+ public String toString() {
+ return "{mPort:" + mPort
+ + ", mSamplingRate:" + mSamplingRate
+ + ", mChannelMask: " + mChannelMask
+ + ", mFormat:" + mFormat
+ + ", mGain:" + mGain
+ + "}";
+ }
+}
diff --git a/android/media/AudioPortEventHandler.java b/android/media/AudioPortEventHandler.java
new file mode 100644
index 00000000..c152245d
--- /dev/null
+++ b/android/media/AudioPortEventHandler.java
@@ -0,0 +1,176 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Message;
+import java.util.ArrayList;
+import java.lang.ref.WeakReference;
+
+/**
+ * The AudioPortEventHandler handles AudioManager.OnAudioPortUpdateListener callbacks
+ * posted from JNI
+ * @hide
+ */
+
+class AudioPortEventHandler {
+ private Handler mHandler;
+ private final ArrayList<AudioManager.OnAudioPortUpdateListener> mListeners =
+ new ArrayList<AudioManager.OnAudioPortUpdateListener>();
+
+ private static final String TAG = "AudioPortEventHandler";
+
+ private static final int AUDIOPORT_EVENT_PORT_LIST_UPDATED = 1;
+ private static final int AUDIOPORT_EVENT_PATCH_LIST_UPDATED = 2;
+ private static final int AUDIOPORT_EVENT_SERVICE_DIED = 3;
+ private static final int AUDIOPORT_EVENT_NEW_LISTENER = 4;
+
+ /**
+ * Accessed by native methods: JNI Callback context.
+ */
+ @SuppressWarnings("unused")
+ private long mJniCallback;
+
+ void init() {
+ synchronized (this) {
+ if (mHandler != null) {
+ return;
+ }
+ // find the looper for our new event handler
+ Looper looper = Looper.getMainLooper();
+
+ if (looper != null) {
+ mHandler = new Handler(looper) {
+ @Override
+ public void handleMessage(Message msg) {
+ ArrayList<AudioManager.OnAudioPortUpdateListener> listeners;
+ synchronized (this) {
+ if (msg.what == AUDIOPORT_EVENT_NEW_LISTENER) {
+ listeners = new ArrayList<AudioManager.OnAudioPortUpdateListener>();
+ if (mListeners.contains(msg.obj)) {
+ listeners.add((AudioManager.OnAudioPortUpdateListener)msg.obj);
+ }
+ } else {
+ listeners = mListeners;
+ }
+ }
+ // reset audio port cache if the event corresponds to a change coming
+ // from audio policy service or if mediaserver process died.
+ if (msg.what == AUDIOPORT_EVENT_PORT_LIST_UPDATED ||
+ msg.what == AUDIOPORT_EVENT_PATCH_LIST_UPDATED ||
+ msg.what == AUDIOPORT_EVENT_SERVICE_DIED) {
+ AudioManager.resetAudioPortGeneration();
+ }
+
+ if (listeners.isEmpty()) {
+ return;
+ }
+
+ ArrayList<AudioPort> ports = new ArrayList<AudioPort>();
+ ArrayList<AudioPatch> patches = new ArrayList<AudioPatch>();
+ if (msg.what != AUDIOPORT_EVENT_SERVICE_DIED) {
+ int status = AudioManager.updateAudioPortCache(ports, patches, null);
+ if (status != AudioManager.SUCCESS) {
+ return;
+ }
+ }
+
+ switch (msg.what) {
+ case AUDIOPORT_EVENT_NEW_LISTENER:
+ case AUDIOPORT_EVENT_PORT_LIST_UPDATED:
+ AudioPort[] portList = ports.toArray(new AudioPort[0]);
+ for (int i = 0; i < listeners.size(); i++) {
+ listeners.get(i).onAudioPortListUpdate(portList);
+ }
+ if (msg.what == AUDIOPORT_EVENT_PORT_LIST_UPDATED) {
+ break;
+ }
+ // FALL THROUGH
+
+ case AUDIOPORT_EVENT_PATCH_LIST_UPDATED:
+ AudioPatch[] patchList = patches.toArray(new AudioPatch[0]);
+ for (int i = 0; i < listeners.size(); i++) {
+ listeners.get(i).onAudioPatchListUpdate(patchList);
+ }
+ break;
+
+ case AUDIOPORT_EVENT_SERVICE_DIED:
+ for (int i = 0; i < listeners.size(); i++) {
+ listeners.get(i).onServiceDied();
+ }
+ break;
+
+ default:
+ break;
+ }
+ }
+ };
+ native_setup(new WeakReference<AudioPortEventHandler>(this));
+ } else {
+ mHandler = null;
+ }
+ }
+ }
+
+ private native void native_setup(Object module_this);
+
+ @Override
+ protected void finalize() {
+ native_finalize();
+ }
+ private native void native_finalize();
+
+ void registerListener(AudioManager.OnAudioPortUpdateListener l) {
+ synchronized (this) {
+ mListeners.add(l);
+ }
+ if (mHandler != null) {
+ Message m = mHandler.obtainMessage(AUDIOPORT_EVENT_NEW_LISTENER, 0, 0, l);
+ mHandler.sendMessage(m);
+ }
+ }
+
+ void unregisterListener(AudioManager.OnAudioPortUpdateListener l) {
+ synchronized (this) {
+ mListeners.remove(l);
+ }
+ }
+
+ Handler handler() {
+ return mHandler;
+ }
+
+ @SuppressWarnings("unused")
+ private static void postEventFromNative(Object module_ref,
+ int what, int arg1, int arg2, Object obj) {
+ AudioPortEventHandler eventHandler =
+ (AudioPortEventHandler)((WeakReference)module_ref).get();
+ if (eventHandler == null) {
+ return;
+ }
+
+ if (eventHandler != null) {
+ Handler handler = eventHandler.handler();
+ if (handler != null) {
+ Message m = handler.obtainMessage(what, arg1, arg2, obj);
+ handler.sendMessage(m);
+ }
+ }
+ }
+
+}
diff --git a/android/media/AudioRecord.java b/android/media/AudioRecord.java
new file mode 100644
index 00000000..0906ba50
--- /dev/null
+++ b/android/media/AudioRecord.java
@@ -0,0 +1,1795 @@
+/*
+ * Copyright (C) 2008 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.ref.WeakReference;
+import java.nio.ByteBuffer;
+import java.util.Collection;
+import java.util.Iterator;
+
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.SystemApi;
+import android.app.ActivityThread;
+import android.os.Binder;
+import android.os.Handler;
+import android.os.IBinder;
+import android.os.Looper;
+import android.os.Message;
+import android.os.RemoteException;
+import android.os.ServiceManager;
+import android.util.ArrayMap;
+import android.util.Log;
+
+import com.android.internal.annotations.GuardedBy;
+
+/**
+ * The AudioRecord class manages the audio resources for Java applications
+ * to record audio from the audio input hardware of the platform. This is
+ * achieved by "pulling" (reading) the data from the AudioRecord object. The
+ * application is responsible for polling the AudioRecord object in time using one of
+ * the following three methods: {@link #read(byte[],int, int)}, {@link #read(short[], int, int)}
+ * or {@link #read(ByteBuffer, int)}. The choice of which method to use will be based
+ * on the audio data storage format that is the most convenient for the user of AudioRecord.
+ * <p>Upon creation, an AudioRecord object initializes its associated audio buffer that it will
+ * fill with the new audio data. The size of this buffer, specified during the construction,
+ * determines how long an AudioRecord can record before "over-running" data that has not
+ * been read yet. Data should be read from the audio hardware in chunks of sizes inferior to
+ * the total recording buffer size.
+ */
+public class AudioRecord implements AudioRouting
+{
+ //---------------------------------------------------------
+ // Constants
+ //--------------------
+
+
+ /**
+ * indicates AudioRecord state is not successfully initialized.
+ */
+ public static final int STATE_UNINITIALIZED = 0;
+ /**
+ * indicates AudioRecord state is ready to be used
+ */
+ public static final int STATE_INITIALIZED = 1;
+
+ /**
+ * indicates AudioRecord recording state is not recording
+ */
+ public static final int RECORDSTATE_STOPPED = 1; // matches SL_RECORDSTATE_STOPPED
+ /**
+ * indicates AudioRecord recording state is recording
+ */
+ public static final int RECORDSTATE_RECORDING = 3;// matches SL_RECORDSTATE_RECORDING
+
+ /**
+ * Denotes a successful operation.
+ */
+ public static final int SUCCESS = AudioSystem.SUCCESS;
+ /**
+ * Denotes a generic operation failure.
+ */
+ public static final int ERROR = AudioSystem.ERROR;
+ /**
+ * Denotes a failure due to the use of an invalid value.
+ */
+ public static final int ERROR_BAD_VALUE = AudioSystem.BAD_VALUE;
+ /**
+ * Denotes a failure due to the improper use of a method.
+ */
+ public static final int ERROR_INVALID_OPERATION = AudioSystem.INVALID_OPERATION;
+ /**
+ * An error code indicating that the object reporting it is no longer valid and needs to
+ * be recreated.
+ */
+ public static final int ERROR_DEAD_OBJECT = AudioSystem.DEAD_OBJECT;
+
+ // Error codes:
+ // to keep in sync with frameworks/base/core/jni/android_media_AudioRecord.cpp
+ private static final int AUDIORECORD_ERROR_SETUP_ZEROFRAMECOUNT = -16;
+ private static final int AUDIORECORD_ERROR_SETUP_INVALIDCHANNELMASK = -17;
+ private static final int AUDIORECORD_ERROR_SETUP_INVALIDFORMAT = -18;
+ private static final int AUDIORECORD_ERROR_SETUP_INVALIDSOURCE = -19;
+ private static final int AUDIORECORD_ERROR_SETUP_NATIVEINITFAILED = -20;
+
+ // Events:
+ // to keep in sync with frameworks/av/include/media/AudioRecord.h
+ /**
+ * Event id denotes when record head has reached a previously set marker.
+ */
+ private static final int NATIVE_EVENT_MARKER = 2;
+ /**
+ * Event id denotes when previously set update period has elapsed during recording.
+ */
+ private static final int NATIVE_EVENT_NEW_POS = 3;
+
+ private final static String TAG = "android.media.AudioRecord";
+
+ /** @hide */
+ public final static String SUBMIX_FIXED_VOLUME = "fixedVolume";
+
+ /** @hide */
+ @IntDef({
+ READ_BLOCKING,
+ READ_NON_BLOCKING
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface ReadMode {}
+
+ /**
+ * The read mode indicating the read operation will block until all data
+ * requested has been read.
+ */
+ public final static int READ_BLOCKING = 0;
+
+ /**
+ * The read mode indicating the read operation will return immediately after
+ * reading as much audio data as possible without blocking.
+ */
+ public final static int READ_NON_BLOCKING = 1;
+
+ //---------------------------------------------------------
+ // Used exclusively by native code
+ //--------------------
+ /**
+ * Accessed by native methods: provides access to C++ AudioRecord object
+ */
+ @SuppressWarnings("unused")
+ private long mNativeRecorderInJavaObj;
+
+ /**
+ * Accessed by native methods: provides access to the callback data.
+ */
+ @SuppressWarnings("unused")
+ private long mNativeCallbackCookie;
+
+ /**
+ * Accessed by native methods: provides access to the JNIDeviceCallback instance.
+ */
+ @SuppressWarnings("unused")
+ private long mNativeDeviceCallback;
+
+
+ //---------------------------------------------------------
+ // Member variables
+ //--------------------
+ /**
+ * The audio data sampling rate in Hz.
+ * Never {@link AudioFormat#SAMPLE_RATE_UNSPECIFIED}.
+ */
+ private int mSampleRate; // initialized by all constructors via audioParamCheck()
+ /**
+ * The number of input audio channels (1 is mono, 2 is stereo)
+ */
+ private int mChannelCount;
+ /**
+ * The audio channel position mask
+ */
+ private int mChannelMask;
+ /**
+ * The audio channel index mask
+ */
+ private int mChannelIndexMask;
+ /**
+ * The encoding of the audio samples.
+ * @see AudioFormat#ENCODING_PCM_8BIT
+ * @see AudioFormat#ENCODING_PCM_16BIT
+ * @see AudioFormat#ENCODING_PCM_FLOAT
+ */
+ private int mAudioFormat;
+ /**
+ * Where the audio data is recorded from.
+ */
+ private int mRecordSource;
+ /**
+ * Indicates the state of the AudioRecord instance.
+ */
+ private int mState = STATE_UNINITIALIZED;
+ /**
+ * Indicates the recording state of the AudioRecord instance.
+ */
+ private int mRecordingState = RECORDSTATE_STOPPED;
+ /**
+ * Lock to make sure mRecordingState updates are reflecting the actual state of the object.
+ */
+ private final Object mRecordingStateLock = new Object();
+ /**
+ * The listener the AudioRecord notifies when the record position reaches a marker
+ * or for periodic updates during the progression of the record head.
+ * @see #setRecordPositionUpdateListener(OnRecordPositionUpdateListener)
+ * @see #setRecordPositionUpdateListener(OnRecordPositionUpdateListener, Handler)
+ */
+ private OnRecordPositionUpdateListener mPositionListener = null;
+ /**
+ * Lock to protect position listener updates against event notifications
+ */
+ private final Object mPositionListenerLock = new Object();
+ /**
+ * Handler for marker events coming from the native code
+ */
+ private NativeEventHandler mEventHandler = null;
+ /**
+ * Looper associated with the thread that creates the AudioRecord instance
+ */
+ private Looper mInitializationLooper = null;
+ /**
+ * Size of the native audio buffer.
+ */
+ private int mNativeBufferSizeInBytes = 0;
+ /**
+ * Audio session ID
+ */
+ private int mSessionId = AudioManager.AUDIO_SESSION_ID_GENERATE;
+ /**
+ * AudioAttributes
+ */
+ private AudioAttributes mAudioAttributes;
+ private boolean mIsSubmixFullVolume = false;
+
+ //---------------------------------------------------------
+ // Constructor, Finalize
+ //--------------------
+ /**
+ * Class constructor.
+ * Though some invalid parameters will result in an {@link IllegalArgumentException} exception,
+ * other errors do not. Thus you should call {@link #getState()} immediately after construction
+ * to confirm that the object is usable.
+ * @param audioSource the recording source.
+ * See {@link MediaRecorder.AudioSource} for the recording source definitions.
+ * @param sampleRateInHz the sample rate expressed in Hertz. 44100Hz is currently the only
+ * rate that is guaranteed to work on all devices, but other rates such as 22050,
+ * 16000, and 11025 may work on some devices.
+ * {@link AudioFormat#SAMPLE_RATE_UNSPECIFIED} means to use a route-dependent value
+ * which is usually the sample rate of the source.
+ * {@link #getSampleRate()} can be used to retrieve the actual sample rate chosen.
+ * @param channelConfig describes the configuration of the audio channels.
+ * See {@link AudioFormat#CHANNEL_IN_MONO} and
+ * {@link AudioFormat#CHANNEL_IN_STEREO}. {@link AudioFormat#CHANNEL_IN_MONO} is guaranteed
+ * to work on all devices.
+ * @param audioFormat the format in which the audio data is to be returned.
+ * See {@link AudioFormat#ENCODING_PCM_8BIT}, {@link AudioFormat#ENCODING_PCM_16BIT},
+ * and {@link AudioFormat#ENCODING_PCM_FLOAT}.
+ * @param bufferSizeInBytes the total size (in bytes) of the buffer where audio data is written
+ * to during the recording. New audio data can be read from this buffer in smaller chunks
+ * than this size. See {@link #getMinBufferSize(int, int, int)} to determine the minimum
+ * required buffer size for the successful creation of an AudioRecord instance. Using values
+ * smaller than getMinBufferSize() will result in an initialization failure.
+ * @throws java.lang.IllegalArgumentException
+ */
+    public AudioRecord(int audioSource, int sampleRateInHz, int channelConfig, int audioFormat,
+            int bufferSizeInBytes)
+    throws IllegalArgumentException {
+        // Legacy int-based entry point: translate the arguments into
+        // AudioAttributes/AudioFormat and delegate to the @SystemApi constructor.
+        // Legacy channel configurations (CHANNEL_CONFIGURATION_*) are accepted on
+        // this path only, and a new session id is always generated.
+        this((new AudioAttributes.Builder())
+                    .setInternalCapturePreset(audioSource)
+                    .build(),
+                (new AudioFormat.Builder())
+                    .setChannelMask(getChannelMaskFromLegacyConfig(channelConfig,
+                                        true/*allow legacy configurations*/))
+                    .setEncoding(audioFormat)
+                    .setSampleRate(sampleRateInHz)
+                    .build(),
+                bufferSizeInBytes,
+                AudioManager.AUDIO_SESSION_ID_GENERATE);
+    }
+
+ /**
+ * @hide
+ * Class constructor with {@link AudioAttributes} and {@link AudioFormat}.
+ * @param attributes a non-null {@link AudioAttributes} instance. Use
+ * {@link AudioAttributes.Builder#setAudioSource(int)} for configuring the audio
+ * source for this instance.
+ * @param format a non-null {@link AudioFormat} instance describing the format of the data
+ * that will be recorded through this AudioRecord. See {@link AudioFormat.Builder} for
+ * configuring the audio format parameters such as encoding, channel mask and sample rate.
+ * @param bufferSizeInBytes the total size (in bytes) of the buffer where audio data is written
+ * to during the recording. New audio data can be read from this buffer in smaller chunks
+ * than this size. See {@link #getMinBufferSize(int, int, int)} to determine the minimum
+ * required buffer size for the successful creation of an AudioRecord instance. Using values
+ * smaller than getMinBufferSize() will result in an initialization failure.
+ * @param sessionId ID of audio session the AudioRecord must be attached to, or
+ * {@link AudioManager#AUDIO_SESSION_ID_GENERATE} if the session isn't known at construction
+ * time. See also {@link AudioManager#generateAudioSessionId()} to obtain a session ID before
+ * construction.
+ * @throws IllegalArgumentException
+ */
+    @SystemApi
+    public AudioRecord(AudioAttributes attributes, AudioFormat format, int bufferSizeInBytes,
+            int sessionId) throws IllegalArgumentException {
+        mRecordingState = RECORDSTATE_STOPPED;
+
+        if (attributes == null) {
+            throw new IllegalArgumentException("Illegal null AudioAttributes");
+        }
+        if (format == null) {
+            throw new IllegalArgumentException("Illegal null AudioFormat");
+        }
+
+        // remember which looper is associated with the AudioRecord instantiation;
+        // fall back to the main looper when constructed on a looper-less thread
+        if ((mInitializationLooper = Looper.myLooper()) == null) {
+            mInitializationLooper = Looper.getMainLooper();
+        }
+
+        // is this AudioRecord using REMOTE_SUBMIX at full volume?
+        if (attributes.getCapturePreset() == MediaRecorder.AudioSource.REMOTE_SUBMIX) {
+            final AudioAttributes.Builder filteredAttr = new AudioAttributes.Builder();
+            final Iterator<String> tagsIter = attributes.getTags().iterator();
+            while (tagsIter.hasNext()) {
+                final String tag = tagsIter.next();
+                if (tag.equalsIgnoreCase(SUBMIX_FIXED_VOLUME)) {
+                    mIsSubmixFullVolume = true;
+                    Log.v(TAG, "Will record from REMOTE_SUBMIX at full fixed volume");
+                } else { // SUBMIX_FIXED_VOLUME: is not to be propagated to the native layers
+                    filteredAttr.addTag(tag);
+                }
+            }
+            filteredAttr.setInternalCapturePreset(attributes.getCapturePreset());
+            mAudioAttributes = filteredAttr.build();
+        } else {
+            mAudioAttributes = attributes;
+        }
+
+        // SAMPLE_RATE_UNSPECIFIED is encoded as 0 for the native layer, which then
+        // chooses a route-dependent rate (read back below from sampleRate[0])
+        int rate = format.getSampleRate();
+        if (rate == AudioFormat.SAMPLE_RATE_UNSPECIFIED) {
+            rate = 0;
+        }
+
+        int encoding = AudioFormat.ENCODING_DEFAULT;
+        if ((format.getPropertySetMask() & AudioFormat.AUDIO_FORMAT_HAS_PROPERTY_ENCODING) != 0)
+        {
+            encoding = format.getEncoding();
+        }
+
+        // validates source/rate/encoding and sets mRecordSource/mSampleRate/mAudioFormat
+        audioParamCheck(attributes.getCapturePreset(), rate, encoding);
+
+        if ((format.getPropertySetMask()
+                & AudioFormat.AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_INDEX_MASK) != 0) {
+            mChannelIndexMask = format.getChannelIndexMask();
+            mChannelCount = format.getChannelCount();
+        }
+        if ((format.getPropertySetMask()
+                & AudioFormat.AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_MASK) != 0) {
+            mChannelMask = getChannelMaskFromLegacyConfig(format.getChannelMask(), false);
+            mChannelCount = format.getChannelCount();
+        } else if (mChannelIndexMask == 0) {
+            // neither mask was specified: default to mono position mask
+            mChannelMask = getChannelMaskFromLegacyConfig(AudioFormat.CHANNEL_IN_DEFAULT, false);
+            mChannelCount = AudioFormat.channelCountFromInChannelMask(mChannelMask);
+        }
+
+        // validates the size and sets mNativeBufferSizeInBytes (must follow the
+        // channel/encoding setup above, which determines the frame size)
+        audioBuffSizeCheck(bufferSizeInBytes);
+
+        // one-element arrays act as in/out parameters for native_setup()
+        int[] sampleRate = new int[] {mSampleRate};
+        int[] session = new int[1];
+        session[0] = sessionId;
+        //TODO: update native initialization when information about hardware init failure
+        //      due to capture device already open is available.
+        int initResult = native_setup( new WeakReference<AudioRecord>(this),
+                mAudioAttributes, sampleRate, mChannelMask, mChannelIndexMask,
+                mAudioFormat, mNativeBufferSizeInBytes,
+                session, ActivityThread.currentOpPackageName(), 0 /*nativeRecordInJavaObj*/);
+        if (initResult != SUCCESS) {
+            loge("Error code "+initResult+" when initializing native AudioRecord object.");
+            return; // with mState == STATE_UNINITIALIZED
+        }
+
+        mSampleRate = sampleRate[0];
+        mSessionId = session[0];
+
+        mState = STATE_INITIALIZED;
+    }
+
+ /**
+ * A constructor which explicitly connects a Native (C++) AudioRecord. For use by
+ * the AudioRecordRoutingProxy subclass.
+ * @param nativeRecordInJavaObj A C/C++ pointer to a native AudioRecord
+ * (associated with an OpenSL ES recorder). Note: the caller must ensure a correct
+ * value here as no error checking is or can be done.
+ */
+    /**
+     * Package-private constructor wrapping an already-created native AudioRecord
+     * (used by AudioRecordRoutingProxy). The caller is responsible for passing a
+     * valid native pointer; no error checking is possible here.
+     */
+    /*package*/ AudioRecord(long nativeRecordInJavaObj) {
+        // native bookkeeping fields start out cleared
+        mNativeRecorderInJavaObj = 0;
+        mNativeCallbackCookie = 0;
+        mNativeDeviceCallback = 0;
+
+        if (nativeRecordInJavaObj == 0) {
+            mState = STATE_UNINITIALIZED;
+            return;
+        }
+        deferred_connect(nativeRecordInJavaObj);
+    }
+
+ /**
+ * @hide
+ */
+    // Binds this Java object to an existing native AudioRecord. No-op when the
+    // instance is already initialized. On success, adopts the native session id
+    // and moves mState to STATE_INITIALIZED.
+    /* package */ void deferred_connect(long nativeRecordInJavaObj) {
+        if (mState != STATE_INITIALIZED) {
+            int[] session = { 0 };
+            int[] rates = { 0 };
+            //TODO: update native initialization when information about hardware init failure
+            //      due to capture device already open is available.
+            // Note that for this native_setup, we are providing an already created/initialized
+            // *Native* AudioRecord, so the attributes parameters to native_setup() are ignored.
+            int initResult = native_setup(new WeakReference<AudioRecord>(this),
+                    null /*mAudioAttributes*/,
+                    rates /*mSampleRates*/,
+                    0 /*mChannelMask*/,
+                    0 /*mChannelIndexMask*/,
+                    0 /*mAudioFormat*/,
+                    0 /*mNativeBufferSizeInBytes*/,
+                    session,
+                    ActivityThread.currentOpPackageName(),
+                    nativeRecordInJavaObj);
+            if (initResult != SUCCESS) {
+                loge("Error code "+initResult+" when initializing native AudioRecord object.");
+                return; // with mState == STATE_UNINITIALIZED
+            }
+
+            mSessionId = session[0];
+
+            mState = STATE_INITIALIZED;
+        }
+    }
+
+ /**
+ * Builder class for {@link AudioRecord} objects.
+ * Use this class to configure and create an <code>AudioRecord</code> instance. By setting the
+ * recording source and audio format parameters, you indicate which of
+ * those vary from the default behavior on the device.
+ * <p> Here is an example where <code>Builder</code> is used to specify all {@link AudioFormat}
+ * parameters, to be used by a new <code>AudioRecord</code> instance:
+ *
+ * <pre class="prettyprint">
+ * AudioRecord recorder = new AudioRecord.Builder()
+ * .setAudioSource(MediaRecorder.AudioSource.VOICE_COMMUNICATION)
+ * .setAudioFormat(new AudioFormat.Builder()
+ * .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
+ * .setSampleRate(32000)
+ * .setChannelMask(AudioFormat.CHANNEL_IN_MONO)
+ * .build())
+ * .setBufferSize(2*minBuffSize)
+ * .build();
+ * </pre>
+ * <p>
+ * If the audio source is not set with {@link #setAudioSource(int)},
+ * {@link MediaRecorder.AudioSource#DEFAULT} is used.
+ * <br>If the audio format is not specified or is incomplete, its channel configuration will be
+ * {@link AudioFormat#CHANNEL_IN_MONO}, and the encoding will be
+ * {@link AudioFormat#ENCODING_PCM_16BIT}.
+ * The sample rate will depend on the device actually selected for capture and can be queried
+ * with {@link #getSampleRate()} method.
+ * <br>If the buffer size is not specified with {@link #setBufferSizeInBytes(int)},
+ * the minimum buffer size for the source is used.
+ */
+    public static class Builder {
+        // capture attributes (audio source); null until setAudioSource()/setAudioAttributes()
+        private AudioAttributes mAttributes;
+        // desired capture format; null means "use defaults" (mono, PCM 16 bit)
+        private AudioFormat mFormat;
+        // 0 means "unset": build() substitutes a one-frame size so the native
+        // layer picks the minimum buffer size
+        private int mBufferSizeInBytes;
+        private int mSessionId = AudioManager.AUDIO_SESSION_ID_GENERATE;
+
+        /**
+         * Constructs a new Builder with the default values as described above.
+         */
+        public Builder() {
+        }
+
+        /**
+         * @param source the audio source.
+         * See {@link MediaRecorder.AudioSource} for the supported audio source definitions.
+         * @return the same Builder instance.
+         * @throws IllegalArgumentException if the source is out of the supported range
+         */
+        public Builder setAudioSource(int source) throws IllegalArgumentException {
+            if ( (source < MediaRecorder.AudioSource.DEFAULT) ||
+                 (source > MediaRecorder.getAudioSourceMax()) ) {
+                throw new IllegalArgumentException("Invalid audio source " + source);
+            }
+            mAttributes = new AudioAttributes.Builder()
+                    .setInternalCapturePreset(source)
+                    .build();
+            return this;
+        }
+
+        /**
+         * @hide
+         * To be only used by system components. Allows specifying non-public capture presets
+         * @param attributes a non-null {@link AudioAttributes} instance that contains the capture
+         *     preset to be used.
+         * @return the same Builder instance.
+         * @throws IllegalArgumentException if attributes is null or has no valid capture preset
+         */
+        @SystemApi
+        public Builder setAudioAttributes(@NonNull AudioAttributes attributes)
+                throws IllegalArgumentException {
+            if (attributes == null) {
+                throw new IllegalArgumentException("Illegal null AudioAttributes argument");
+            }
+            if (attributes.getCapturePreset() == MediaRecorder.AudioSource.AUDIO_SOURCE_INVALID) {
+                throw new IllegalArgumentException(
+                        "No valid capture preset in AudioAttributes argument");
+            }
+            // keep reference, we only copy the data when building
+            mAttributes = attributes;
+            return this;
+        }
+
+        /**
+         * Sets the format of the audio data to be captured.
+         * @param format a non-null {@link AudioFormat} instance
+         * @return the same Builder instance.
+         * @throws IllegalArgumentException if format is null
+         */
+        public Builder setAudioFormat(@NonNull AudioFormat format) throws IllegalArgumentException {
+            if (format == null) {
+                throw new IllegalArgumentException("Illegal null AudioFormat argument");
+            }
+            // keep reference, we only copy the data when building
+            mFormat = format;
+            return this;
+        }
+
+        /**
+         * Sets the total size (in bytes) of the buffer where audio data is written
+         * during the recording. New audio data can be read from this buffer in smaller chunks
+         * than this size. See {@link #getMinBufferSize(int, int, int)} to determine the minimum
+         * required buffer size for the successful creation of an AudioRecord instance.
+         * Since bufferSizeInBytes may be internally increased to accommodate the source
+         * requirements, use {@link #getBufferSizeInFrames()} to determine the actual buffer size
+         * in frames.
+         * @param bufferSizeInBytes a value strictly greater than 0
+         * @return the same Builder instance.
+         * @throws IllegalArgumentException if the size is not strictly positive
+         */
+        public Builder setBufferSizeInBytes(int bufferSizeInBytes) throws IllegalArgumentException {
+            if (bufferSizeInBytes <= 0) {
+                throw new IllegalArgumentException("Invalid buffer size " + bufferSizeInBytes);
+            }
+            mBufferSizeInBytes = bufferSizeInBytes;
+            return this;
+        }
+
+        /**
+         * @hide
+         * To be only used by system components.
+         * @param sessionId ID of audio session the AudioRecord must be attached to, or
+         * {@link AudioManager#AUDIO_SESSION_ID_GENERATE} if the session isn't known at
+         * construction time.
+         * @return the same Builder instance.
+         * @throws IllegalArgumentException if sessionId is negative
+         */
+        @SystemApi
+        public Builder setSessionId(int sessionId) throws IllegalArgumentException {
+            if (sessionId < 0) {
+                throw new IllegalArgumentException("Invalid session ID " + sessionId);
+            }
+            mSessionId = sessionId;
+            return this;
+        }
+
+        /**
+         * @return a new {@link AudioRecord} instance successfully initialized with all
+         *     the parameters set on this <code>Builder</code>.
+         * @throws UnsupportedOperationException if the parameters set on the <code>Builder</code>
+         *     were incompatible, or if they are not supported by the device,
+         *     or if the device was not available.
+         */
+        public AudioRecord build() throws UnsupportedOperationException {
+            // fill in defaults for any format property the caller left unset
+            if (mFormat == null) {
+                mFormat = new AudioFormat.Builder()
+                        .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
+                        .setChannelMask(AudioFormat.CHANNEL_IN_MONO)
+                        .build();
+            } else {
+                if (mFormat.getEncoding() == AudioFormat.ENCODING_INVALID) {
+                    mFormat = new AudioFormat.Builder(mFormat)
+                            .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
+                            .build();
+                }
+                if (mFormat.getChannelMask() == AudioFormat.CHANNEL_INVALID
+                        && mFormat.getChannelIndexMask() == AudioFormat.CHANNEL_INVALID) {
+                    mFormat = new AudioFormat.Builder(mFormat)
+                            .setChannelMask(AudioFormat.CHANNEL_IN_MONO)
+                            .build();
+                }
+            }
+            if (mAttributes == null) {
+                mAttributes = new AudioAttributes.Builder()
+                        .setInternalCapturePreset(MediaRecorder.AudioSource.DEFAULT)
+                        .build();
+            }
+            try {
+                // If the buffer size is not specified,
+                // use a single frame for the buffer size and let the
+                // native code figure out the minimum buffer size.
+                // NOTE(review): this mutates the Builder's own field, so a reused
+                // Builder keeps the substituted size — confirm intended.
+                if (mBufferSizeInBytes == 0) {
+                    mBufferSizeInBytes = mFormat.getChannelCount()
+                            * mFormat.getBytesPerSample(mFormat.getEncoding());
+                }
+                final AudioRecord record = new AudioRecord(
+                        mAttributes, mFormat, mBufferSizeInBytes, mSessionId);
+                if (record.getState() == STATE_UNINITIALIZED) {
+                    // release is not necessary
+                    throw new UnsupportedOperationException("Cannot create AudioRecord");
+                }
+                return record;
+            } catch (IllegalArgumentException e) {
+                throw new UnsupportedOperationException(e.getMessage());
+            }
+        }
+    }
+
+ // Convenience method for the constructor's parameter checks.
+ // This, getChannelMaskFromLegacyConfig and audioBuffSizeCheck are where constructor
+ // IllegalArgumentException-s are thrown
+    // Maps a legacy CHANNEL_IN_* / CHANNEL_CONFIGURATION_* value to a channel
+    // position mask. This, audioParamCheck and audioBuffSizeCheck are where the
+    // constructors' IllegalArgumentException-s originate.
+    private static int getChannelMaskFromLegacyConfig(int inChannelConfig,
+            boolean allowLegacyConfig) {
+        // deprecated CHANNEL_CONFIGURATION_* values are only accepted from the
+        // legacy int-based constructor (allowLegacyConfig == true)
+        if (!allowLegacyConfig
+                && ((inChannelConfig == AudioFormat.CHANNEL_CONFIGURATION_MONO)
+                        || (inChannelConfig == AudioFormat.CHANNEL_CONFIGURATION_STEREO))) {
+            throw new IllegalArgumentException("Unsupported deprecated configuration.");
+        }
+        switch (inChannelConfig) {
+            case AudioFormat.CHANNEL_IN_DEFAULT: // AudioFormat.CHANNEL_CONFIGURATION_DEFAULT
+            case AudioFormat.CHANNEL_IN_MONO:
+            case AudioFormat.CHANNEL_CONFIGURATION_MONO:
+                return AudioFormat.CHANNEL_IN_MONO;
+            case AudioFormat.CHANNEL_IN_STEREO:
+            case AudioFormat.CHANNEL_CONFIGURATION_STEREO:
+                return AudioFormat.CHANNEL_IN_STEREO;
+            case (AudioFormat.CHANNEL_IN_FRONT | AudioFormat.CHANNEL_IN_BACK):
+                return inChannelConfig;
+            default:
+                throw new IllegalArgumentException("Unsupported channel configuration.");
+        }
+    }
+
+ // postconditions:
+ // mRecordSource is valid
+ // mAudioFormat is valid
+ // mSampleRate is valid
+    // Validates source, sample rate and encoding from the constructor arguments.
+    // postconditions:
+    //    mRecordSource is valid
+    //    mAudioFormat is valid (ENCODING_DEFAULT resolved to ENCODING_PCM_16BIT)
+    //    mSampleRate is valid (possibly SAMPLE_RATE_UNSPECIFIED)
+    private void audioParamCheck(int audioSource, int sampleRateInHz, int audioFormat)
+            throws IllegalArgumentException {
+
+        //--------------
+        // audio source
+        // RADIO_TUNER and HOTWORD sit above getAudioSourceMax() but are allowed here
+        if ( (audioSource < MediaRecorder.AudioSource.DEFAULT) ||
+             ((audioSource > MediaRecorder.getAudioSourceMax()) &&
+              (audioSource != MediaRecorder.AudioSource.RADIO_TUNER) &&
+              (audioSource != MediaRecorder.AudioSource.HOTWORD)) ) {
+            throw new IllegalArgumentException("Invalid audio source " + audioSource);
+        }
+        mRecordSource = audioSource;
+
+        //--------------
+        // sample rate
+        if ((sampleRateInHz < AudioFormat.SAMPLE_RATE_HZ_MIN ||
+                sampleRateInHz > AudioFormat.SAMPLE_RATE_HZ_MAX) &&
+                sampleRateInHz != AudioFormat.SAMPLE_RATE_UNSPECIFIED) {
+            throw new IllegalArgumentException(sampleRateInHz
+                    + "Hz is not a supported sample rate.");
+        }
+        mSampleRate = sampleRateInHz;
+
+        //--------------
+        // audio format
+        switch (audioFormat) {
+        case AudioFormat.ENCODING_DEFAULT:
+            mAudioFormat = AudioFormat.ENCODING_PCM_16BIT;
+            break;
+        case AudioFormat.ENCODING_PCM_FLOAT:
+        case AudioFormat.ENCODING_PCM_16BIT:
+        case AudioFormat.ENCODING_PCM_8BIT:
+            mAudioFormat = audioFormat;
+            break;
+        default:
+            throw new IllegalArgumentException("Unsupported sample encoding " + audioFormat
+                    + ". Should be ENCODING_PCM_8BIT, ENCODING_PCM_16BIT, or ENCODING_PCM_FLOAT.");
+        }
+    }
+
+
+ // Convenience method for the contructor's audio buffer size check.
+ // preconditions:
+ // mChannelCount is valid
+ // mAudioFormat is AudioFormat.ENCODING_PCM_8BIT, AudioFormat.ENCODING_PCM_16BIT,
+ // or AudioFormat.ENCODING_PCM_FLOAT
+ // postcondition:
+ // mNativeBufferSizeInBytes is valid (multiple of frame size, positive)
+ private void audioBuffSizeCheck(int audioBufferSize) throws IllegalArgumentException {
+ // NB: this section is only valid with PCM data.
+ // To update when supporting compressed formats
+ int frameSizeInBytes = mChannelCount
+ * (AudioFormat.getBytesPerSample(mAudioFormat));
+ if ((audioBufferSize % frameSizeInBytes != 0) || (audioBufferSize < 1)) {
+ throw new IllegalArgumentException("Invalid audio buffer size " + audioBufferSize
+ + " (frame size " + frameSizeInBytes + ")");
+ }
+
+ mNativeBufferSizeInBytes = audioBufferSize;
+ }
+
+
+
+ /**
+ * Releases the native AudioRecord resources.
+ * The object can no longer be used and the reference should be set to null
+ * after a call to release()
+ */
+    public void release() {
+        try {
+            // stop first so fixed-volume handling and recording teardown run
+            stop();
+        } catch(IllegalStateException ise) {
+            // don't raise an exception, we're releasing the resources.
+        }
+        native_release();
+        // object is unusable from here on; callers should drop their reference
+        mState = STATE_UNINITIALIZED;
+    }
+
+
+    /**
+     * Finalizer safety net: releases native resources if the app forgot to.
+     * NOTE(review): does not call super.finalize(); assumed intentional — confirm.
+     */
+    @Override
+    protected void finalize() {
+        // will cause stop() to be called, and if appropriate, will handle fixed volume recording
+        release();
+    }
+
+
+ //--------------------------------------------------------------------------
+ // Getters
+ //--------------------
+ /**
+ * Returns the configured audio sink sample rate in Hz.
+ * The sink sample rate never changes after construction.
+ * If the constructor had a specific sample rate, then the sink sample rate is that value.
+ * If the constructor had {@link AudioFormat#SAMPLE_RATE_UNSPECIFIED},
+ * then the sink sample rate is a route-dependent default value based on the source [sic].
+ */
+    public int getSampleRate() {
+        // set by audioParamCheck(), then possibly updated from native_setup()'s
+        // out-parameter when the rate was unspecified
+        return mSampleRate;
+    }
+
+ /**
+ * Returns the audio recording source.
+ * @see MediaRecorder.AudioSource
+ */
+    public int getAudioSource() {
+        return mRecordSource; // validated in audioParamCheck()
+    }
+
+ /**
+ * Returns the configured audio data encoding. See {@link AudioFormat#ENCODING_PCM_8BIT},
+ * {@link AudioFormat#ENCODING_PCM_16BIT}, and {@link AudioFormat#ENCODING_PCM_FLOAT}.
+ */
+    public int getAudioFormat() {
+        // ENCODING_DEFAULT was resolved to ENCODING_PCM_16BIT in audioParamCheck()
+        return mAudioFormat;
+    }
+
+ /**
+ * Returns the configured channel position mask.
+ * <p> See {@link AudioFormat#CHANNEL_IN_MONO}
+ * and {@link AudioFormat#CHANNEL_IN_STEREO}.
+ * This method may return {@link AudioFormat#CHANNEL_INVALID} if
+ * a channel index mask is used.
+ * Consider {@link #getFormat()} instead, to obtain an {@link AudioFormat},
+ * which contains both the channel position mask and the channel index mask.
+ */
+    public int getChannelConfiguration() {
+        // may be CHANNEL_INVALID when only a channel index mask was configured
+        return mChannelMask;
+    }
+
+ /**
+ * Returns the configured <code>AudioRecord</code> format.
+ * @return an {@link AudioFormat} containing the
+ * <code>AudioRecord</code> parameters at the time of configuration.
+ */
+    /**
+     * Builds and returns an {@link AudioFormat} mirroring this instance's
+     * configured sample rate, encoding, and whichever channel masks are set.
+     */
+    public @NonNull AudioFormat getFormat() {
+        final AudioFormat.Builder formatBuilder = new AudioFormat.Builder();
+        formatBuilder.setSampleRate(mSampleRate);
+        formatBuilder.setEncoding(mAudioFormat);
+        // only propagate masks that are actually set (CHANNEL_INVALID means absent)
+        if (mChannelMask != AudioFormat.CHANNEL_INVALID) {
+            formatBuilder.setChannelMask(mChannelMask);
+        }
+        if (mChannelIndexMask != AudioFormat.CHANNEL_INVALID /* 0 */) {
+            formatBuilder.setChannelIndexMask(mChannelIndexMask);
+        }
+        return formatBuilder.build();
+    }
+
+ /**
+ * Returns the configured number of channels.
+ */
+    public int getChannelCount() {
+        return mChannelCount; // derived from the channel (index) mask at construction
+    }
+
+ /**
+ * Returns the state of the AudioRecord instance. This is useful after the
+ * AudioRecord instance has been created to check if it was initialized
+ * properly. This ensures that the appropriate hardware resources have been
+ * acquired.
+ * @see AudioRecord#STATE_INITIALIZED
+ * @see AudioRecord#STATE_UNINITIALIZED
+ */
+    public int getState() {
+        return mState; // STATE_INITIALIZED only after native_setup() succeeded
+    }
+
+ /**
+ * Returns the recording state of the AudioRecord instance.
+ * @see AudioRecord#RECORDSTATE_STOPPED
+ * @see AudioRecord#RECORDSTATE_RECORDING
+ */
+    public int getRecordingState() {
+        // read under the lock so the value reflects the latest start/stop transition
+        synchronized (mRecordingStateLock) {
+            return mRecordingState;
+        }
+    }
+
+ /**
+ * Returns the frame count of the native <code>AudioRecord</code> buffer.
+ * This is greater than or equal to the bufferSizeInBytes converted to frame units
+ * specified in the <code>AudioRecord</code> constructor or Builder.
+ * The native frame count may be enlarged to accommodate the requirements of the
+ * source on creation or if the <code>AudioRecord</code>
+ * is subsequently rerouted.
+ * @return current size in frames of the <code>AudioRecord</code> buffer.
+ * @throws IllegalStateException
+ */
+    public int getBufferSizeInFrames() {
+        // queried from native each call; may grow after rerouting
+        return native_get_buffer_size_in_frames();
+    }
+
+ /**
+ * Returns the notification marker position expressed in frames.
+ */
+    public int getNotificationMarkerPosition() {
+        return native_get_marker_pos(); // in frames
+    }
+
+ /**
+ * Returns the notification update period expressed in frames.
+ */
+    public int getPositionNotificationPeriod() {
+        return native_get_pos_update_period(); // in frames
+    }
+
+ /**
+ * Poll for an {@link AudioTimestamp} on demand.
+ * <p>
+ * The AudioTimestamp reflects the frame delivery information at
+ * the earliest point available in the capture pipeline.
+ * <p>
+ * Calling {@link #startRecording()} following a {@link #stop()} will reset
+ * the frame count to 0.
+ *
+ * @param outTimestamp a caller provided non-null AudioTimestamp instance,
+ * which is updated with the AudioRecord frame delivery information upon success.
+ * @param timebase one of
+ * {@link AudioTimestamp#TIMEBASE_BOOTTIME AudioTimestamp.TIMEBASE_BOOTTIME} or
+ * {@link AudioTimestamp#TIMEBASE_MONOTONIC AudioTimestamp.TIMEBASE_MONOTONIC},
+ * used to select the clock for the AudioTimestamp time.
+ * @return {@link #SUCCESS} if a timestamp is available,
+ * or {@link #ERROR_INVALID_OPERATION} if a timestamp not available.
+ */
+ public int getTimestamp(@NonNull AudioTimestamp outTimestamp,
+ @AudioTimestamp.Timebase int timebase)
+ {
+ if (outTimestamp == null ||
+ (timebase != AudioTimestamp.TIMEBASE_BOOTTIME
+ && timebase != AudioTimestamp.TIMEBASE_MONOTONIC)) {
+ throw new IllegalArgumentException();
+ }
+ return native_get_timestamp(outTimestamp, timebase);
+ }
+
+ /**
+ * Returns the minimum buffer size required for the successful creation of an AudioRecord
+ * object, in byte units.
+ * Note that this size doesn't guarantee a smooth recording under load, and higher values
+ * should be chosen according to the expected frequency at which the AudioRecord instance
+ * will be polled for new data.
+ * See {@link #AudioRecord(int, int, int, int, int)} for more information on valid
+ * configuration values.
+ * @param sampleRateInHz the sample rate expressed in Hertz.
+ * {@link AudioFormat#SAMPLE_RATE_UNSPECIFIED} is not permitted.
+ * @param channelConfig describes the configuration of the audio channels.
+ * See {@link AudioFormat#CHANNEL_IN_MONO} and
+ * {@link AudioFormat#CHANNEL_IN_STEREO}
+ * @param audioFormat the format in which the audio data is represented.
+ * See {@link AudioFormat#ENCODING_PCM_16BIT}.
+ * @return {@link #ERROR_BAD_VALUE} if the recording parameters are not supported by the
+ * hardware, or an invalid parameter was passed,
+ * or {@link #ERROR} if the implementation was unable to query the hardware for its
+ * input properties,
+ * or the minimum buffer size expressed in bytes.
+ * @see #AudioRecord(int, int, int, int, int)
+ */
+ static public int getMinBufferSize(int sampleRateInHz, int channelConfig, int audioFormat) {
+ int channelCount = 0;
+ switch (channelConfig) {
+ case AudioFormat.CHANNEL_IN_DEFAULT: // AudioFormat.CHANNEL_CONFIGURATION_DEFAULT
+ case AudioFormat.CHANNEL_IN_MONO:
+ case AudioFormat.CHANNEL_CONFIGURATION_MONO:
+ channelCount = 1;
+ break;
+ case AudioFormat.CHANNEL_IN_STEREO:
+ case AudioFormat.CHANNEL_CONFIGURATION_STEREO:
+ case (AudioFormat.CHANNEL_IN_FRONT | AudioFormat.CHANNEL_IN_BACK):
+ channelCount = 2;
+ break;
+ case AudioFormat.CHANNEL_INVALID:
+ default:
+ loge("getMinBufferSize(): Invalid channel configuration.");
+ return ERROR_BAD_VALUE;
+ }
+
+ int size = native_get_min_buff_size(sampleRateInHz, channelCount, audioFormat);
+ if (size == 0) {
+ return ERROR_BAD_VALUE;
+ }
+ else if (size == -1) {
+ return ERROR;
+ }
+ else {
+ return size;
+ }
+ }
+
+    /**
+     * Returns the audio session ID.
+     *
+     * @return the ID of the audio session this AudioRecord belongs to.
+     */
+    public int getAudioSessionId() {
+        // mSessionId is populated during initialization (native_setup takes a
+        // sessionId out-parameter).
+        return mSessionId;
+    }
+
+    //---------------------------------------------------------
+    // Transport control methods
+    //--------------------
+    /**
+     * Starts recording from the AudioRecord instance.
+     * @throws IllegalStateException if the instance was not successfully initialized
+     */
+    public void startRecording()
+    throws IllegalStateException {
+        if (mState != STATE_INITIALIZED) {
+            throw new IllegalStateException("startRecording() called on an "
+                    + "uninitialized AudioRecord.");
+        }
+
+        // start recording
+        synchronized(mRecordingStateLock) {
+            // Only transition to RECORDING if the native layer accepted the start
+            // request; on failure the state and full-volume handling are untouched.
+            if (native_start(MediaSyncEvent.SYNC_EVENT_NONE, 0) == SUCCESS) {
+                handleFullVolumeRec(true);
+                mRecordingState = RECORDSTATE_RECORDING;
+            }
+        }
+    }
+
+    /**
+     * Starts recording from the AudioRecord instance when the specified synchronization event
+     * occurs on the specified audio session.
+     * @throws IllegalStateException if the instance was not successfully initialized
+     * @param syncEvent event that triggers the capture; must not be null (it is
+     *        dereferenced without a null check and would raise a NullPointerException)
+     * @see MediaSyncEvent
+     */
+    public void startRecording(MediaSyncEvent syncEvent)
+    throws IllegalStateException {
+        if (mState != STATE_INITIALIZED) {
+            throw new IllegalStateException("startRecording() called on an "
+                    + "uninitialized AudioRecord.");
+        }
+
+        // start recording
+        synchronized(mRecordingStateLock) {
+            // As in startRecording(), the state only changes if native_start succeeds.
+            if (native_start(syncEvent.getType(), syncEvent.getAudioSessionId()) == SUCCESS) {
+                handleFullVolumeRec(true);
+                mRecordingState = RECORDSTATE_RECORDING;
+            }
+        }
+    }
+
+    /**
+     * Stops recording.
+     * @throws IllegalStateException if the instance was not successfully initialized
+     */
+    public void stop()
+    throws IllegalStateException {
+        if (mState != STATE_INITIALIZED) {
+            throw new IllegalStateException("stop() called on an uninitialized AudioRecord.");
+        }
+
+        // stop recording
+        synchronized(mRecordingStateLock) {
+            // Full-volume handling is undone before stopping the native recorder —
+            // the reverse of startRecording(), which starts native first.
+            handleFullVolumeRec(false);
+            native_stop();
+            mRecordingState = RECORDSTATE_STOPPED;
+        }
+    }
+
+    // Binder token identifying this client to AudioService for full-volume requests.
+    private final IBinder mICallBack = new Binder();
+
+    /**
+     * Notifies AudioService that a remote-submix full-volume capture is starting
+     * or stopping. No-op unless this record was flagged as submix full volume.
+     * @param starting true when recording is starting, false when stopping
+     */
+    private void handleFullVolumeRec(boolean starting) {
+        if (!mIsSubmixFullVolume) {
+            return;
+        }
+        final IBinder b = ServiceManager.getService(android.content.Context.AUDIO_SERVICE);
+        final IAudioService ias = IAudioService.Stub.asInterface(b);
+        try {
+            ias.forceRemoteSubmixFullVolume(starting, mICallBack);
+        } catch (RemoteException e) {
+            // Best effort: a binder failure here must not abort start/stop.
+            Log.e(TAG, "Error talking to AudioService when handling full submix volume", e);
+        }
+    }
+
+ //---------------------------------------------------------
+ // Audio data supply
+ //--------------------
+ /**
+ * Reads audio data from the audio hardware for recording into a byte array.
+ * The format specified in the AudioRecord constructor should be
+ * {@link AudioFormat#ENCODING_PCM_8BIT} to correspond to the data in the array.
+ * @param audioData the array to which the recorded audio data is written.
+ * @param offsetInBytes index in audioData from which the data is written expressed in bytes.
+ * @param sizeInBytes the number of requested bytes.
+ * @return zero or the positive number of bytes that were read, or one of the following
+ * error codes. The number of bytes will not exceed sizeInBytes.
+ * <ul>
+ * <li>{@link #ERROR_INVALID_OPERATION} if the object isn't properly initialized</li>
+ * <li>{@link #ERROR_BAD_VALUE} if the parameters don't resolve to valid data and indexes</li>
+ * <li>{@link #ERROR_DEAD_OBJECT} if the object is not valid anymore and
+ * needs to be recreated. The dead object error code is not returned if some data was
+ * successfully transferred. In this case, the error is returned at the next read()</li>
+ * <li>{@link #ERROR} in case of other error</li>
+ * </ul>
+ */
+    public int read(@NonNull byte[] audioData, int offsetInBytes, int sizeInBytes) {
+        // Convenience overload: delegates with blocking semantics.
+        return read(audioData, offsetInBytes, sizeInBytes, READ_BLOCKING);
+    }
+
+ /**
+ * Reads audio data from the audio hardware for recording into a byte array.
+ * The format specified in the AudioRecord constructor should be
+ * {@link AudioFormat#ENCODING_PCM_8BIT} to correspond to the data in the array.
+ * The format can be {@link AudioFormat#ENCODING_PCM_16BIT}, but this is deprecated.
+ * @param audioData the array to which the recorded audio data is written.
+ * @param offsetInBytes index in audioData to which the data is written expressed in bytes.
+ * Must not be negative, or cause the data access to go out of bounds of the array.
+ * @param sizeInBytes the number of requested bytes.
+ * Must not be negative, or cause the data access to go out of bounds of the array.
+ * @param readMode one of {@link #READ_BLOCKING}, {@link #READ_NON_BLOCKING}.
+ * <br>With {@link #READ_BLOCKING}, the read will block until all the requested data
+ * is read.
+ * <br>With {@link #READ_NON_BLOCKING}, the read will return immediately after
+ * reading as much audio data as possible without blocking.
+ * @return zero or the positive number of bytes that were read, or one of the following
+ * error codes. The number of bytes will be a multiple of the frame size in bytes
+ * not to exceed sizeInBytes.
+ * <ul>
+ * <li>{@link #ERROR_INVALID_OPERATION} if the object isn't properly initialized</li>
+ * <li>{@link #ERROR_BAD_VALUE} if the parameters don't resolve to valid data and indexes</li>
+ * <li>{@link #ERROR_DEAD_OBJECT} if the object is not valid anymore and
+ * needs to be recreated. The dead object error code is not returned if some data was
+ * successfully transferred. In this case, the error is returned at the next read()</li>
+ * <li>{@link #ERROR} in case of other error</li>
+ * </ul>
+ */
+    public int read(@NonNull byte[] audioData, int offsetInBytes, int sizeInBytes,
+            @ReadMode int readMode) {
+        // ENCODING_PCM_FLOAT data must be read via read(float[], ...), so it is
+        // rejected here alongside the uninitialized state.
+        if (mState != STATE_INITIALIZED || mAudioFormat == AudioFormat.ENCODING_PCM_FLOAT) {
+            return ERROR_INVALID_OPERATION;
+        }
+
+        if ((readMode != READ_BLOCKING) && (readMode != READ_NON_BLOCKING)) {
+            Log.e(TAG, "AudioRecord.read() called with invalid blocking mode");
+            return ERROR_BAD_VALUE;
+        }
+
+        // Bounds check; the (offset + size < 0) test catches int overflow.
+        if ( (audioData == null) || (offsetInBytes < 0 ) || (sizeInBytes < 0)
+                || (offsetInBytes + sizeInBytes < 0)    // detect integer overflow
+                || (offsetInBytes + sizeInBytes > audioData.length)) {
+            return ERROR_BAD_VALUE;
+        }
+
+        return native_read_in_byte_array(audioData, offsetInBytes, sizeInBytes,
+                readMode == READ_BLOCKING);
+    }
+
+ /**
+ * Reads audio data from the audio hardware for recording into a short array.
+ * The format specified in the AudioRecord constructor should be
+ * {@link AudioFormat#ENCODING_PCM_16BIT} to correspond to the data in the array.
+ * @param audioData the array to which the recorded audio data is written.
+ * @param offsetInShorts index in audioData to which the data is written expressed in shorts.
+ * Must not be negative, or cause the data access to go out of bounds of the array.
+ * @param sizeInShorts the number of requested shorts.
+ * Must not be negative, or cause the data access to go out of bounds of the array.
+ * @return zero or the positive number of shorts that were read, or one of the following
+ * error codes. The number of shorts will be a multiple of the channel count not to exceed
+ * sizeInShorts.
+ * <ul>
+ * <li>{@link #ERROR_INVALID_OPERATION} if the object isn't properly initialized</li>
+ * <li>{@link #ERROR_BAD_VALUE} if the parameters don't resolve to valid data and indexes</li>
+ * <li>{@link #ERROR_DEAD_OBJECT} if the object is not valid anymore and
+ * needs to be recreated. The dead object error code is not returned if some data was
+ * successfully transferred. In this case, the error is returned at the next read()</li>
+ * <li>{@link #ERROR} in case of other error</li>
+ * </ul>
+ */
+    public int read(@NonNull short[] audioData, int offsetInShorts, int sizeInShorts) {
+        // Convenience overload: delegates with blocking semantics.
+        return read(audioData, offsetInShorts, sizeInShorts, READ_BLOCKING);
+    }
+
+ /**
+ * Reads audio data from the audio hardware for recording into a short array.
+ * The format specified in the AudioRecord constructor should be
+ * {@link AudioFormat#ENCODING_PCM_16BIT} to correspond to the data in the array.
+ * @param audioData the array to which the recorded audio data is written.
+ * @param offsetInShorts index in audioData from which the data is written expressed in shorts.
+ * Must not be negative, or cause the data access to go out of bounds of the array.
+ * @param sizeInShorts the number of requested shorts.
+ * Must not be negative, or cause the data access to go out of bounds of the array.
+ * @param readMode one of {@link #READ_BLOCKING}, {@link #READ_NON_BLOCKING}.
+ * <br>With {@link #READ_BLOCKING}, the read will block until all the requested data
+ * is read.
+ * <br>With {@link #READ_NON_BLOCKING}, the read will return immediately after
+ * reading as much audio data as possible without blocking.
+ * @return zero or the positive number of shorts that were read, or one of the following
+ * error codes. The number of shorts will be a multiple of the channel count not to exceed
+ * sizeInShorts.
+ * <ul>
+ * <li>{@link #ERROR_INVALID_OPERATION} if the object isn't properly initialized</li>
+ * <li>{@link #ERROR_BAD_VALUE} if the parameters don't resolve to valid data and indexes</li>
+ * <li>{@link #ERROR_DEAD_OBJECT} if the object is not valid anymore and
+ * needs to be recreated. The dead object error code is not returned if some data was
+ * successfully transferred. In this case, the error is returned at the next read()</li>
+ * <li>{@link #ERROR} in case of other error</li>
+ * </ul>
+ */
+    public int read(@NonNull short[] audioData, int offsetInShorts, int sizeInShorts,
+            @ReadMode int readMode) {
+        // Float streams must use the float[] overload; rejected with the
+        // uninitialized state check.
+        if (mState != STATE_INITIALIZED || mAudioFormat == AudioFormat.ENCODING_PCM_FLOAT) {
+            return ERROR_INVALID_OPERATION;
+        }
+
+        if ((readMode != READ_BLOCKING) && (readMode != READ_NON_BLOCKING)) {
+            Log.e(TAG, "AudioRecord.read() called with invalid blocking mode");
+            return ERROR_BAD_VALUE;
+        }
+
+        // Bounds check; the (offset + size < 0) test catches int overflow.
+        if ( (audioData == null) || (offsetInShorts < 0 ) || (sizeInShorts < 0)
+                || (offsetInShorts + sizeInShorts < 0)  // detect integer overflow
+                || (offsetInShorts + sizeInShorts > audioData.length)) {
+            return ERROR_BAD_VALUE;
+        }
+
+        return native_read_in_short_array(audioData, offsetInShorts, sizeInShorts,
+                readMode == READ_BLOCKING);
+    }
+
+ /**
+ * Reads audio data from the audio hardware for recording into a float array.
+ * The format specified in the AudioRecord constructor should be
+ * {@link AudioFormat#ENCODING_PCM_FLOAT} to correspond to the data in the array.
+ * @param audioData the array to which the recorded audio data is written.
+ * @param offsetInFloats index in audioData from which the data is written.
+ * Must not be negative, or cause the data access to go out of bounds of the array.
+ * @param sizeInFloats the number of requested floats.
+ * Must not be negative, or cause the data access to go out of bounds of the array.
+ * @param readMode one of {@link #READ_BLOCKING}, {@link #READ_NON_BLOCKING}.
+ * <br>With {@link #READ_BLOCKING}, the read will block until all the requested data
+ * is read.
+ * <br>With {@link #READ_NON_BLOCKING}, the read will return immediately after
+ * reading as much audio data as possible without blocking.
+ * @return zero or the positive number of floats that were read, or one of the following
+ * error codes. The number of floats will be a multiple of the channel count not to exceed
+ * sizeInFloats.
+ * <ul>
+ * <li>{@link #ERROR_INVALID_OPERATION} if the object isn't properly initialized</li>
+ * <li>{@link #ERROR_BAD_VALUE} if the parameters don't resolve to valid data and indexes</li>
+ * <li>{@link #ERROR_DEAD_OBJECT} if the object is not valid anymore and
+ * needs to be recreated. The dead object error code is not returned if some data was
+ * successfully transferred. In this case, the error is returned at the next read()</li>
+ * <li>{@link #ERROR} in case of other error</li>
+ * </ul>
+ */
+    public int read(@NonNull float[] audioData, int offsetInFloats, int sizeInFloats,
+            @ReadMode int readMode) {
+        if (mState == STATE_UNINITIALIZED) {
+            Log.e(TAG, "AudioRecord.read() called in invalid state STATE_UNINITIALIZED");
+            return ERROR_INVALID_OPERATION;
+        }
+
+        // Unlike the byte/short overloads, float reads are only valid for
+        // ENCODING_PCM_FLOAT streams.
+        if (mAudioFormat != AudioFormat.ENCODING_PCM_FLOAT) {
+            Log.e(TAG, "AudioRecord.read(float[] ...) requires format ENCODING_PCM_FLOAT");
+            return ERROR_INVALID_OPERATION;
+        }
+
+        if ((readMode != READ_BLOCKING) && (readMode != READ_NON_BLOCKING)) {
+            Log.e(TAG, "AudioRecord.read() called with invalid blocking mode");
+            return ERROR_BAD_VALUE;
+        }
+
+        // Bounds check; the (offset + size < 0) test catches int overflow.
+        if ((audioData == null) || (offsetInFloats < 0) || (sizeInFloats < 0)
+                || (offsetInFloats + sizeInFloats < 0)  // detect integer overflow
+                || (offsetInFloats + sizeInFloats > audioData.length)) {
+            return ERROR_BAD_VALUE;
+        }
+
+        return native_read_in_float_array(audioData, offsetInFloats, sizeInFloats,
+                readMode == READ_BLOCKING);
+    }
+
+ /**
+ * Reads audio data from the audio hardware for recording into a direct buffer. If this buffer
+ * is not a direct buffer, this method will always return 0.
+ * Note that the value returned by {@link java.nio.Buffer#position()} on this buffer is
+ * unchanged after a call to this method.
+ * The representation of the data in the buffer will depend on the format specified in
+ * the AudioRecord constructor, and will be native endian.
+ * @param audioBuffer the direct buffer to which the recorded audio data is written.
+ * Data is written to audioBuffer.position().
+ * @param sizeInBytes the number of requested bytes. It is recommended but not enforced
+ * that the number of bytes requested be a multiple of the frame size (sample size in
+ * bytes multiplied by the channel count).
+ * @return zero or the positive number of bytes that were read, or one of the following
+ * error codes. The number of bytes will not exceed sizeInBytes and will be truncated to be
+ * a multiple of the frame size.
+ * <ul>
+ * <li>{@link #ERROR_INVALID_OPERATION} if the object isn't properly initialized</li>
+ * <li>{@link #ERROR_BAD_VALUE} if the parameters don't resolve to valid data and indexes</li>
+ * <li>{@link #ERROR_DEAD_OBJECT} if the object is not valid anymore and
+ * needs to be recreated. The dead object error code is not returned if some data was
+ * successfully transferred. In this case, the error is returned at the next read()</li>
+ * <li>{@link #ERROR} in case of other error</li>
+ * </ul>
+ */
+    public int read(@NonNull ByteBuffer audioBuffer, int sizeInBytes) {
+        // Convenience overload: delegates with blocking semantics.
+        return read(audioBuffer, sizeInBytes, READ_BLOCKING);
+    }
+
+ /**
+ * Reads audio data from the audio hardware for recording into a direct buffer. If this buffer
+ * is not a direct buffer, this method will always return 0.
+ * Note that the value returned by {@link java.nio.Buffer#position()} on this buffer is
+ * unchanged after a call to this method.
+ * The representation of the data in the buffer will depend on the format specified in
+ * the AudioRecord constructor, and will be native endian.
+ * @param audioBuffer the direct buffer to which the recorded audio data is written.
+ * Data is written to audioBuffer.position().
+ * @param sizeInBytes the number of requested bytes. It is recommended but not enforced
+ * that the number of bytes requested be a multiple of the frame size (sample size in
+ * bytes multiplied by the channel count).
+ * @param readMode one of {@link #READ_BLOCKING}, {@link #READ_NON_BLOCKING}.
+ * <br>With {@link #READ_BLOCKING}, the read will block until all the requested data
+ * is read.
+ * <br>With {@link #READ_NON_BLOCKING}, the read will return immediately after
+ * reading as much audio data as possible without blocking.
+ * @return zero or the positive number of bytes that were read, or one of the following
+ * error codes. The number of bytes will not exceed sizeInBytes and will be truncated to be
+ * a multiple of the frame size.
+ * <ul>
+ * <li>{@link #ERROR_INVALID_OPERATION} if the object isn't properly initialized</li>
+ * <li>{@link #ERROR_BAD_VALUE} if the parameters don't resolve to valid data and indexes</li>
+ * <li>{@link #ERROR_DEAD_OBJECT} if the object is not valid anymore and
+ * needs to be recreated. The dead object error code is not returned if some data was
+ * successfully transferred. In this case, the error is returned at the next read()</li>
+ * <li>{@link #ERROR} in case of other error</li>
+ * </ul>
+ */
+    public int read(@NonNull ByteBuffer audioBuffer, int sizeInBytes, @ReadMode int readMode) {
+        if (mState != STATE_INITIALIZED) {
+            return ERROR_INVALID_OPERATION;
+        }
+
+        if ((readMode != READ_BLOCKING) && (readMode != READ_NON_BLOCKING)) {
+            Log.e(TAG, "AudioRecord.read() called with invalid blocking mode");
+            return ERROR_BAD_VALUE;
+        }
+
+        // Direct-buffer validation is left to the native layer; per the javadoc,
+        // a non-direct buffer reads 0 bytes rather than erroring here.
+        if ( (audioBuffer == null) || (sizeInBytes < 0) ) {
+            return ERROR_BAD_VALUE;
+        }
+
+        return native_read_in_direct_buffer(audioBuffer, sizeInBytes, readMode == READ_BLOCKING);
+    }
+
+    //--------------------------------------------------------------------------
+    // Initialization / configuration
+    //--------------------
+    /**
+     * Sets the listener the AudioRecord notifies when a previously set marker is reached or
+     * for each periodic record head position update.
+     * @param listener the listener to notify, or null to clear it
+     */
+    public void setRecordPositionUpdateListener(OnRecordPositionUpdateListener listener) {
+        // Null handler: events are delivered on the looper the AudioRecord was
+        // created in (see the two-argument overload).
+        setRecordPositionUpdateListener(listener, null);
+    }
+
+    /**
+     * Sets the listener the AudioRecord notifies when a previously set marker is reached or
+     * for each periodic record head position update.
+     * Use this method to receive AudioRecord events in the Handler associated with another
+     * thread than the one in which you created the AudioRecord instance.
+     * @param listener the listener to notify, or null to clear it
+     * @param handler the Handler that will receive the event notification messages;
+     *        if null, the looper the AudioRecord was created in is used.
+     */
+    public void setRecordPositionUpdateListener(OnRecordPositionUpdateListener listener,
+            Handler handler) {
+        synchronized (mPositionListenerLock) {
+
+            mPositionListener = listener;
+
+            if (listener != null) {
+                if (handler != null) {
+                    mEventHandler = new NativeEventHandler(this, handler.getLooper());
+                } else {
+                    // no given handler, use the looper the AudioRecord was created in
+                    mEventHandler = new NativeEventHandler(this, mInitializationLooper);
+                }
+            } else {
+                // Clearing the listener also drops the event handler, so pending
+                // native events are ignored (see postEventFromNative()).
+                mEventHandler = null;
+            }
+        }
+
+    }
+
+
+    /**
+     * Sets the marker position at which the listener is called, if set with
+     * {@link #setRecordPositionUpdateListener(OnRecordPositionUpdateListener)} or
+     * {@link #setRecordPositionUpdateListener(OnRecordPositionUpdateListener, Handler)}.
+     * @param markerInFrames marker position expressed in frames
+     * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
+     *    {@link #ERROR_INVALID_OPERATION}
+     */
+    public int setNotificationMarkerPosition(int markerInFrames) {
+        if (mState == STATE_UNINITIALIZED) {
+            return ERROR_INVALID_OPERATION;
+        }
+        // Validation of the marker value itself is performed by the native layer.
+        return native_set_marker_pos(markerInFrames);
+    }
+
+ /**
+ * Returns an {@link AudioDeviceInfo} identifying the current routing of this AudioRecord.
+ * Note: The query is only valid if the AudioRecord is currently recording. If it is not,
+ * <code>getRoutedDevice()</code> will return null.
+ */
+    @Override
+    public AudioDeviceInfo getRoutedDevice() {
+        // A native id of 0 means no current routing (e.g. not recording).
+        final int deviceId = native_getRoutedDeviceId();
+        if (deviceId == 0) {
+            return null;
+        }
+        // Resolve the native id against the current list of input devices.
+        final AudioDeviceInfo[] inputDevices =
+                AudioManager.getDevicesStatic(AudioManager.GET_DEVICES_INPUTS);
+        for (AudioDeviceInfo device : inputDevices) {
+            if (device.getId() == deviceId) {
+                return device;
+            }
+        }
+        return null;
+    }
+
+    /*
+     * Must be called BEFORE adding a routing callback handler: turns on native
+     * device callbacks when the first listener is about to be registered.
+     */
+    private void testEnableNativeRoutingCallbacksLocked() {
+        if (mRoutingChangeListeners.isEmpty()) {
+            native_enableDeviceCallback();
+        }
+    }
+
+    /*
+     * Must be called AFTER removing a routing callback handler: turns off native
+     * device callbacks once the last listener has been removed.
+     */
+    private void testDisableNativeRoutingCallbacksLocked() {
+        if (mRoutingChangeListeners.isEmpty()) {
+            native_disableDeviceCallback();
+        }
+    }
+
+    //--------------------------------------------------------------------------
+    // (Re)Routing Info
+    //--------------------
+    /**
+     * The list of AudioRouting.OnRoutingChangedListener interfaces added (with
+     * {@link AudioRecord#addOnRoutingChangedListener}) by an app to receive
+     * (re)routing notifications.
+     * Each value wraps its listener together with the Handler it should be
+     * invoked on (see {@link NativeRoutingEventHandlerDelegate}).
+     */
+    @GuardedBy("mRoutingChangeListeners")
+    private ArrayMap<AudioRouting.OnRoutingChangedListener,
+            NativeRoutingEventHandlerDelegate> mRoutingChangeListeners = new ArrayMap<>();
+
+    /**
+     * Adds an {@link AudioRouting.OnRoutingChangedListener} to receive notifications of
+     * routing changes on this AudioRecord.
+     * @param listener The {@link AudioRouting.OnRoutingChangedListener} interface to receive
+     * notifications of rerouting events.
+     * @param handler Specifies the {@link Handler} object for the thread on which to execute
+     * the callback. If <code>null</code>, the {@link Handler} associated with the main
+     * {@link Looper} will be used.
+     */
+    @Override
+    public void addOnRoutingChangedListener(AudioRouting.OnRoutingChangedListener listener,
+            android.os.Handler handler) {
+        synchronized (mRoutingChangeListeners) {
+            // Adding the same listener twice is a no-op.
+            if (listener != null && !mRoutingChangeListeners.containsKey(listener)) {
+                // Enable native callbacks before registering the first listener.
+                testEnableNativeRoutingCallbacksLocked();
+                // NOTE(review): the javadoc above promises the main Looper for a null
+                // handler, but the code falls back to the initialization looper — confirm.
+                mRoutingChangeListeners.put(
+                        listener, new NativeRoutingEventHandlerDelegate(this, listener,
+                                handler != null ? handler : new Handler(mInitializationLooper)));
+            }
+        }
+    }
+
+ /**
+ * Removes an {@link AudioRouting.OnRoutingChangedListener} which has been previously added
+ * to receive rerouting notifications.
+ * @param listener The previously added {@link AudioRouting.OnRoutingChangedListener} interface
+ * to remove.
+ */
+    @Override
+    public void removeOnRoutingChangedListener(AudioRouting.OnRoutingChangedListener listener) {
+        synchronized (mRoutingChangeListeners) {
+            // Single map lookup: remove() returns the previous delegate, or null if
+            // the listener was never registered (delegate values are never null, so
+            // null unambiguously means "absent").
+            if (mRoutingChangeListeners.remove(listener) != null) {
+                // Disable native callbacks once the last listener is gone.
+                testDisableNativeRoutingCallbacksLocked();
+            }
+        }
+    }
+
+    //--------------------------------------------------------------------------
+    // (Re)Routing Info
+    //--------------------
+    /**
+     * Defines the interface by which applications can receive notifications of
+     * routing changes for the associated {@link AudioRecord}.
+     *
+     * @deprecated users should switch to the general purpose
+     * {@link AudioRouting.OnRoutingChangedListener} class instead.
+     */
+    @Deprecated
+    public interface OnRoutingChangedListener extends AudioRouting.OnRoutingChangedListener {
+        /**
+         * Called when the routing of an AudioRecord changes from either an
+         * explicit or policy rerouting. Use {@link #getRoutedDevice()} to
+         * retrieve the newly routed-from device.
+         */
+        public void onRoutingChanged(AudioRecord audioRecord);
+
+        // Bridges the generic AudioRouting callback to the AudioRecord-typed one.
+        @Override
+        default public void onRoutingChanged(AudioRouting router) {
+            if (router instanceof AudioRecord) {
+                onRoutingChanged((AudioRecord) router);
+            }
+        }
+    }
+
+    /**
+     * Adds an {@link OnRoutingChangedListener} to receive notifications of routing changes
+     * on this AudioRecord.
+     * @param listener The {@link OnRoutingChangedListener} interface to receive notifications
+     * of rerouting events.
+     * @param handler Specifies the {@link Handler} object for the thread on which to execute
+     * the callback. If <code>null</code>, the {@link Handler} associated with the main
+     * {@link Looper} will be used.
+     * @deprecated users should switch to the general purpose
+     * {@link AudioRouting.OnRoutingChangedListener} class instead.
+     */
+    @Deprecated
+    public void addOnRoutingChangedListener(OnRoutingChangedListener listener,
+            android.os.Handler handler) {
+        // Deprecated overload: delegates to the AudioRouting-typed registration.
+        addOnRoutingChangedListener((AudioRouting.OnRoutingChangedListener) listener, handler);
+    }
+
+    /**
+     * Removes an {@link OnRoutingChangedListener} which has been previously added
+     * to receive rerouting notifications.
+     * @param listener The previously added {@link OnRoutingChangedListener} interface to remove.
+     * @deprecated users should switch to the general purpose
+     * {@link AudioRouting.OnRoutingChangedListener} class instead.
+     */
+    @Deprecated
+    public void removeOnRoutingChangedListener(OnRoutingChangedListener listener) {
+        // Deprecated overload: delegates to the AudioRouting-typed removal.
+        removeOnRoutingChangedListener((AudioRouting.OnRoutingChangedListener) listener);
+    }
+
+    /**
+     * Helper class to handle the forwarding of native events to the appropriate listener
+     * (potentially) handled in a different thread.
+     */
+    private class NativeRoutingEventHandlerDelegate {
+        // Handler on which the routing callback runs; null if no looper was available.
+        private final Handler mHandler;
+
+        NativeRoutingEventHandlerDelegate(final AudioRecord record,
+                final AudioRouting.OnRoutingChangedListener listener,
+                Handler handler) {
+            // find the looper for our new event handler
+            Looper looper;
+            if (handler != null) {
+                looper = handler.getLooper();
+            } else {
+                // no given handler, use the looper the AudioRecord was created in
+                looper = mInitializationLooper;
+            }
+
+            // construct the event handler with this looper
+            if (looper != null) {
+                // implement the event handler delegate
+                mHandler = new Handler(looper) {
+                    @Override
+                    public void handleMessage(Message msg) {
+                        if (record == null) {
+                            return;
+                        }
+                        switch(msg.what) {
+                        case AudioSystem.NATIVE_EVENT_ROUTING_CHANGE:
+                            if (listener != null) {
+                                listener.onRoutingChanged(record);
+                            }
+                            break;
+                        default:
+                            loge("Unknown native event type: " + msg.what);
+                            break;
+                        }
+                    }
+                };
+            } else {
+                // No looper available: routing events for this listener are dropped
+                // (broadcastRoutingChange() skips delegates with a null handler).
+                mHandler = null;
+            }
+        }
+
+        Handler getHandler() {
+            return mHandler;
+        }
+    }
+
+    /**
+     * Sends device list change notification to all listeners.
+     */
+    private void broadcastRoutingChange() {
+        // Reset the audio port generation counter (presumably so listeners querying
+        // routing state observe fresh data — confirm against AudioManager).
+        AudioManager.resetAudioPortGeneration();
+        synchronized (mRoutingChangeListeners) {
+            for (NativeRoutingEventHandlerDelegate delegate : mRoutingChangeListeners.values()) {
+                Handler handler = delegate.getHandler();
+                // Delegates constructed without a looper have a null handler and are skipped.
+                if (handler != null) {
+                    handler.sendEmptyMessage(AudioSystem.NATIVE_EVENT_ROUTING_CHANGE);
+                }
+            }
+        }
+    }
+
+    /**
+     * Sets the period at which the listener is called, if set with
+     * {@link #setRecordPositionUpdateListener(OnRecordPositionUpdateListener)} or
+     * {@link #setRecordPositionUpdateListener(OnRecordPositionUpdateListener, Handler)}.
+     * It is possible for notifications to be lost if the period is too small.
+     * @param periodInFrames update period expressed in frames
+     * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_INVALID_OPERATION}
+     */
+    public int setPositionNotificationPeriod(int periodInFrames) {
+        if (mState == STATE_UNINITIALIZED) {
+            return ERROR_INVALID_OPERATION;
+        }
+        // Validation of the period value itself is performed by the native layer.
+        return native_set_pos_update_period(periodInFrames);
+    }
+
+    //--------------------------------------------------------------------------
+    // Explicit Routing
+    //--------------------
+    // Guarded by synchronized (this); see setPreferredDevice()/getPreferredDevice().
+    private AudioDeviceInfo mPreferredDevice = null;
+
+ /**
+ * Specifies an audio device (via an {@link AudioDeviceInfo} object) to route
+ * the input to this AudioRecord.
+ * @param deviceInfo The {@link AudioDeviceInfo} specifying the audio source.
+ * If deviceInfo is null, default routing is restored.
+ * @return true if successful, false if the specified {@link AudioDeviceInfo} is non-null and
+ * does not correspond to a valid audio input device.
+ */
+    @Override
+    public boolean setPreferredDevice(AudioDeviceInfo deviceInfo) {
+        // Do some validation: only audio input (source) devices may be selected.
+        if (deviceInfo != null && !deviceInfo.isSource()) {
+            return false;
+        }
+
+        // A device id of 0 tells the native layer to restore default routing
+        // (matches the null-deviceInfo contract in the javadoc above).
+        int preferredDeviceId = deviceInfo != null ? deviceInfo.getId() : 0;
+        boolean status = native_setInputDevice(preferredDeviceId);
+        if (status) {
+            // Only record the preference once the native layer has accepted it.
+            synchronized (this) {
+                mPreferredDevice = deviceInfo;
+            }
+        }
+        return status;
+    }
+
+    /**
+     * Returns the selected input specified by {@link #setPreferredDevice}. Note that this
+     * is not guaranteed to correspond to the actual device being used for recording.
+     */
+    @Override
+    public AudioDeviceInfo getPreferredDevice() {
+        // Same lock as the write in setPreferredDevice() for a consistent view.
+        synchronized (this) {
+            return mPreferredDevice;
+        }
+    }
+
+    //---------------------------------------------------------
+    // Interface definitions
+    //--------------------
+    /**
+     * Interface definition for a callback to be invoked when an AudioRecord has
+     * reached a notification marker set by {@link AudioRecord#setNotificationMarkerPosition(int)}
+     * or for periodic updates on the progress of the record head, as set by
+     * {@link AudioRecord#setPositionNotificationPeriod(int)}.
+     */
+    public interface OnRecordPositionUpdateListener {
+        /**
+         * Called on the listener to notify it that the previously set marker has been reached
+         * by the recording head.
+         * @param recorder the AudioRecord that reached the marker
+         */
+        void onMarkerReached(AudioRecord recorder);
+
+        /**
+         * Called on the listener to periodically notify it that the record head has reached
+         * a multiple of the notification period.
+         * @param recorder the AudioRecord whose position period elapsed
+         */
+        void onPeriodicNotification(AudioRecord recorder);
+    }
+
+
+
+    //---------------------------------------------------------
+    // Inner classes
+    //--------------------
+
+    /**
+     * Helper class to handle the forwarding of native events to the appropriate listener
+     * (potentially) handled in a different thread.
+     */
+    private class NativeEventHandler extends Handler {
+        private final AudioRecord mAudioRecord;
+
+        NativeEventHandler(AudioRecord recorder, Looper looper) {
+            super(looper);
+            mAudioRecord = recorder;
+        }
+
+        @Override
+        public void handleMessage(Message msg) {
+            // Snapshot the listener under its lock; it may be cleared concurrently
+            // by setRecordPositionUpdateListener().
+            OnRecordPositionUpdateListener listener = null;
+            synchronized (mPositionListenerLock) {
+                listener = mAudioRecord.mPositionListener;
+            }
+
+            switch (msg.what) {
+            case NATIVE_EVENT_MARKER:
+                if (listener != null) {
+                    listener.onMarkerReached(mAudioRecord);
+                }
+                break;
+            case NATIVE_EVENT_NEW_POS:
+                if (listener != null) {
+                    listener.onPeriodicNotification(mAudioRecord);
+                }
+                break;
+            default:
+                loge("Unknown native event type: " + msg.what);
+                break;
+            }
+        }
+    }
+
+    //---------------------------------------------------------
+    // Java methods called from the native side
+    //--------------------
+    @SuppressWarnings("unused")
+    private static void postEventFromNative(Object audiorecord_ref,
+            int what, int arg1, int arg2, Object obj) {
+        //logd("Event posted from the native side: event="+ what + " args="+ arg1+" "+arg2);
+        // The native layer holds only a weak reference; the recorder may already
+        // have been garbage collected, in which case the event is dropped.
+        AudioRecord recorder = (AudioRecord)((WeakReference)audiorecord_ref).get();
+        if (recorder == null) {
+            return;
+        }
+
+        // Routing changes bypass the position-listener handler and fan out to the
+        // routing listeners directly.
+        if (what == AudioSystem.NATIVE_EVENT_ROUTING_CHANGE) {
+            recorder.broadcastRoutingChange();
+            return;
+        }
+
+        // No event handler (no position listener registered): event is dropped.
+        if (recorder.mEventHandler != null) {
+            Message m =
+                    recorder.mEventHandler.obtainMessage(what, arg1, arg2, obj);
+            recorder.mEventHandler.sendMessage(m);
+        }
+
+    }
+
+
+ //---------------------------------------------------------
+ // Native methods called from the Java side
+ //--------------------
+
+ private native final int native_setup(Object audiorecord_this,
+ Object /*AudioAttributes*/ attributes,
+ int[] sampleRate, int channelMask, int channelIndexMask, int audioFormat,
+ int buffSizeInBytes, int[] sessionId, String opPackageName,
+ long nativeRecordInJavaObj);
+
+ // TODO remove: implementation calls directly into implementation of native_release()
+ private native final void native_finalize();
+
+ /**
+ * @hide
+ */
+ public native final void native_release();
+
+ private native final int native_start(int syncEvent, int sessionId);
+
+ private native final void native_stop();
+
+ private native final int native_read_in_byte_array(byte[] audioData,
+ int offsetInBytes, int sizeInBytes, boolean isBlocking);
+
+ private native final int native_read_in_short_array(short[] audioData,
+ int offsetInShorts, int sizeInShorts, boolean isBlocking);
+
+ private native final int native_read_in_float_array(float[] audioData,
+ int offsetInFloats, int sizeInFloats, boolean isBlocking);
+
+ private native final int native_read_in_direct_buffer(Object jBuffer,
+ int sizeInBytes, boolean isBlocking);
+
+ private native final int native_get_buffer_size_in_frames();
+
+ private native final int native_set_marker_pos(int marker);
+ private native final int native_get_marker_pos();
+
+ private native final int native_set_pos_update_period(int updatePeriod);
+ private native final int native_get_pos_update_period();
+
+ static private native final int native_get_min_buff_size(
+ int sampleRateInHz, int channelCount, int audioFormat);
+
+ private native final boolean native_setInputDevice(int deviceId);
+ private native final int native_getRoutedDeviceId();
+ private native final void native_enableDeviceCallback();
+ private native final void native_disableDeviceCallback();
+
+ private native final int native_get_timestamp(@NonNull AudioTimestamp outTimestamp,
+ @AudioTimestamp.Timebase int timebase);
+
+    //---------------------------------------------------------
+    // Utility methods
+    //------------------
+
+    // Logs a debug message under the class tag.
+    private static void logd(String msg) {
+        Log.d(TAG, msg);
+    }
+
+    // Logs an error message under the class tag.
+    private static void loge(String msg) {
+        Log.e(TAG, msg);
+    }
+}
diff --git a/android/media/AudioRecordRoutingProxy.java b/android/media/AudioRecordRoutingProxy.java
new file mode 100644
index 00000000..b0c19e4d
--- /dev/null
+++ b/android/media/AudioRecordRoutingProxy.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2008 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
/**
 * An AudioRecord connected to a native (C/C++) AudioRecord instance, which allows access
 * only to routing methods.
 */
class AudioRecordRoutingProxy extends AudioRecord {
    /**
     * A constructor which explicitly connects a native (C++) AudioRecord so that only its
     * routing methods are exposed through this proxy.
     * @param nativeRecordInJavaObj A C/C++ pointer to a native AudioRecord
     *   (associated with an OpenSL ES recorder).
     */
    public AudioRecordRoutingProxy(long nativeRecordInJavaObj) {
        super(nativeRecordInJavaObj);
    }
}
diff --git a/android/media/AudioRecordingConfiguration.java b/android/media/AudioRecordingConfiguration.java
new file mode 100644
index 00000000..984c5542
--- /dev/null
+++ b/android/media/AudioRecordingConfiguration.java
@@ -0,0 +1,284 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.util.Log;
+
+import java.io.PrintWriter;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.util.ArrayList;
+import java.util.Objects;
+
+/**
+ * The AudioRecordingConfiguration class collects the information describing an audio recording
+ * session.
+ * <p>Direct polling (see {@link AudioManager#getActiveRecordingConfigurations()}) or callback
+ * (see {@link AudioManager#registerAudioRecordingCallback(android.media.AudioManager.AudioRecordingCallback, android.os.Handler)}
+ * methods are ways to receive information about the current recording configuration of the device.
+ * <p>An audio recording configuration contains information about the recording format as used by
+ * the application ({@link #getClientFormat()}, as well as the recording format actually used by
+ * the device ({@link #getFormat()}). The two recording formats may, for instance, be at different
+ * sampling rates due to hardware limitations (e.g. application recording at 44.1kHz whereas the
+ * device always records at 48kHz, and the Android framework resamples for the application).
+ * <p>The configuration also contains the use case for which audio is recorded
+ * ({@link #getClientAudioSource()}), enabling the ability to distinguish between different
+ * activities such as ongoing voice recognition or camcorder recording.
+ *
+ */
+public final class AudioRecordingConfiguration implements Parcelable {
+ private final static String TAG = new String("AudioRecordingConfiguration");
+
+ private final int mSessionId;
+
+ private final int mClientSource;
+
+ private final AudioFormat mDeviceFormat;
+ private final AudioFormat mClientFormat;
+
+ @NonNull private final String mClientPackageName;
+ private final int mClientUid;
+
+ private final int mPatchHandle;
+
+ /**
+ * @hide
+ */
+ public AudioRecordingConfiguration(int uid, int session, int source, AudioFormat clientFormat,
+ AudioFormat devFormat, int patchHandle, String packageName) {
+ mClientUid = uid;
+ mSessionId = session;
+ mClientSource = source;
+ mClientFormat = clientFormat;
+ mDeviceFormat = devFormat;
+ mPatchHandle = patchHandle;
+ mClientPackageName = packageName;
+ }
+
+ /**
+ * @hide
+ * For AudioService dump
+ * @param pw
+ */
+ public void dump(PrintWriter pw) {
+ pw.println(" " + toLogFriendlyString(this));
+ }
+
+ /**
+ * @hide
+ */
+ public static String toLogFriendlyString(AudioRecordingConfiguration arc) {
+ return new String("session:" + arc.mSessionId
+ + " -- source:" + MediaRecorder.toLogFriendlyAudioSource(arc.mClientSource)
+ + " -- uid:" + arc.mClientUid
+ + " -- patch:" + arc.mPatchHandle
+ + " -- pack:" + arc.mClientPackageName
+ + " -- format client=" + arc.mClientFormat.toLogFriendlyString()
+ + ", dev=" + arc.mDeviceFormat.toLogFriendlyString());
+ }
+
+ // Note that this method is called server side, so no "privileged" information is ever sent
+ // to a client that is not supposed to have access to it.
+ /**
+ * @hide
+ * Creates a copy of the recording configuration that is stripped of any data enabling
+ * identification of which application it is associated with ("anonymized").
+ * @param in
+ */
+ public static AudioRecordingConfiguration anonymizedCopy(AudioRecordingConfiguration in) {
+ return new AudioRecordingConfiguration( /*anonymized uid*/ -1,
+ in.mSessionId, in.mClientSource, in.mClientFormat,
+ in.mDeviceFormat, in.mPatchHandle, "" /*empty package name*/);
+ }
+
+ // matches the sources that return false in MediaRecorder.isSystemOnlyAudioSource(source)
+ /** @hide */
+ @IntDef({
+ MediaRecorder.AudioSource.DEFAULT,
+ MediaRecorder.AudioSource.MIC,
+ MediaRecorder.AudioSource.VOICE_UPLINK,
+ MediaRecorder.AudioSource.VOICE_DOWNLINK,
+ MediaRecorder.AudioSource.VOICE_CALL,
+ MediaRecorder.AudioSource.CAMCORDER,
+ MediaRecorder.AudioSource.VOICE_RECOGNITION,
+ MediaRecorder.AudioSource.VOICE_COMMUNICATION,
+ MediaRecorder.AudioSource.UNPROCESSED
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface AudioSource {}
+
+ // documented return values match the sources that return false
+ // in MediaRecorder.isSystemOnlyAudioSource(source)
+ /**
+ * Returns the audio source being used for the recording.
+ * @return one of {@link MediaRecorder.AudioSource#DEFAULT},
+ * {@link MediaRecorder.AudioSource#MIC},
+ * {@link MediaRecorder.AudioSource#VOICE_UPLINK},
+ * {@link MediaRecorder.AudioSource#VOICE_DOWNLINK},
+ * {@link MediaRecorder.AudioSource#VOICE_CALL},
+ * {@link MediaRecorder.AudioSource#CAMCORDER},
+ * {@link MediaRecorder.AudioSource#VOICE_RECOGNITION},
+ * {@link MediaRecorder.AudioSource#VOICE_COMMUNICATION},
+ * {@link MediaRecorder.AudioSource#UNPROCESSED}.
+ */
+ public @AudioSource int getClientAudioSource() { return mClientSource; }
+
+ /**
+ * Returns the session number of the recording, see {@link AudioRecord#getAudioSessionId()}.
+ * @return the session number.
+ */
+ public int getClientAudioSessionId() { return mSessionId; }
+
+ /**
+ * Returns the audio format at which audio is recorded on this Android device.
+ * Note that it may differ from the client application recording format
+ * (see {@link #getClientFormat()}).
+ * @return the device recording format
+ */
+ public AudioFormat getFormat() { return mDeviceFormat; }
+
+ /**
+ * Returns the audio format at which the client application is recording audio.
+ * Note that it may differ from the actual recording format (see {@link #getFormat()}).
+ * @return the recording format
+ */
+ public AudioFormat getClientFormat() { return mClientFormat; }
+
+ /**
+ * @pending for SystemApi
+ * Returns the package name of the application performing the recording.
+ * Where there are multiple packages sharing the same user id through the "sharedUserId"
+ * mechanism, only the first one with that id will be returned
+ * (see {@link PackageManager#getPackagesForUid(int)}).
+ * <p>This information is only available if the caller has the
+ * {@link android.Manifest.permission.MODIFY_AUDIO_ROUTING} permission.
+ * <br>When called without the permission, the result is an empty string.
+ * @return the package name
+ */
+ public String getClientPackageName() { return mClientPackageName; }
+
+ /**
+ * @pending for SystemApi
+ * Returns the user id of the application performing the recording.
+ * <p>This information is only available if the caller has the
+ * {@link android.Manifest.permission.MODIFY_AUDIO_ROUTING}
+ * permission.
+ * <br>The result is -1 without the permission.
+ * @return the user id
+ */
+ public int getClientUid() { return mClientUid; }
+
+ /**
+ * Returns information about the audio input device used for this recording.
+ * @return the audio recording device or null if this information cannot be retrieved
+ */
+ public AudioDeviceInfo getAudioDevice() {
+ // build the AudioDeviceInfo from the patch handle
+ ArrayList<AudioPatch> patches = new ArrayList<AudioPatch>();
+ if (AudioManager.listAudioPatches(patches) != AudioManager.SUCCESS) {
+ Log.e(TAG, "Error retrieving list of audio patches");
+ return null;
+ }
+ for (int i = 0 ; i < patches.size() ; i++) {
+ final AudioPatch patch = patches.get(i);
+ if (patch.id() == mPatchHandle) {
+ final AudioPortConfig[] sources = patch.sources();
+ if ((sources != null) && (sources.length > 0)) {
+ // not supporting multiple sources, so just look at the first source
+ final int devId = sources[0].port().id();
+ final AudioDeviceInfo[] devices =
+ AudioManager.getDevicesStatic(AudioManager.GET_DEVICES_INPUTS);
+ for (int j = 0; j < devices.length; j++) {
+ if (devices[j].getId() == devId) {
+ return devices[j];
+ }
+ }
+ }
+ // patch handle is unique, there won't be another with the same handle
+ break;
+ }
+ }
+ Log.e(TAG, "Couldn't find device for recording, did recording end already?");
+ return null;
+ }
+
+ public static final Parcelable.Creator<AudioRecordingConfiguration> CREATOR
+ = new Parcelable.Creator<AudioRecordingConfiguration>() {
+ /**
+ * Rebuilds an AudioRecordingConfiguration previously stored with writeToParcel().
+ * @param p Parcel object to read the AudioRecordingConfiguration from
+ * @return a new AudioRecordingConfiguration created from the data in the parcel
+ */
+ public AudioRecordingConfiguration createFromParcel(Parcel p) {
+ return new AudioRecordingConfiguration(p);
+ }
+ public AudioRecordingConfiguration[] newArray(int size) {
+ return new AudioRecordingConfiguration[size];
+ }
+ };
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(mSessionId, mClientSource);
+ }
+
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ @Override
+ public void writeToParcel(Parcel dest, int flags) {
+ dest.writeInt(mSessionId);
+ dest.writeInt(mClientSource);
+ mClientFormat.writeToParcel(dest, 0);
+ mDeviceFormat.writeToParcel(dest, 0);
+ dest.writeInt(mPatchHandle);
+ dest.writeString(mClientPackageName);
+ dest.writeInt(mClientUid);
+ }
+
+ private AudioRecordingConfiguration(Parcel in) {
+ mSessionId = in.readInt();
+ mClientSource = in.readInt();
+ mClientFormat = AudioFormat.CREATOR.createFromParcel(in);
+ mDeviceFormat = AudioFormat.CREATOR.createFromParcel(in);
+ mPatchHandle = in.readInt();
+ mClientPackageName = in.readString();
+ mClientUid = in.readInt();
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) return true;
+ if (o == null || !(o instanceof AudioRecordingConfiguration)) return false;
+
+ AudioRecordingConfiguration that = (AudioRecordingConfiguration) o;
+
+ return ((mClientUid == that.mClientUid)
+ && (mSessionId == that.mSessionId)
+ && (mClientSource == that.mClientSource)
+ && (mPatchHandle == that.mPatchHandle)
+ && (mClientFormat.equals(that.mClientFormat))
+ && (mDeviceFormat.equals(that.mDeviceFormat))
+ && (mClientPackageName.equals(that.mClientPackageName)));
+ }
+}
diff --git a/android/media/AudioRoutesInfo.java b/android/media/AudioRoutesInfo.java
new file mode 100644
index 00000000..83cd797a
--- /dev/null
+++ b/android/media/AudioRoutesInfo.java
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.text.TextUtils;
+
+/**
+ * Information available from AudioService about the current routes.
+ * @hide
+ */
+public class AudioRoutesInfo implements Parcelable {
+ public static final int MAIN_SPEAKER = 0;
+ public static final int MAIN_HEADSET = 1<<0;
+ public static final int MAIN_HEADPHONES = 1<<1;
+ public static final int MAIN_DOCK_SPEAKERS = 1<<2;
+ public static final int MAIN_HDMI = 1<<3;
+ public static final int MAIN_USB = 1<<4;
+
+ public CharSequence bluetoothName;
+ public int mainType = MAIN_SPEAKER;
+
+ public AudioRoutesInfo() {
+ }
+
+ public AudioRoutesInfo(AudioRoutesInfo o) {
+ bluetoothName = o.bluetoothName;
+ mainType = o.mainType;
+ }
+
+ AudioRoutesInfo(Parcel src) {
+ bluetoothName = TextUtils.CHAR_SEQUENCE_CREATOR.createFromParcel(src);
+ mainType = src.readInt();
+ }
+
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ @Override
+ public String toString() {
+ return getClass().getSimpleName() + "{ type=" + typeToString(mainType)
+ + (TextUtils.isEmpty(bluetoothName) ? "" : ", bluetoothName=" + bluetoothName)
+ + " }";
+ }
+
+ private static String typeToString(int type) {
+ if (type == MAIN_SPEAKER) return "SPEAKER";
+ if ((type & MAIN_HEADSET) != 0) return "HEADSET";
+ if ((type & MAIN_HEADPHONES) != 0) return "HEADPHONES";
+ if ((type & MAIN_DOCK_SPEAKERS) != 0) return "DOCK_SPEAKERS";
+ if ((type & MAIN_HDMI) != 0) return "HDMI";
+ if ((type & MAIN_USB) != 0) return "USB";
+ return Integer.toHexString(type);
+ }
+
+ @Override
+ public void writeToParcel(Parcel dest, int flags) {
+ TextUtils.writeToParcel(bluetoothName, dest, flags);
+ dest.writeInt(mainType);
+ }
+
+ public static final Parcelable.Creator<AudioRoutesInfo> CREATOR
+ = new Parcelable.Creator<AudioRoutesInfo>() {
+ public AudioRoutesInfo createFromParcel(Parcel in) {
+ return new AudioRoutesInfo(in);
+ }
+
+ public AudioRoutesInfo[] newArray(int size) {
+ return new AudioRoutesInfo[size];
+ }
+ };
+}
diff --git a/android/media/AudioRouting.java b/android/media/AudioRouting.java
new file mode 100644
index 00000000..26fa631a
--- /dev/null
+++ b/android/media/AudioRouting.java
@@ -0,0 +1,78 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.os.Handler;
+import android.os.Looper;
+
/**
 * AudioRouting defines an interface for controlling routing and routing notifications in
 * AudioTrack and AudioRecord objects.
 */
public interface AudioRouting {
    /**
     * Specifies an audio device (via an {@link AudioDeviceInfo} object) to route
     * the output/input to/from.
     * @param deviceInfo The {@link AudioDeviceInfo} specifying the audio sink or source.
     *  If deviceInfo is null, default routing is restored.
     * @return true if successful, false if the specified {@link AudioDeviceInfo} is non-null and
     *  does not correspond to a valid audio device.
     */
    public boolean setPreferredDevice(AudioDeviceInfo deviceInfo);

    /**
     * Returns the selected output/input specified by {@link #setPreferredDevice}. Note that this
     * is not guaranteed to correspond to the actual device being used for playback/recording.
     * @return the preferred device, or null if none has been set
     */
    public AudioDeviceInfo getPreferredDevice();

    /**
     * Returns an {@link AudioDeviceInfo} identifying the current routing of this
     * AudioTrack/AudioRecord.
     * Note: The query is only valid if the AudioTrack/AudioRecord is currently playing.
     * If it is not, <code>getRoutedDevice()</code> will return null.
     * @return the currently routed device, or null
     */
    public AudioDeviceInfo getRoutedDevice();

    /**
     * Adds an {@link AudioRouting.OnRoutingChangedListener} to receive notifications of routing
     * changes on this AudioTrack/AudioRecord.
     * @param listener The {@link AudioRouting.OnRoutingChangedListener} interface to receive
     * notifications of rerouting events.
     * @param handler  Specifies the {@link Handler} object for the thread on which to execute
     * the callback. If <code>null</code>, the {@link Handler} associated with the main
     * {@link Looper} will be used.
     */
    public void addOnRoutingChangedListener(OnRoutingChangedListener listener,
            Handler handler);

    /**
     * Removes an {@link AudioRouting.OnRoutingChangedListener} which has been previously added
     * to receive rerouting notifications.
     * @param listener The previously added {@link AudioRouting.OnRoutingChangedListener} interface
     * to remove.
     */
    public void removeOnRoutingChangedListener(OnRoutingChangedListener listener);

    /**
     * Defines the interface by which applications can receive notifications of routing
     * changes for the associated {@link AudioRouting}.
     */
    public interface OnRoutingChangedListener {
        public void onRoutingChanged(AudioRouting router);
    }
}
diff --git a/android/media/AudioSystem.java b/android/media/AudioSystem.java
new file mode 100644
index 00000000..e56944df
--- /dev/null
+++ b/android/media/AudioSystem.java
@@ -0,0 +1,925 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.content.Context;
+import android.content.pm.PackageManager;
+import android.media.audiopolicy.AudioMix;
+import android.util.Log;
+
+import java.util.ArrayList;
+
+/* IF YOU CHANGE ANY OF THE CONSTANTS IN THIS FILE, DO NOT FORGET
+ * TO UPDATE THE CORRESPONDING NATIVE GLUE AND AudioManager.java.
+ * THANK YOU FOR YOUR COOPERATION.
+ */
+
+/**
+ * @hide
+ */
+public class AudioSystem
+{
    private static final String TAG = "AudioSystem";
    /* These values must be kept in sync with system/audio.h */
    /*
     * If these are modified, please also update Settings.System.VOLUME_SETTINGS
     * and attrs.xml and AudioManager.java.
     */
    /** Used to identify the default audio stream volume */
    public static final int STREAM_DEFAULT = -1;
    /** Used to identify the volume of audio streams for phone calls */
    public static final int STREAM_VOICE_CALL = 0;
    /** Used to identify the volume of audio streams for system sounds */
    public static final int STREAM_SYSTEM = 1;
    /** Used to identify the volume of audio streams for the phone ring and message alerts */
    public static final int STREAM_RING = 2;
    /** Used to identify the volume of audio streams for music playback */
    public static final int STREAM_MUSIC = 3;
    /** Used to identify the volume of audio streams for alarms */
    public static final int STREAM_ALARM = 4;
    /** Used to identify the volume of audio streams for notifications */
    public static final int STREAM_NOTIFICATION = 5;
    /** Used to identify the volume of audio streams for phone calls when connected on bluetooth */
    public static final int STREAM_BLUETOOTH_SCO = 6;
    /** Used to identify the volume of audio streams for enforced system sounds in certain
     * countries (e.g camera in Japan) */
    public static final int STREAM_SYSTEM_ENFORCED = 7;
    /** Used to identify the volume of audio streams for DTMF tones */
    public static final int STREAM_DTMF = 8;
    /** Used to identify the volume of audio streams exclusively transmitted through the
     * speaker (TTS) of the device */
    public static final int STREAM_TTS = 9;
    /** Used to identify the volume of audio streams for accessibility prompts */
    public static final int STREAM_ACCESSIBILITY = 10;
    /**
     * @deprecated Use {@link #getNumStreamTypes()} instead.
     */
    public static final int NUM_STREAMS = 5;

    // Expose only the getter method publicly so we can change it in the future
    private static final int NUM_STREAM_TYPES = 11;
    public static final int getNumStreamTypes() { return NUM_STREAM_TYPES; }

    // Indexed by the STREAM_* constants above (STREAM_VOICE_CALL == 0, ...); keep in order.
    public static final String[] STREAM_NAMES = new String[] {
        "STREAM_VOICE_CALL",
        "STREAM_SYSTEM",
        "STREAM_RING",
        "STREAM_MUSIC",
        "STREAM_ALARM",
        "STREAM_NOTIFICATION",
        "STREAM_BLUETOOTH_SCO",
        "STREAM_SYSTEM_ENFORCED",
        "STREAM_DTMF",
        "STREAM_TTS",
        "STREAM_ACCESSIBILITY"
    };
+
    /*
     * Sets the microphone mute on or off.
     *
     * @param on set <var>true</var> to mute the microphone;
     *           <var>false</var> to turn mute off
     * @return command completion status see AUDIO_STATUS_OK, see AUDIO_STATUS_ERROR
     */
    public static native int muteMicrophone(boolean on);

    /*
     * Checks whether the microphone mute is on or off.
     *
     * @return true if microphone is muted, false if it's not
     */
    public static native boolean isMicrophoneMuted();

    /* modes for setPhoneState, must match AudioSystem.h audio_mode */
    public static final int MODE_INVALID = -2;
    public static final int MODE_CURRENT = -1;
    public static final int MODE_NORMAL = 0;
    public static final int MODE_RINGTONE = 1;
    public static final int MODE_IN_CALL = 2;
    public static final int MODE_IN_COMMUNICATION = 3;
    // Count of the valid telephony modes (MODE_NORMAL..MODE_IN_COMMUNICATION).
    public static final int NUM_MODES = 4;
+
+ public static String modeToString(int mode) {
+ switch (mode) {
+ case MODE_CURRENT: return "MODE_CURRENT";
+ case MODE_IN_CALL: return "MODE_IN_CALL";
+ case MODE_IN_COMMUNICATION: return "MODE_IN_COMMUNICATION";
+ case MODE_INVALID: return "MODE_INVALID";
+ case MODE_NORMAL: return "MODE_NORMAL";
+ case MODE_RINGTONE: return "MODE_RINGTONE";
+ default: return "unknown mode (" + mode + ")";
+ }
+ }
+
    /* Routing bits for the former setRouting/getRouting API */
    /** @deprecated */
    @Deprecated public static final int ROUTE_EARPIECE = (1 << 0);
    /** @deprecated */
    @Deprecated public static final int ROUTE_SPEAKER = (1 << 1);
    /** @deprecated use {@link #ROUTE_BLUETOOTH_SCO} */
    @Deprecated public static final int ROUTE_BLUETOOTH = (1 << 2);
    // NOTE: ROUTE_BLUETOOTH above is a legacy alias; it deliberately shares (1 << 2) with
    // ROUTE_BLUETOOTH_SCO below.
    /** @deprecated */
    @Deprecated public static final int ROUTE_BLUETOOTH_SCO = (1 << 2);
    /** @deprecated */
    @Deprecated public static final int ROUTE_HEADSET = (1 << 3);
    /** @deprecated */
    @Deprecated public static final int ROUTE_BLUETOOTH_A2DP = (1 << 4);
    /** @deprecated */
    @Deprecated public static final int ROUTE_ALL = 0xFFFFFFFF;

    // Keep in sync with system/media/audio/include/system/audio.h
    public static final int AUDIO_SESSION_ALLOCATE = 0;

    /*
     * Checks whether the specified stream type is active.
     *
     * return true if any track playing on this stream is active.
     */
    public static native boolean isStreamActive(int stream, int inPastMs);

    /*
     * Checks whether the specified stream type is active on a remotely connected device. The notion
     * of what constitutes a remote device is enforced by the audio policy manager of the platform.
     *
     * return true if any track playing on this stream is active on a remote device.
     */
    public static native boolean isStreamActiveRemotely(int stream, int inPastMs);

    /*
     * Checks whether the specified audio source is active.
     *
     * return true if any recorder using this source is currently recording
     */
    public static native boolean isSourceActive(int source);

    /*
     * Returns a new unused audio session ID
     */
    public static native int newAudioSessionId();

    /*
     * Returns a new unused audio player ID
     */
    public static native int newAudioPlayerId();


    /*
     * Sets a group of generic audio configuration parameters. The use of these parameters
     * is platform dependent, see libaudio
     *
     * param keyValuePairs list of parameters key value pairs in the form:
     *    key1=value1;key2=value2;...
     */
    public static native int setParameters(String keyValuePairs);

    /*
     * Gets a group of generic audio configuration parameters. The use of these parameters
     * is platform dependent, see libaudio
     *
     * param keys list of parameters
     * return value: list of parameters key value pairs in the form:
     *    key1=value1;key2=value2;...
     */
    public static native String getParameters(String keys);
+
    // These match the enum AudioError in frameworks/base/core/jni/android_media_AudioSystem.cpp
    /* Command successful or Media server restarted. see ErrorCallback */
    public static final int AUDIO_STATUS_OK = 0;
    /* Command failed or unspecified audio error. see ErrorCallback */
    public static final int AUDIO_STATUS_ERROR = 1;
    /* Media server died. see ErrorCallback */
    public static final int AUDIO_STATUS_SERVER_DIED = 100;

    // Registered error callback; guarded by the AudioSystem.class lock.
    private static ErrorCallback mErrorCallback;

    /*
     * Handles the audio error callback.
     */
    public interface ErrorCallback
    {
        /*
         * Callback for audio server errors.
         * param error error code:
         * - AUDIO_STATUS_OK
         * - AUDIO_STATUS_SERVER_DIED
         * - AUDIO_STATUS_ERROR
         */
        void onError(int error);
    };
+
+ /*
+ * Registers a callback to be invoked when an error occurs.
+ * @param cb the callback to run
+ */
+ public static void setErrorCallback(ErrorCallback cb)
+ {
+ synchronized (AudioSystem.class) {
+ mErrorCallback = cb;
+ if (cb != null) {
+ cb.onError(checkAudioFlinger());
+ }
+ }
+ }
+
+ private static void errorCallbackFromNative(int error)
+ {
+ ErrorCallback errorCallback = null;
+ synchronized (AudioSystem.class) {
+ if (mErrorCallback != null) {
+ errorCallback = mErrorCallback;
+ }
+ }
+ if (errorCallback != null) {
+ errorCallback.onError(error);
+ }
+ }
+
    /**
     * Handles events from the audio policy manager about dynamic audio policies
     * @see android.media.audiopolicy.AudioPolicy
     */
    public interface DynamicPolicyCallback
    {
        void onDynamicPolicyMixStateUpdate(String regId, int state);
    }

    //keep in sync with include/media/AudioPolicy.h
    private final static int DYNAMIC_POLICY_EVENT_MIX_STATE_UPDATE = 0;

    // Registered dynamic-policy callback; guarded by the AudioSystem.class lock.
    private static DynamicPolicyCallback sDynPolicyCallback;

    // Stores the callback and registers the native-side dispatcher; both steps happen
    // under the same lock so callers observe them together.
    public static void setDynamicPolicyCallback(DynamicPolicyCallback cb)
    {
        synchronized (AudioSystem.class) {
            sDynPolicyCallback = cb;
            native_register_dynamic_policy_callback();
        }
    }
+
+ private static void dynamicPolicyCallbackFromNative(int event, String regId, int val)
+ {
+ DynamicPolicyCallback cb = null;
+ synchronized (AudioSystem.class) {
+ if (sDynPolicyCallback != null) {
+ cb = sDynPolicyCallback;
+ }
+ }
+ if (cb != null) {
+ switch(event) {
+ case DYNAMIC_POLICY_EVENT_MIX_STATE_UPDATE:
+ cb.onDynamicPolicyMixStateUpdate(regId, val);
+ break;
+ default:
+ Log.e(TAG, "dynamicPolicyCallbackFromNative: unknown event " + event);
+ }
+ }
+ }
+
    /**
     * Handles events from the audio policy manager about recording events
     * @see android.media.AudioManager.AudioRecordingCallback
     */
    public interface AudioRecordingCallback
    {
        /**
         * Callback for recording activity notifications events
         * @param event
         * @param uid uid of the client app performing the recording
         * @param session
         * @param source
         * @param recordingFormat an array of ints containing respectively the client and device
         *    recording configurations (2*3 ints), followed by the patch handle:
         *    index 0: client format
         *          1: client channel mask
         *          2: client sample rate
         *          3: device format
         *          4: device channel mask
         *          5: device sample rate
         *          6: patch handle
         * @param packName package name of the client app performing the recording. NOT SUPPORTED
         */
        void onRecordingConfigurationChanged(int event, int uid, int session, int source,
                int[] recordingFormat, String packName);
    }

    // Registered recording callback; guarded by the AudioSystem.class lock.
    private static AudioRecordingCallback sRecordingCallback;

    // Stores the callback and registers the native-side dispatcher; both steps happen
    // under the same lock so callers observe them together.
    public static void setRecordingCallback(AudioRecordingCallback cb) {
        synchronized (AudioSystem.class) {
            sRecordingCallback = cb;
            native_register_recording_callback();
        }
    }
+
+ /**
+ * Callback from native for recording configuration updates.
+ * @param event
+ * @param session
+ * @param source
+ * @param recordingFormat see
+ * {@link AudioRecordingCallback#onRecordingConfigurationChanged(int, int, int, int, int[])}
+ * for the description of the record format.
+ */
+ private static void recordingCallbackFromNative(int event, int uid, int session, int source,
+ int[] recordingFormat) {
+ AudioRecordingCallback cb = null;
+ synchronized (AudioSystem.class) {
+ cb = sRecordingCallback;
+ }
+ if (cb != null) {
+ // TODO receive package name from native
+ cb.onRecordingConfigurationChanged(event, uid, session, source, recordingFormat, "");
+ }
+ }
+
+ /*
+ * Error codes used by public APIs (AudioTrack, AudioRecord, AudioManager ...)
+ * Must be kept in sync with frameworks/base/core/jni/android_media_AudioErrors.h
+ */
+    // Native status codes returned by the JNI methods in this class.
+    public static final int SUCCESS = 0;
+    public static final int ERROR = -1;
+    public static final int BAD_VALUE = -2;
+    public static final int INVALID_OPERATION = -3;
+    public static final int PERMISSION_DENIED = -4;
+    public static final int NO_INIT = -5;
+    public static final int DEAD_OBJECT = -6;
+    public static final int WOULD_BLOCK = -7;
+
+    /*
+     * AudioPolicyService methods
+     */
+
+    //
+    // audio device definitions: must be kept in sync with values in system/core/audio.h
+    //
+
+    public static final int DEVICE_NONE = 0x0;
+    // reserved bits
+    // DEVICE_BIT_IN is the sign bit: set on every input-device constant below.
+    public static final int DEVICE_BIT_IN = 0x80000000;
+    public static final int DEVICE_BIT_DEFAULT = 0x40000000;
+    // output devices, be sure to update AudioManager.java also
+    public static final int DEVICE_OUT_EARPIECE = 0x1;
+    public static final int DEVICE_OUT_SPEAKER = 0x2;
+    public static final int DEVICE_OUT_WIRED_HEADSET = 0x4;
+    public static final int DEVICE_OUT_WIRED_HEADPHONE = 0x8;
+    public static final int DEVICE_OUT_BLUETOOTH_SCO = 0x10;
+    public static final int DEVICE_OUT_BLUETOOTH_SCO_HEADSET = 0x20;
+    public static final int DEVICE_OUT_BLUETOOTH_SCO_CARKIT = 0x40;
+    public static final int DEVICE_OUT_BLUETOOTH_A2DP = 0x80;
+    public static final int DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES = 0x100;
+    public static final int DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER = 0x200;
+    public static final int DEVICE_OUT_AUX_DIGITAL = 0x400;
+    // HDMI is an alias of AUX_DIGITAL; switch statements below must use only one of them.
+    public static final int DEVICE_OUT_HDMI = DEVICE_OUT_AUX_DIGITAL;
+    public static final int DEVICE_OUT_ANLG_DOCK_HEADSET = 0x800;
+    public static final int DEVICE_OUT_DGTL_DOCK_HEADSET = 0x1000;
+    public static final int DEVICE_OUT_USB_ACCESSORY = 0x2000;
+    public static final int DEVICE_OUT_USB_DEVICE = 0x4000;
+    public static final int DEVICE_OUT_REMOTE_SUBMIX = 0x8000;
+    public static final int DEVICE_OUT_TELEPHONY_TX = 0x10000;
+    public static final int DEVICE_OUT_LINE = 0x20000;
+    public static final int DEVICE_OUT_HDMI_ARC = 0x40000;
+    public static final int DEVICE_OUT_SPDIF = 0x80000;
+    public static final int DEVICE_OUT_FM = 0x100000;
+    public static final int DEVICE_OUT_AUX_LINE = 0x200000;
+    public static final int DEVICE_OUT_SPEAKER_SAFE = 0x400000;
+    public static final int DEVICE_OUT_IP = 0x800000;
+    public static final int DEVICE_OUT_BUS = 0x1000000;
+    public static final int DEVICE_OUT_PROXY = 0x2000000;
+    public static final int DEVICE_OUT_USB_HEADSET = 0x4000000;
+
+    public static final int DEVICE_OUT_DEFAULT = DEVICE_BIT_DEFAULT;
+
+    // Union of every output-device bit above (including DEVICE_OUT_DEFAULT).
+    public static final int DEVICE_OUT_ALL = (DEVICE_OUT_EARPIECE |
+            DEVICE_OUT_SPEAKER |
+            DEVICE_OUT_WIRED_HEADSET |
+            DEVICE_OUT_WIRED_HEADPHONE |
+            DEVICE_OUT_BLUETOOTH_SCO |
+            DEVICE_OUT_BLUETOOTH_SCO_HEADSET |
+            DEVICE_OUT_BLUETOOTH_SCO_CARKIT |
+            DEVICE_OUT_BLUETOOTH_A2DP |
+            DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES |
+            DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER |
+            DEVICE_OUT_HDMI |
+            DEVICE_OUT_ANLG_DOCK_HEADSET |
+            DEVICE_OUT_DGTL_DOCK_HEADSET |
+            DEVICE_OUT_USB_ACCESSORY |
+            DEVICE_OUT_USB_DEVICE |
+            DEVICE_OUT_REMOTE_SUBMIX |
+            DEVICE_OUT_TELEPHONY_TX |
+            DEVICE_OUT_LINE |
+            DEVICE_OUT_HDMI_ARC |
+            DEVICE_OUT_SPDIF |
+            DEVICE_OUT_FM |
+            DEVICE_OUT_AUX_LINE |
+            DEVICE_OUT_SPEAKER_SAFE |
+            DEVICE_OUT_IP |
+            DEVICE_OUT_BUS |
+            DEVICE_OUT_PROXY |
+            DEVICE_OUT_USB_HEADSET |
+            DEVICE_OUT_DEFAULT);
+    public static final int DEVICE_OUT_ALL_A2DP = (DEVICE_OUT_BLUETOOTH_A2DP |
+            DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES |
+            DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER);
+    public static final int DEVICE_OUT_ALL_SCO = (DEVICE_OUT_BLUETOOTH_SCO |
+            DEVICE_OUT_BLUETOOTH_SCO_HEADSET |
+            DEVICE_OUT_BLUETOOTH_SCO_CARKIT);
+    public static final int DEVICE_OUT_ALL_USB = (DEVICE_OUT_USB_ACCESSORY |
+            DEVICE_OUT_USB_DEVICE |
+            DEVICE_OUT_USB_HEADSET);
+    public static final int DEVICE_OUT_ALL_HDMI_SYSTEM_AUDIO = (DEVICE_OUT_AUX_LINE |
+            DEVICE_OUT_HDMI_ARC |
+            DEVICE_OUT_SPDIF);
+    public static final int DEVICE_ALL_HDMI_SYSTEM_AUDIO_AND_SPEAKER =
+            (DEVICE_OUT_ALL_HDMI_SYSTEM_AUDIO |
+             DEVICE_OUT_SPEAKER);
+
+    // input devices (all carry DEVICE_BIT_IN in addition to their own bit)
+    public static final int DEVICE_IN_COMMUNICATION = DEVICE_BIT_IN | 0x1;
+    public static final int DEVICE_IN_AMBIENT = DEVICE_BIT_IN | 0x2;
+    public static final int DEVICE_IN_BUILTIN_MIC = DEVICE_BIT_IN | 0x4;
+    public static final int DEVICE_IN_BLUETOOTH_SCO_HEADSET = DEVICE_BIT_IN | 0x8;
+    public static final int DEVICE_IN_WIRED_HEADSET = DEVICE_BIT_IN | 0x10;
+    public static final int DEVICE_IN_AUX_DIGITAL = DEVICE_BIT_IN | 0x20;
+    public static final int DEVICE_IN_HDMI = DEVICE_IN_AUX_DIGITAL;
+    public static final int DEVICE_IN_VOICE_CALL = DEVICE_BIT_IN | 0x40;
+    public static final int DEVICE_IN_TELEPHONY_RX = DEVICE_IN_VOICE_CALL;
+    public static final int DEVICE_IN_BACK_MIC = DEVICE_BIT_IN | 0x80;
+    public static final int DEVICE_IN_REMOTE_SUBMIX = DEVICE_BIT_IN | 0x100;
+    public static final int DEVICE_IN_ANLG_DOCK_HEADSET = DEVICE_BIT_IN | 0x200;
+    public static final int DEVICE_IN_DGTL_DOCK_HEADSET = DEVICE_BIT_IN | 0x400;
+    public static final int DEVICE_IN_USB_ACCESSORY = DEVICE_BIT_IN | 0x800;
+    public static final int DEVICE_IN_USB_DEVICE = DEVICE_BIT_IN | 0x1000;
+    public static final int DEVICE_IN_FM_TUNER = DEVICE_BIT_IN | 0x2000;
+    public static final int DEVICE_IN_TV_TUNER = DEVICE_BIT_IN | 0x4000;
+    public static final int DEVICE_IN_LINE = DEVICE_BIT_IN | 0x8000;
+    public static final int DEVICE_IN_SPDIF = DEVICE_BIT_IN | 0x10000;
+    public static final int DEVICE_IN_BLUETOOTH_A2DP = DEVICE_BIT_IN | 0x20000;
+    public static final int DEVICE_IN_LOOPBACK = DEVICE_BIT_IN | 0x40000;
+    public static final int DEVICE_IN_IP = DEVICE_BIT_IN | 0x80000;
+    public static final int DEVICE_IN_BUS = DEVICE_BIT_IN | 0x100000;
+    // NOTE(review): bit values 0x200000..0x800000 are skipped here; presumably reserved
+    // in the native enum — confirm against system/core/audio.h before filling the gap.
+    public static final int DEVICE_IN_PROXY = DEVICE_BIT_IN | 0x1000000;
+    public static final int DEVICE_IN_USB_HEADSET = DEVICE_BIT_IN | 0x2000000;
+    public static final int DEVICE_IN_DEFAULT = DEVICE_BIT_IN | DEVICE_BIT_DEFAULT;
+
+    // Union of every input-device bit above (including DEVICE_IN_DEFAULT).
+    public static final int DEVICE_IN_ALL = (DEVICE_IN_COMMUNICATION |
+            DEVICE_IN_AMBIENT |
+            DEVICE_IN_BUILTIN_MIC |
+            DEVICE_IN_BLUETOOTH_SCO_HEADSET |
+            DEVICE_IN_WIRED_HEADSET |
+            DEVICE_IN_HDMI |
+            DEVICE_IN_TELEPHONY_RX |
+            DEVICE_IN_BACK_MIC |
+            DEVICE_IN_REMOTE_SUBMIX |
+            DEVICE_IN_ANLG_DOCK_HEADSET |
+            DEVICE_IN_DGTL_DOCK_HEADSET |
+            DEVICE_IN_USB_ACCESSORY |
+            DEVICE_IN_USB_DEVICE |
+            DEVICE_IN_FM_TUNER |
+            DEVICE_IN_TV_TUNER |
+            DEVICE_IN_LINE |
+            DEVICE_IN_SPDIF |
+            DEVICE_IN_BLUETOOTH_A2DP |
+            DEVICE_IN_LOOPBACK |
+            DEVICE_IN_IP |
+            DEVICE_IN_BUS |
+            DEVICE_IN_PROXY |
+            DEVICE_IN_USB_HEADSET |
+            DEVICE_IN_DEFAULT);
+    public static final int DEVICE_IN_ALL_SCO = DEVICE_IN_BLUETOOTH_SCO_HEADSET;
+    public static final int DEVICE_IN_ALL_USB = (DEVICE_IN_USB_ACCESSORY |
+            DEVICE_IN_USB_DEVICE |
+            DEVICE_IN_USB_HEADSET);
+
+    // device states, must match AudioSystem::device_connection_state
+    public static final int DEVICE_STATE_UNAVAILABLE = 0;
+    public static final int DEVICE_STATE_AVAILABLE = 1;
+    // NOTE(review): two states are defined above (0 and 1), yet this count is 1 —
+    // looks off-by-one; confirm against the native device_connection_state enum
+    // before relying on it for range checks.
+    private static final int NUM_DEVICE_STATES = 1;
+
+    /**
+     * Returns the symbolic name of a device connection state constant, or a
+     * diagnostic string containing the raw value for unrecognized states.
+     */
+    public static String deviceStateToString(int state) {
+        if (state == DEVICE_STATE_UNAVAILABLE) {
+            return "DEVICE_STATE_UNAVAILABLE";
+        }
+        if (state == DEVICE_STATE_AVAILABLE) {
+            return "DEVICE_STATE_AVAILABLE";
+        }
+        return "unknown state (" + state + ")";
+    }
+
+    // Human-readable device parameter names used by getOutputDeviceName()/getInputDeviceName().
+    public static final String DEVICE_OUT_EARPIECE_NAME = "earpiece";
+    public static final String DEVICE_OUT_SPEAKER_NAME = "speaker";
+    public static final String DEVICE_OUT_WIRED_HEADSET_NAME = "headset";
+    public static final String DEVICE_OUT_WIRED_HEADPHONE_NAME = "headphone";
+    public static final String DEVICE_OUT_BLUETOOTH_SCO_NAME = "bt_sco";
+    public static final String DEVICE_OUT_BLUETOOTH_SCO_HEADSET_NAME = "bt_sco_hs";
+    public static final String DEVICE_OUT_BLUETOOTH_SCO_CARKIT_NAME = "bt_sco_carkit";
+    public static final String DEVICE_OUT_BLUETOOTH_A2DP_NAME = "bt_a2dp";
+    public static final String DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES_NAME = "bt_a2dp_hp";
+    public static final String DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER_NAME = "bt_a2dp_spk";
+    public static final String DEVICE_OUT_AUX_DIGITAL_NAME = "aux_digital";
+    public static final String DEVICE_OUT_HDMI_NAME = "hdmi";
+    public static final String DEVICE_OUT_ANLG_DOCK_HEADSET_NAME = "analog_dock";
+    public static final String DEVICE_OUT_DGTL_DOCK_HEADSET_NAME = "digital_dock";
+    public static final String DEVICE_OUT_USB_ACCESSORY_NAME = "usb_accessory";
+    public static final String DEVICE_OUT_USB_DEVICE_NAME = "usb_device";
+    public static final String DEVICE_OUT_REMOTE_SUBMIX_NAME = "remote_submix";
+    public static final String DEVICE_OUT_TELEPHONY_TX_NAME = "telephony_tx";
+    public static final String DEVICE_OUT_LINE_NAME = "line";
+    // NOTE(review): "hmdi_arc" looks like a typo for "hdmi_arc", but this is a runtime
+    // string that native/HAL configuration may match exactly — do NOT change it without
+    // auditing every consumer of this value.
+    public static final String DEVICE_OUT_HDMI_ARC_NAME = "hmdi_arc";
+    public static final String DEVICE_OUT_SPDIF_NAME = "spdif";
+    public static final String DEVICE_OUT_FM_NAME = "fm_transmitter";
+    public static final String DEVICE_OUT_AUX_LINE_NAME = "aux_line";
+    public static final String DEVICE_OUT_SPEAKER_SAFE_NAME = "speaker_safe";
+    public static final String DEVICE_OUT_IP_NAME = "ip";
+    public static final String DEVICE_OUT_BUS_NAME = "bus";
+    public static final String DEVICE_OUT_PROXY_NAME = "proxy";
+    public static final String DEVICE_OUT_USB_HEADSET_NAME = "usb_headset";
+
+    public static final String DEVICE_IN_COMMUNICATION_NAME = "communication";
+    public static final String DEVICE_IN_AMBIENT_NAME = "ambient";
+    public static final String DEVICE_IN_BUILTIN_MIC_NAME = "mic";
+    public static final String DEVICE_IN_BLUETOOTH_SCO_HEADSET_NAME = "bt_sco_hs";
+    public static final String DEVICE_IN_WIRED_HEADSET_NAME = "headset";
+    public static final String DEVICE_IN_AUX_DIGITAL_NAME = "aux_digital";
+    public static final String DEVICE_IN_TELEPHONY_RX_NAME = "telephony_rx";
+    public static final String DEVICE_IN_BACK_MIC_NAME = "back_mic";
+    public static final String DEVICE_IN_REMOTE_SUBMIX_NAME = "remote_submix";
+    public static final String DEVICE_IN_ANLG_DOCK_HEADSET_NAME = "analog_dock";
+    public static final String DEVICE_IN_DGTL_DOCK_HEADSET_NAME = "digital_dock";
+    public static final String DEVICE_IN_USB_ACCESSORY_NAME = "usb_accessory";
+    public static final String DEVICE_IN_USB_DEVICE_NAME = "usb_device";
+    public static final String DEVICE_IN_FM_TUNER_NAME = "fm_tuner";
+    public static final String DEVICE_IN_TV_TUNER_NAME = "tv_tuner";
+    public static final String DEVICE_IN_LINE_NAME = "line";
+    public static final String DEVICE_IN_SPDIF_NAME = "spdif";
+    public static final String DEVICE_IN_BLUETOOTH_A2DP_NAME = "bt_a2dp";
+    public static final String DEVICE_IN_LOOPBACK_NAME = "loopback";
+    public static final String DEVICE_IN_IP_NAME = "ip";
+    public static final String DEVICE_IN_BUS_NAME = "bus";
+    public static final String DEVICE_IN_PROXY_NAME = "proxy";
+    public static final String DEVICE_IN_USB_HEADSET_NAME = "usb_headset";
+
+ public static String getOutputDeviceName(int device)
+ {
+ switch(device) {
+ case DEVICE_OUT_EARPIECE:
+ return DEVICE_OUT_EARPIECE_NAME;
+ case DEVICE_OUT_SPEAKER:
+ return DEVICE_OUT_SPEAKER_NAME;
+ case DEVICE_OUT_WIRED_HEADSET:
+ return DEVICE_OUT_WIRED_HEADSET_NAME;
+ case DEVICE_OUT_WIRED_HEADPHONE:
+ return DEVICE_OUT_WIRED_HEADPHONE_NAME;
+ case DEVICE_OUT_BLUETOOTH_SCO:
+ return DEVICE_OUT_BLUETOOTH_SCO_NAME;
+ case DEVICE_OUT_BLUETOOTH_SCO_HEADSET:
+ return DEVICE_OUT_BLUETOOTH_SCO_HEADSET_NAME;
+ case DEVICE_OUT_BLUETOOTH_SCO_CARKIT:
+ return DEVICE_OUT_BLUETOOTH_SCO_CARKIT_NAME;
+ case DEVICE_OUT_BLUETOOTH_A2DP:
+ return DEVICE_OUT_BLUETOOTH_A2DP_NAME;
+ case DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES:
+ return DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES_NAME;
+ case DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER:
+ return DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER_NAME;
+ case DEVICE_OUT_HDMI:
+ return DEVICE_OUT_HDMI_NAME;
+ case DEVICE_OUT_ANLG_DOCK_HEADSET:
+ return DEVICE_OUT_ANLG_DOCK_HEADSET_NAME;
+ case DEVICE_OUT_DGTL_DOCK_HEADSET:
+ return DEVICE_OUT_DGTL_DOCK_HEADSET_NAME;
+ case DEVICE_OUT_USB_ACCESSORY:
+ return DEVICE_OUT_USB_ACCESSORY_NAME;
+ case DEVICE_OUT_USB_DEVICE:
+ return DEVICE_OUT_USB_DEVICE_NAME;
+ case DEVICE_OUT_REMOTE_SUBMIX:
+ return DEVICE_OUT_REMOTE_SUBMIX_NAME;
+ case DEVICE_OUT_TELEPHONY_TX:
+ return DEVICE_OUT_TELEPHONY_TX_NAME;
+ case DEVICE_OUT_LINE:
+ return DEVICE_OUT_LINE_NAME;
+ case DEVICE_OUT_HDMI_ARC:
+ return DEVICE_OUT_HDMI_ARC_NAME;
+ case DEVICE_OUT_SPDIF:
+ return DEVICE_OUT_SPDIF_NAME;
+ case DEVICE_OUT_FM:
+ return DEVICE_OUT_FM_NAME;
+ case DEVICE_OUT_AUX_LINE:
+ return DEVICE_OUT_AUX_LINE_NAME;
+ case DEVICE_OUT_SPEAKER_SAFE:
+ return DEVICE_OUT_SPEAKER_SAFE_NAME;
+ case DEVICE_OUT_IP:
+ return DEVICE_OUT_IP_NAME;
+ case DEVICE_OUT_BUS:
+ return DEVICE_OUT_BUS_NAME;
+ case DEVICE_OUT_PROXY:
+ return DEVICE_OUT_PROXY_NAME;
+ case DEVICE_OUT_USB_HEADSET:
+ return DEVICE_OUT_USB_HEADSET_NAME;
+ case DEVICE_OUT_DEFAULT:
+ default:
+ return Integer.toString(device);
+ }
+ }
+
+    /**
+     * Maps a single input-device constant to its parameter-string name.
+     * Unrecognized values (and DEVICE_IN_DEFAULT) are rendered as their decimal value.
+     */
+    public static String getInputDeviceName(int device) {
+        switch (device) {
+            case DEVICE_IN_COMMUNICATION: return DEVICE_IN_COMMUNICATION_NAME;
+            case DEVICE_IN_AMBIENT: return DEVICE_IN_AMBIENT_NAME;
+            case DEVICE_IN_BUILTIN_MIC: return DEVICE_IN_BUILTIN_MIC_NAME;
+            case DEVICE_IN_BLUETOOTH_SCO_HEADSET: return DEVICE_IN_BLUETOOTH_SCO_HEADSET_NAME;
+            case DEVICE_IN_WIRED_HEADSET: return DEVICE_IN_WIRED_HEADSET_NAME;
+            // DEVICE_IN_HDMI aliases DEVICE_IN_AUX_DIGITAL, so only one case exists.
+            case DEVICE_IN_AUX_DIGITAL: return DEVICE_IN_AUX_DIGITAL_NAME;
+            // DEVICE_IN_TELEPHONY_RX aliases DEVICE_IN_VOICE_CALL.
+            case DEVICE_IN_TELEPHONY_RX: return DEVICE_IN_TELEPHONY_RX_NAME;
+            case DEVICE_IN_BACK_MIC: return DEVICE_IN_BACK_MIC_NAME;
+            case DEVICE_IN_REMOTE_SUBMIX: return DEVICE_IN_REMOTE_SUBMIX_NAME;
+            case DEVICE_IN_ANLG_DOCK_HEADSET: return DEVICE_IN_ANLG_DOCK_HEADSET_NAME;
+            case DEVICE_IN_DGTL_DOCK_HEADSET: return DEVICE_IN_DGTL_DOCK_HEADSET_NAME;
+            case DEVICE_IN_USB_ACCESSORY: return DEVICE_IN_USB_ACCESSORY_NAME;
+            case DEVICE_IN_USB_DEVICE: return DEVICE_IN_USB_DEVICE_NAME;
+            case DEVICE_IN_FM_TUNER: return DEVICE_IN_FM_TUNER_NAME;
+            case DEVICE_IN_TV_TUNER: return DEVICE_IN_TV_TUNER_NAME;
+            case DEVICE_IN_LINE: return DEVICE_IN_LINE_NAME;
+            case DEVICE_IN_SPDIF: return DEVICE_IN_SPDIF_NAME;
+            case DEVICE_IN_BLUETOOTH_A2DP: return DEVICE_IN_BLUETOOTH_A2DP_NAME;
+            case DEVICE_IN_LOOPBACK: return DEVICE_IN_LOOPBACK_NAME;
+            case DEVICE_IN_IP: return DEVICE_IN_IP_NAME;
+            case DEVICE_IN_BUS: return DEVICE_IN_BUS_NAME;
+            case DEVICE_IN_PROXY: return DEVICE_IN_PROXY_NAME;
+            case DEVICE_IN_USB_HEADSET: return DEVICE_IN_USB_HEADSET_NAME;
+            case DEVICE_IN_DEFAULT:  // intentionally unnamed; falls through
+            default:
+                return Integer.toString(device);
+        }
+    }
+
+    // Phone-state constants; presumably these must track the native audio_mode values —
+    // TODO(review): confirm the exact mapping against system/audio.h.
+    public static final int PHONE_STATE_OFFCALL = 0;
+    public static final int PHONE_STATE_RINGING = 1;
+    public static final int PHONE_STATE_INCALL = 2;
+
+    // device categories config for setForceUse, must match audio_policy_forced_cfg_t
+    public static final int FORCE_NONE = 0;
+    public static final int FORCE_SPEAKER = 1;
+    public static final int FORCE_HEADPHONES = 2;
+    public static final int FORCE_BT_SCO = 3;
+    public static final int FORCE_BT_A2DP = 4;
+    public static final int FORCE_WIRED_ACCESSORY = 5;
+    public static final int FORCE_BT_CAR_DOCK = 6;
+    public static final int FORCE_BT_DESK_DOCK = 7;
+    public static final int FORCE_ANALOG_DOCK = 8;
+    public static final int FORCE_DIGITAL_DOCK = 9;
+    public static final int FORCE_NO_BT_A2DP = 10;
+    public static final int FORCE_SYSTEM_ENFORCED = 11;
+    public static final int FORCE_HDMI_SYSTEM_AUDIO_ENFORCED = 12;
+    public static final int FORCE_ENCODED_SURROUND_NEVER = 13;
+    public static final int FORCE_ENCODED_SURROUND_ALWAYS = 14;
+    // Count of the FORCE_* configs above (0..14).
+    public static final int NUM_FORCE_CONFIG = 15;
+    public static final int FORCE_DEFAULT = FORCE_NONE;
+
+    /**
+     * Returns the symbolic name of a FORCE_* configuration constant, or a
+     * diagnostic string containing the raw value for unrecognized configs.
+     */
+    public static String forceUseConfigToString(int config) {
+        switch (config) {
+            case FORCE_NONE:
+                return "FORCE_NONE";
+            case FORCE_SPEAKER:
+                return "FORCE_SPEAKER";
+            case FORCE_HEADPHONES:
+                return "FORCE_HEADPHONES";
+            case FORCE_BT_SCO:
+                return "FORCE_BT_SCO";
+            case FORCE_BT_A2DP:
+                return "FORCE_BT_A2DP";
+            case FORCE_WIRED_ACCESSORY:
+                return "FORCE_WIRED_ACCESSORY";
+            case FORCE_BT_CAR_DOCK:
+                return "FORCE_BT_CAR_DOCK";
+            case FORCE_BT_DESK_DOCK:
+                return "FORCE_BT_DESK_DOCK";
+            case FORCE_ANALOG_DOCK:
+                return "FORCE_ANALOG_DOCK";
+            case FORCE_DIGITAL_DOCK:
+                return "FORCE_DIGITAL_DOCK";
+            case FORCE_NO_BT_A2DP:
+                return "FORCE_NO_BT_A2DP";
+            case FORCE_SYSTEM_ENFORCED:
+                return "FORCE_SYSTEM_ENFORCED";
+            case FORCE_HDMI_SYSTEM_AUDIO_ENFORCED:
+                return "FORCE_HDMI_SYSTEM_AUDIO_ENFORCED";
+            case FORCE_ENCODED_SURROUND_NEVER:
+                return "FORCE_ENCODED_SURROUND_NEVER";
+            case FORCE_ENCODED_SURROUND_ALWAYS:
+                return "FORCE_ENCODED_SURROUND_ALWAYS";
+            default:
+                return "unknown config (" + config + ")" ;
+        }
+    }
+
+    // usage for setForceUse, must match audio_policy_force_use_t
+    public static final int FOR_COMMUNICATION = 0;
+    public static final int FOR_MEDIA = 1;
+    public static final int FOR_RECORD = 2;
+    public static final int FOR_DOCK = 3;
+    public static final int FOR_SYSTEM = 4;
+    public static final int FOR_HDMI_SYSTEM_AUDIO = 5;
+    public static final int FOR_ENCODED_SURROUND = 6;
+    // Count of the FOR_* usages above (0..6).
+    private static final int NUM_FORCE_USE = 7;
+
+    /**
+     * Returns the symbolic name of a FOR_* usage constant, or a diagnostic
+     * string containing the raw value for unrecognized usages.
+     */
+    public static String forceUseUsageToString(int usage) {
+        switch (usage) {
+            case FOR_COMMUNICATION:
+                return "FOR_COMMUNICATION";
+            case FOR_MEDIA:
+                return "FOR_MEDIA";
+            case FOR_RECORD:
+                return "FOR_RECORD";
+            case FOR_DOCK:
+                return "FOR_DOCK";
+            case FOR_SYSTEM:
+                return "FOR_SYSTEM";
+            case FOR_HDMI_SYSTEM_AUDIO:
+                return "FOR_HDMI_SYSTEM_AUDIO";
+            case FOR_ENCODED_SURROUND:
+                return "FOR_ENCODED_SURROUND";
+            default:
+                return "unknown usage (" + usage + ")" ;
+        }
+    }
+
+    // usage for AudioRecord.startRecordingSync(), must match AudioSystem::sync_event_t
+    public static final int SYNC_EVENT_NONE = 0;
+    public static final int SYNC_EVENT_PRESENTATION_COMPLETE = 1;
+
+    // Native entry points below; presumably implemented in
+    // core/jni/android_media_AudioSystem.cpp — TODO(review): confirm.
+    /**
+     * @return command completion status, one of {@link #AUDIO_STATUS_OK},
+     * {@link #AUDIO_STATUS_ERROR} or {@link #AUDIO_STATUS_SERVER_DIED}
+     */
+    public static native int setDeviceConnectionState(int device, int state,
+                                                      String device_address, String device_name);
+    public static native int getDeviceConnectionState(int device, String device_address);
+    public static native int handleDeviceConfigChange(int device,
+                                                      String device_address,
+                                                      String device_name);
+    public static native int setPhoneState(int state);
+    public static native int setForceUse(int usage, int config);
+    public static native int getForceUse(int usage);
+    public static native int initStreamVolume(int stream, int indexMin, int indexMax);
+    public static native int setStreamVolumeIndex(int stream, int index, int device);
+    public static native int getStreamVolumeIndex(int stream, int device);
+    public static native int setMasterVolume(float value);
+    public static native float getMasterVolume();
+    public static native int setMasterMute(boolean mute);
+    public static native boolean getMasterMute();
+    public static native int getDevicesForStream(int stream);
+
+    /** @hide returns true if master mono is enabled. */
+    public static native boolean getMasterMono();
+    /** @hide enables or disables the master mono mode. */
+    public static native int setMasterMono(boolean mono);
+
+    // helpers for android.media.AudioManager.getProperty(), see description there for meaning
+    public static native int getPrimaryOutputSamplingRate();
+    public static native int getPrimaryOutputFrameCount();
+    public static native int getOutputLatency(int stream);
+
+    public static native int setLowRamDevice(boolean isLowRamDevice);
+    public static native int checkAudioFlinger();
+
+    // Audio-port / audio-patch management (lists are filled in by native code).
+    public static native int listAudioPorts(ArrayList<AudioPort> ports, int[] generation);
+    public static native int createAudioPatch(AudioPatch[] patch,
+                                            AudioPortConfig[] sources, AudioPortConfig[] sinks);
+    public static native int releaseAudioPatch(AudioPatch patch);
+    public static native int listAudioPatches(ArrayList<AudioPatch> patches, int[] generation);
+    public static native int setAudioPortConfig(AudioPortConfig config);
+
+    // declare this instance as having a dynamic policy callback handler
+    private static native final void native_register_dynamic_policy_callback();
+    // declare this instance as having a recording configuration update callback handler
+    private static native final void native_register_recording_callback();
+
+    // must be kept in sync with value in include/system/audio.h
+    public static final int AUDIO_HW_SYNC_INVALID = 0;
+
+    public static native int getAudioHwSyncForSession(int sessionId);
+
+    public static native int registerPolicyMixes(ArrayList<AudioMix> mixes, boolean register);
+
+    public static native int systemReady();
+
+    public static native float getStreamVolumeDB(int stream, int index, int device);
+
+    // Items shared with audio service
+
+    /**
+     * The delay before playing a sound. This small period exists so the user
+     * can press another key (non-volume keys, too) to have it NOT be audible.
+     * <p>
+     * PhoneWindow will implement this part.
+     */
+    public static final int PLAY_SOUND_DELAY = 300;  // milliseconds, presumably — TODO confirm
+
+    /**
+     * Constant to identify a focus stack entry that is used to hold the focus while the phone
+     * is ringing or during a call. Used by com.android.internal.telephony.CallManager when
+     * entering and exiting calls.
+     */
+    public final static String IN_VOICE_COMM_FOCUS_ID = "AudioFocus_For_Phone_Ring_And_Calls";
+    /**
+     * Packs {@code vibrateSetting} for {@code vibrateType} into the combined vibrate
+     * settings value. Each vibrate type occupies a two-bit field; the field for the
+     * given type is cleared and replaced with the low two bits of the new setting.
+     *
+     * @see AudioManager#setVibrateSetting(int, int)
+     */
+    public static int getValueForVibrateSetting(int existingValue, int vibrateType,
+            int vibrateSetting) {
+        final int shift = vibrateType * 2;
+        // Clear the two bits belonging to this vibrate type ('3' is binary '11').
+        final int cleared = existingValue & ~(3 << shift);
+        // Merge in the new two-bit setting.
+        return cleared | ((vibrateSetting & 3) << shift);
+    }
+
+    /**
+     * Returns the platform default volume index for the given stream type.
+     * Throws {@link ArrayIndexOutOfBoundsException} for stream types outside
+     * the DEFAULT_STREAM_VOLUME table.
+     */
+    public static int getDefaultStreamVolume(int streamType) {
+        final int[] defaults = DEFAULT_STREAM_VOLUME;
+        return defaults[streamType];
+    }
+
+    // Default volume index per stream type, indexed by the STREAM_* constants.
+    // NOTE(review): intentionally-looking public and non-final — other system code
+    // appears expected to overwrite entries; confirm before making this immutable.
+    public static int[] DEFAULT_STREAM_VOLUME = new int[] {
+        4,  // STREAM_VOICE_CALL
+        7,  // STREAM_SYSTEM
+        5,  // STREAM_RING
+        5,  // STREAM_MUSIC
+        6,  // STREAM_ALARM
+        5,  // STREAM_NOTIFICATION
+        7,  // STREAM_BLUETOOTH_SCO
+        7,  // STREAM_SYSTEM_ENFORCED
+        5,  // STREAM_DTMF
+        5,  // STREAM_TTS
+        5,  // STREAM_ACCESSIBILITY
+    };
+
+    /**
+     * Returns the symbolic name of a stream type, "USE_DEFAULT_STREAM_TYPE" for the
+     * AudioManager default-stream sentinel, or "UNKNOWN_STREAM_&lt;n&gt;" otherwise.
+     */
+    public static String streamToString(int stream) {
+        if (stream >= 0 && stream < STREAM_NAMES.length) {
+            return STREAM_NAMES[stream];
+        }
+        if (stream == AudioManager.USE_DEFAULT_STREAM_TYPE) {
+            return "USE_DEFAULT_STREAM_TYPE";
+        }
+        return "UNKNOWN_STREAM_" + stream;
+    }
+
+    /** The platform has no specific capabilities */
+    public static final int PLATFORM_DEFAULT = 0;
+    /** The platform is voice call capable (a phone) */
+    public static final int PLATFORM_VOICE = 1;
+    /** The platform is a television or a set-top box */
+    public static final int PLATFORM_TELEVISION = 2;
+
+    /**
+     * Return the platform type that this is running on. One of:
+     * <ul>
+     * <li>{@link #PLATFORM_VOICE} when the device is voice-call capable</li>
+     * <li>{@link #PLATFORM_TELEVISION} when the device has the leanback feature</li>
+     * <li>{@link #PLATFORM_DEFAULT} otherwise</li>
+     * </ul>
+     */
+    public static int getPlatformType(Context context) {
+        final boolean voiceCapable = context.getResources()
+                .getBoolean(com.android.internal.R.bool.config_voice_capable);
+        if (voiceCapable) {
+            return PLATFORM_VOICE;
+        }
+        final boolean leanback = context.getPackageManager()
+                .hasSystemFeature(PackageManager.FEATURE_LEANBACK);
+        return leanback ? PLATFORM_TELEVISION : PLATFORM_DEFAULT;
+    }
+
+    /**
+     * @hide
+     * @return whether the system uses a single volume stream
+     *         (forced by config, or because the platform is a television).
+     */
+    public static boolean isSingleVolume(Context context) {
+        if (context.getResources().getBoolean(
+                com.android.internal.R.bool.config_single_volume)) {
+            return true;
+        }
+        return getPlatformType(context) == PLATFORM_TELEVISION;
+    }
+
+    // Bitmask of stream types affected by mute by default (one bit per STREAM_* index).
+    public static final int DEFAULT_MUTE_STREAMS_AFFECTED =
+            (1 << STREAM_MUSIC) |
+            (1 << STREAM_RING) |
+            (1 << STREAM_NOTIFICATION) |
+            (1 << STREAM_SYSTEM);
+
+    /**
+     * Event posted by AudioTrack and AudioRecord JNI (JNIDeviceCallback) when routing changes.
+     * Keep in sync with core/jni/android_media_DeviceCallback.h.
+     */
+    final static int NATIVE_EVENT_ROUTING_CHANGE = 1000;
+}
+
diff --git a/android/media/AudioTimestamp.java b/android/media/AudioTimestamp.java
new file mode 100644
index 00000000..be8ca151
--- /dev/null
+++ b/android/media/AudioTimestamp.java
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+import android.annotation.IntDef;
+
+/**
+ * Structure that groups a position in frame units relative to an assumed audio stream,
+ * together with the estimated time when that frame enters or leaves the audio
+ * processing pipeline on that device. This can be used to coordinate events
+ * and interactions with the external environment.
+ * <p>
+ * The time is based on the implementation's best effort, using whatever knowledge
+ * is available to the system, but cannot account for any delay unknown to the implementation.
+ *
+ * @see AudioTrack#getTimestamp AudioTrack.getTimestamp(AudioTimestamp)
+ * @see AudioRecord#getTimestamp AudioRecord.getTimestamp(AudioTimestamp, int)
+ */
+public final class AudioTimestamp
+{
+    /**
+     * Clock monotonic or its equivalent on the system,
+     * in the same units and timebase as {@link java.lang.System#nanoTime}.
+     */
+    public static final int TIMEBASE_MONOTONIC = 0;
+
+    /**
+     * Clock monotonic including suspend time or its equivalent on the system,
+     * in the same units and timebase as {@link android.os.SystemClock#elapsedRealtimeNanos}.
+     */
+    public static final int TIMEBASE_BOOTTIME = 1;
+
+    /** @hide */
+    @IntDef({
+        TIMEBASE_MONOTONIC,
+        TIMEBASE_BOOTTIME,
+    })
+    @Retention(RetentionPolicy.SOURCE)
+    public @interface Timebase {}
+
+    // NOTE(review): both fields below are public and mutable — presumably so one
+    // instance can be refilled by repeated getTimestamp() calls without allocation;
+    // confirm against AudioTrack/AudioRecord usage.
+    /**
+     * Position in frames relative to start of an assumed audio stream.
+     * <p>
+     * When obtained through
+     * {@link AudioRecord#getTimestamp AudioRecord.getTimestamp(AudioTimestamp, int)},
+     * all 64 bits of position are valid.
+     * <p>
+     * When obtained through
+     * {@link AudioTrack#getTimestamp AudioTrack.getTimestamp(AudioTimestamp)},
+     * the low-order 32 bits of position is in wrapping frame units similar to
+     * {@link AudioTrack#getPlaybackHeadPosition AudioTrack.getPlaybackHeadPosition()}.
+     */
+    public long framePosition;
+
+    /**
+     * Time associated with the frame in the audio pipeline.
+     * <p>
+     * When obtained through
+     * {@link AudioRecord#getTimestamp AudioRecord.getTimestamp(AudioTimestamp, int)},
+     * this is the estimated time in nanoseconds when the frame referred to by
+     * {@link #framePosition} was captured. The timebase is either
+     * {@link #TIMEBASE_MONOTONIC} or {@link #TIMEBASE_BOOTTIME}, depending
+     * on the timebase parameter used in
+     * {@link AudioRecord#getTimestamp AudioRecord.getTimestamp(AudioTimestamp, int)}.
+     * <p>
+     * When obtained through
+     * {@link AudioTrack#getTimestamp AudioTrack.getTimestamp(AudioTimestamp)},
+     * this is the estimated time when the frame was presented or is committed to be presented,
+     * with a timebase of {@link #TIMEBASE_MONOTONIC}.
+     */
+    public long nanoTime;
+}
diff --git a/android/media/AudioTrack.java b/android/media/AudioTrack.java
new file mode 100644
index 00000000..50145f8a
--- /dev/null
+++ b/android/media/AudioTrack.java
@@ -0,0 +1,3149 @@
+/*
+ * Copyright (C) 2008 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.ref.WeakReference;
+import java.lang.Math;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.NioUtils;
+import java.util.Collection;
+
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.app.ActivityThread;
+import android.content.Context;
+import android.os.Handler;
+import android.os.IBinder;
+import android.os.Looper;
+import android.os.Message;
+import android.os.Process;
+import android.os.RemoteException;
+import android.os.ServiceManager;
+import android.util.ArrayMap;
+import android.util.Log;
+
+import com.android.internal.annotations.GuardedBy;
+
+/**
+ * The AudioTrack class manages and plays a single audio resource for Java applications.
+ * It allows streaming of PCM audio buffers to the audio sink for playback. This is
+ * achieved by "pushing" the data to the AudioTrack object using one of the
+ * {@link #write(byte[], int, int)}, {@link #write(short[], int, int)},
+ * and {@link #write(float[], int, int, int)} methods.
+ *
+ * <p>An AudioTrack instance can operate under two modes: static or streaming.<br>
+ * In Streaming mode, the application writes a continuous stream of data to the AudioTrack, using
+ * one of the {@code write()} methods. These are blocking and return when the data has been
+ * transferred from the Java layer to the native layer and queued for playback. The streaming
+ * mode is most useful when playing blocks of audio data that for instance are:
+ *
+ * <ul>
+ * <li>too big to fit in memory because of the duration of the sound to play,</li>
+ * <li>too big to fit in memory because of the characteristics of the audio data
+ * (high sampling rate, bits per sample ...)</li>
+ * <li>received or generated while previously queued audio is playing.</li>
+ * </ul>
+ *
+ * The static mode should be chosen when dealing with short sounds that fit in memory and
+ * that need to be played with the smallest latency possible. The static mode will
+ * therefore be preferred for UI and game sounds that are played often, and with the
+ * smallest overhead possible.
+ *
+ * <p>Upon creation, an AudioTrack object initializes its associated audio buffer.
+ * The size of this buffer, specified during the construction, determines how long an AudioTrack
+ * can play before running out of data.<br>
+ * For an AudioTrack using the static mode, this size is the maximum size of the sound that can
+ * be played from it.<br>
+ * For the streaming mode, data will be written to the audio sink in chunks of
+ * sizes less than or equal to the total buffer size.
+ *
+ * AudioTrack is not final and thus permits subclasses, but such use is not recommended.
+ */
+public class AudioTrack extends PlayerBase
+ implements AudioRouting
+ , VolumeAutomation
+{
+ //---------------------------------------------------------
+ // Constants
+ //--------------------
+ /** Minimum value for a linear gain or auxiliary effect level.
+ * This value must be exactly equal to 0.0f; do not change it.
+ */
+ private static final float GAIN_MIN = 0.0f;
+ /** Maximum value for a linear gain or auxiliary effect level.
+ * This value must be greater than or equal to 1.0f.
+ */
+ private static final float GAIN_MAX = 1.0f;
+
+ /** Maximum value for AudioTrack channel count
+     * @hide public for MediaCodec only, do not un-hide or change to a numeric literal
+ */
+ public static final int CHANNEL_COUNT_MAX = native_get_FCC_8();
+
+ /** indicates AudioTrack state is stopped */
+ public static final int PLAYSTATE_STOPPED = 1; // matches SL_PLAYSTATE_STOPPED
+ /** indicates AudioTrack state is paused */
+ public static final int PLAYSTATE_PAUSED = 2; // matches SL_PLAYSTATE_PAUSED
+ /** indicates AudioTrack state is playing */
+ public static final int PLAYSTATE_PLAYING = 3; // matches SL_PLAYSTATE_PLAYING
+
+ // keep these values in sync with android_media_AudioTrack.cpp
+ /**
+ * Creation mode where audio data is transferred from Java to the native layer
+ * only once before the audio starts playing.
+ */
+ public static final int MODE_STATIC = 0;
+ /**
+ * Creation mode where audio data is streamed from Java to the native layer
+ * as the audio is playing.
+ */
+ public static final int MODE_STREAM = 1;
+
+ /** @hide */
+ @IntDef({
+ MODE_STATIC,
+ MODE_STREAM
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface TransferMode {}
+
+ /**
+ * State of an AudioTrack that was not successfully initialized upon creation.
+ */
+ public static final int STATE_UNINITIALIZED = 0;
+ /**
+ * State of an AudioTrack that is ready to be used.
+ */
+ public static final int STATE_INITIALIZED = 1;
+ /**
+ * State of a successfully initialized AudioTrack that uses static data,
+ * but that hasn't received that data yet.
+ */
+ public static final int STATE_NO_STATIC_DATA = 2;
+
+ /**
+ * Denotes a successful operation.
+ */
+ public static final int SUCCESS = AudioSystem.SUCCESS;
+ /**
+ * Denotes a generic operation failure.
+ */
+ public static final int ERROR = AudioSystem.ERROR;
+ /**
+ * Denotes a failure due to the use of an invalid value.
+ */
+ public static final int ERROR_BAD_VALUE = AudioSystem.BAD_VALUE;
+ /**
+ * Denotes a failure due to the improper use of a method.
+ */
+ public static final int ERROR_INVALID_OPERATION = AudioSystem.INVALID_OPERATION;
+ /**
+ * An error code indicating that the object reporting it is no longer valid and needs to
+ * be recreated.
+ */
+ public static final int ERROR_DEAD_OBJECT = AudioSystem.DEAD_OBJECT;
+ /**
+ * {@link #getTimestampWithStatus(AudioTimestamp)} is called in STOPPED or FLUSHED state,
+ * or immediately after start/ACTIVE.
+ * @hide
+ */
+ public static final int ERROR_WOULD_BLOCK = AudioSystem.WOULD_BLOCK;
+
+ // Error codes:
+ // to keep in sync with frameworks/base/core/jni/android_media_AudioTrack.cpp
+ private static final int ERROR_NATIVESETUP_AUDIOSYSTEM = -16;
+ private static final int ERROR_NATIVESETUP_INVALIDCHANNELMASK = -17;
+ private static final int ERROR_NATIVESETUP_INVALIDFORMAT = -18;
+ private static final int ERROR_NATIVESETUP_INVALIDSTREAMTYPE = -19;
+ private static final int ERROR_NATIVESETUP_NATIVEINITFAILED = -20;
+
+ // Events:
+ // to keep in sync with frameworks/av/include/media/AudioTrack.h
+ /**
+ * Event id denotes when playback head has reached a previously set marker.
+ */
+ private static final int NATIVE_EVENT_MARKER = 3;
+ /**
+ * Event id denotes when previously set update period has elapsed during playback.
+ */
+ private static final int NATIVE_EVENT_NEW_POS = 4;
+
+ private final static String TAG = "android.media.AudioTrack";
+
+
+ /** @hide */
+ @IntDef({
+ WRITE_BLOCKING,
+ WRITE_NON_BLOCKING
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface WriteMode {}
+
+ /**
+ * The write mode indicating the write operation will block until all data has been written,
+ * to be used as the actual value of the writeMode parameter in
+ * {@link #write(byte[], int, int, int)}, {@link #write(short[], int, int, int)},
+ * {@link #write(float[], int, int, int)}, {@link #write(ByteBuffer, int, int)}, and
+ * {@link #write(ByteBuffer, int, int, long)}.
+ */
+ public final static int WRITE_BLOCKING = 0;
+
+ /**
+ * The write mode indicating the write operation will return immediately after
+ * queuing as much audio data for playback as possible without blocking,
+ * to be used as the actual value of the writeMode parameter in
+     * {@link #write(byte[], int, int, int)}, {@link #write(short[], int, int, int)},
+ * {@link #write(float[], int, int, int)}, {@link #write(ByteBuffer, int, int)}, and
+ * {@link #write(ByteBuffer, int, int, long)}.
+ */
+ public final static int WRITE_NON_BLOCKING = 1;
+
+ /** @hide */
+ @IntDef({
+ PERFORMANCE_MODE_NONE,
+ PERFORMANCE_MODE_LOW_LATENCY,
+ PERFORMANCE_MODE_POWER_SAVING
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface PerformanceMode {}
+
+ /**
+ * Default performance mode for an {@link AudioTrack}.
+ */
+ public static final int PERFORMANCE_MODE_NONE = 0;
+
+ /**
+ * Low latency performance mode for an {@link AudioTrack}.
+ * If the device supports it, this mode
+ * enables a lower latency path through to the audio output sink.
+ * Effects may no longer work with such an {@code AudioTrack} and
+ * the sample rate must match that of the output sink.
+ * <p>
+ * Applications should be aware that low latency requires careful
+ * buffer management, with smaller chunks of audio data written by each
+ * {@code write()} call.
+ * <p>
+ * If this flag is used without specifying a {@code bufferSizeInBytes} then the
+ * {@code AudioTrack}'s actual buffer size may be too small.
+ * It is recommended that a fairly
+ * large buffer should be specified when the {@code AudioTrack} is created.
+ * Then the actual size can be reduced by calling
+ * {@link #setBufferSizeInFrames(int)}. The buffer size can be optimized
+ * by lowering it after each {@code write()} call until the audio glitches,
+ * which is detected by calling
+ * {@link #getUnderrunCount()}. Then the buffer size can be increased
+ * until there are no glitches.
+ * This tuning step should be done while playing silence.
+ * This technique provides a compromise between latency and glitch rate.
+ */
+ public static final int PERFORMANCE_MODE_LOW_LATENCY = 1;
+
+ /**
+ * Power saving performance mode for an {@link AudioTrack}.
+ * If the device supports it, this
+ * mode will enable a lower power path to the audio output sink.
+ * In addition, this lower power path typically will have
+ * deeper internal buffers and better underrun resistance,
+ * with a tradeoff of higher latency.
+ * <p>
+ * In this mode, applications should attempt to use a larger buffer size
+ * and deliver larger chunks of audio data per {@code write()} call.
+ * Use {@link #getBufferSizeInFrames()} to determine
+ * the actual buffer size of the {@code AudioTrack} as it may have increased
+ * to accommodate a deeper buffer.
+ */
+ public static final int PERFORMANCE_MODE_POWER_SAVING = 2;
+
+ // keep in sync with system/media/audio/include/system/audio-base.h
+ private static final int AUDIO_OUTPUT_FLAG_FAST = 0x4;
+ private static final int AUDIO_OUTPUT_FLAG_DEEP_BUFFER = 0x8;
+
+ // Size of HW_AV_SYNC track AV header.
+ private static final float HEADER_V2_SIZE_BYTES = 20.0f;
+
+ //--------------------------------------------------------------------------
+ // Member variables
+ //--------------------
+ /**
+ * Indicates the state of the AudioTrack instance.
+ * One of STATE_UNINITIALIZED, STATE_INITIALIZED, or STATE_NO_STATIC_DATA.
+ */
+ private int mState = STATE_UNINITIALIZED;
+ /**
+ * Indicates the play state of the AudioTrack instance.
+ * One of PLAYSTATE_STOPPED, PLAYSTATE_PAUSED, or PLAYSTATE_PLAYING.
+ */
+ private int mPlayState = PLAYSTATE_STOPPED;
+ /**
+ * Lock to ensure mPlayState updates reflect the actual state of the object.
+ */
+ private final Object mPlayStateLock = new Object();
+ /**
+ * Sizes of the audio buffer.
+ * These values are set during construction and can be stale.
+ * To obtain the current audio buffer frame count use {@link #getBufferSizeInFrames()}.
+ */
+ private int mNativeBufferSizeInBytes = 0;
+ private int mNativeBufferSizeInFrames = 0;
+ /**
+ * Handler for events coming from the native code.
+ */
+ private NativePositionEventHandlerDelegate mEventHandlerDelegate;
+ /**
+ * Looper associated with the thread that creates the AudioTrack instance.
+ */
+ private final Looper mInitializationLooper;
+ /**
+ * The audio data source sampling rate in Hz.
+ * Never {@link AudioFormat#SAMPLE_RATE_UNSPECIFIED}.
+ */
+ private int mSampleRate; // initialized by all constructors via audioParamCheck()
+ /**
+ * The number of audio output channels (1 is mono, 2 is stereo, etc.).
+ */
+ private int mChannelCount = 1;
+ /**
+ * The audio channel mask used for calling native AudioTrack
+ */
+ private int mChannelMask = AudioFormat.CHANNEL_OUT_MONO;
+
+ /**
+ * The type of the audio stream to play. See
+ * {@link AudioManager#STREAM_VOICE_CALL}, {@link AudioManager#STREAM_SYSTEM},
+ * {@link AudioManager#STREAM_RING}, {@link AudioManager#STREAM_MUSIC},
+ * {@link AudioManager#STREAM_ALARM}, {@link AudioManager#STREAM_NOTIFICATION}, and
+ * {@link AudioManager#STREAM_DTMF}.
+ */
+ private int mStreamType = AudioManager.STREAM_MUSIC;
+
+ /**
+ * The way audio is consumed by the audio sink, one of MODE_STATIC or MODE_STREAM.
+ */
+ private int mDataLoadMode = MODE_STREAM;
+ /**
+ * The current channel position mask, as specified on AudioTrack creation.
+ * Can be set simultaneously with channel index mask {@link #mChannelIndexMask}.
+ * May be set to {@link AudioFormat#CHANNEL_INVALID} if a channel index mask is specified.
+ */
+ private int mChannelConfiguration = AudioFormat.CHANNEL_OUT_MONO;
+ /**
+ * The channel index mask if specified, otherwise 0.
+ */
+ private int mChannelIndexMask = 0;
+ /**
+ * The encoding of the audio samples.
+ * @see AudioFormat#ENCODING_PCM_8BIT
+ * @see AudioFormat#ENCODING_PCM_16BIT
+ * @see AudioFormat#ENCODING_PCM_FLOAT
+ */
+ private int mAudioFormat; // initialized by all constructors via audioParamCheck()
+ /**
+ * Audio session ID
+ */
+ private int mSessionId = AudioManager.AUDIO_SESSION_ID_GENERATE;
+ /**
+ * HW_AV_SYNC track AV Sync Header
+ */
+ private ByteBuffer mAvSyncHeader = null;
+ /**
+ * HW_AV_SYNC track audio data bytes remaining to write after current AV sync header
+ */
+ private int mAvSyncBytesRemaining = 0;
+ /**
+ * Offset of the first sample of the audio in byte from start of HW_AV_SYNC track AV header.
+ */
+ private int mOffset = 0;
+
+ //--------------------------------
+ // Used exclusively by native code
+ //--------------------
+ /**
+ * @hide
+ * Accessed by native methods: provides access to C++ AudioTrack object.
+ */
+ @SuppressWarnings("unused")
+ protected long mNativeTrackInJavaObj;
+ /**
+ * Accessed by native methods: provides access to the JNI data (i.e. resources used by
+ * the native AudioTrack object, but not stored in it).
+ */
+ @SuppressWarnings("unused")
+ private long mJniData;
+
+
+ //--------------------------------------------------------------------------
+ // Constructor, Finalize
+ //--------------------
+ /**
+ * Class constructor.
+ * @param streamType the type of the audio stream. See
+ * {@link AudioManager#STREAM_VOICE_CALL}, {@link AudioManager#STREAM_SYSTEM},
+ * {@link AudioManager#STREAM_RING}, {@link AudioManager#STREAM_MUSIC},
+ * {@link AudioManager#STREAM_ALARM}, and {@link AudioManager#STREAM_NOTIFICATION}.
+ * @param sampleRateInHz the initial source sample rate expressed in Hz.
+ * {@link AudioFormat#SAMPLE_RATE_UNSPECIFIED} means to use a route-dependent value
+ * which is usually the sample rate of the sink.
+ * {@link #getSampleRate()} can be used to retrieve the actual sample rate chosen.
+ * @param channelConfig describes the configuration of the audio channels.
+ * See {@link AudioFormat#CHANNEL_OUT_MONO} and
+ * {@link AudioFormat#CHANNEL_OUT_STEREO}
+ * @param audioFormat the format in which the audio data is represented.
+ * See {@link AudioFormat#ENCODING_PCM_16BIT},
+ * {@link AudioFormat#ENCODING_PCM_8BIT},
+ * and {@link AudioFormat#ENCODING_PCM_FLOAT}.
+ * @param bufferSizeInBytes the total size (in bytes) of the internal buffer where audio data is
+ * read from for playback. This should be a nonzero multiple of the frame size in bytes.
+ * <p> If the track's creation mode is {@link #MODE_STATIC},
+ * this is the maximum length sample, or audio clip, that can be played by this instance.
+ * <p> If the track's creation mode is {@link #MODE_STREAM},
+ * this should be the desired buffer size
+ * for the <code>AudioTrack</code> to satisfy the application's
+ * latency requirements.
+ * If <code>bufferSizeInBytes</code> is less than the
+ * minimum buffer size for the output sink, it is increased to the minimum
+ * buffer size.
+ * The method {@link #getBufferSizeInFrames()} returns the
+ * actual size in frames of the buffer created, which
+ * determines the minimum frequency to write
+ * to the streaming <code>AudioTrack</code> to avoid underrun.
+ * See {@link #getMinBufferSize(int, int, int)} to determine the estimated minimum buffer size
+ * for an AudioTrack instance in streaming mode.
+ * @param mode streaming or static buffer. See {@link #MODE_STATIC} and {@link #MODE_STREAM}
+ * @throws java.lang.IllegalArgumentException
+ * @deprecated use {@link Builder} or
+ * {@link #AudioTrack(AudioAttributes, AudioFormat, int, int, int)} to specify the
+ * {@link AudioAttributes} instead of the stream type which is only for volume control.
+ */
+    public AudioTrack(int streamType, int sampleRateInHz, int channelConfig, int audioFormat,
+            int bufferSizeInBytes, int mode)
+    throws IllegalArgumentException {
+        // Delegate to the session-aware constructor, asking the system to
+        // generate a fresh audio session ID for this track.
+        this(streamType, sampleRateInHz, channelConfig, audioFormat,
+                bufferSizeInBytes, mode, AudioManager.AUDIO_SESSION_ID_GENERATE);
+    }
+
+ /**
+ * Class constructor with audio session. Use this constructor when the AudioTrack must be
+ * attached to a particular audio session. The primary use of the audio session ID is to
+ * associate audio effects to a particular instance of AudioTrack: if an audio session ID
+ * is provided when creating an AudioEffect, this effect will be applied only to audio tracks
+ * and media players in the same session and not to the output mix.
+ * When an AudioTrack is created without specifying a session, it will create its own session
+ * which can be retrieved by calling the {@link #getAudioSessionId()} method.
+ * If a non-zero session ID is provided, this AudioTrack will share effects attached to this
+ * session
+ * with all other media players or audio tracks in the same session, otherwise a new session
+ * will be created for this track if none is supplied.
+ * @param streamType the type of the audio stream. See
+ * {@link AudioManager#STREAM_VOICE_CALL}, {@link AudioManager#STREAM_SYSTEM},
+ * {@link AudioManager#STREAM_RING}, {@link AudioManager#STREAM_MUSIC},
+ * {@link AudioManager#STREAM_ALARM}, and {@link AudioManager#STREAM_NOTIFICATION}.
+ * @param sampleRateInHz the initial source sample rate expressed in Hz.
+ * {@link AudioFormat#SAMPLE_RATE_UNSPECIFIED} means to use a route-dependent value
+ * which is usually the sample rate of the sink.
+ * @param channelConfig describes the configuration of the audio channels.
+ * See {@link AudioFormat#CHANNEL_OUT_MONO} and
+ * {@link AudioFormat#CHANNEL_OUT_STEREO}
+ * @param audioFormat the format in which the audio data is represented.
+     *   See {@link AudioFormat#ENCODING_PCM_16BIT},
+     *   {@link AudioFormat#ENCODING_PCM_8BIT},
+     *   and {@link AudioFormat#ENCODING_PCM_FLOAT}.
+ * @param bufferSizeInBytes the total size (in bytes) of the internal buffer where audio data is
+ * read from for playback. This should be a nonzero multiple of the frame size in bytes.
+ * <p> If the track's creation mode is {@link #MODE_STATIC},
+ * this is the maximum length sample, or audio clip, that can be played by this instance.
+ * <p> If the track's creation mode is {@link #MODE_STREAM},
+ * this should be the desired buffer size
+ * for the <code>AudioTrack</code> to satisfy the application's
+ * latency requirements.
+ * If <code>bufferSizeInBytes</code> is less than the
+ * minimum buffer size for the output sink, it is increased to the minimum
+ * buffer size.
+ * The method {@link #getBufferSizeInFrames()} returns the
+ * actual size in frames of the buffer created, which
+ * determines the minimum frequency to write
+ * to the streaming <code>AudioTrack</code> to avoid underrun.
+ * You can write data into this buffer in smaller chunks than this size.
+ * See {@link #getMinBufferSize(int, int, int)} to determine the estimated minimum buffer size
+ * for an AudioTrack instance in streaming mode.
+ * @param mode streaming or static buffer. See {@link #MODE_STATIC} and {@link #MODE_STREAM}
+ * @param sessionId Id of audio session the AudioTrack must be attached to
+ * @throws java.lang.IllegalArgumentException
+ * @deprecated use {@link Builder} or
+ * {@link #AudioTrack(AudioAttributes, AudioFormat, int, int, int)} to specify the
+ * {@link AudioAttributes} instead of the stream type which is only for volume control.
+ */
+    public AudioTrack(int streamType, int sampleRateInHz, int channelConfig, int audioFormat,
+            int bufferSizeInBytes, int mode, int sessionId)
+    throws IllegalArgumentException {
+        // mState already == STATE_UNINITIALIZED
+        // Wrap the legacy stream type into AudioAttributes and the raw PCM
+        // parameters into an AudioFormat, then delegate to the attributes-based
+        // constructor, which performs all validation and native setup.
+        this((new AudioAttributes.Builder())
+                    .setLegacyStreamType(streamType)
+                    .build(),
+                (new AudioFormat.Builder())
+                    .setChannelMask(channelConfig)
+                    .setEncoding(audioFormat)
+                    .setSampleRate(sampleRateInHz)
+                    .build(),
+                bufferSizeInBytes,
+                mode, sessionId);
+        // NOTE(review): presumably records/logs use of this deprecated
+        // stream-type entry point — defined outside this view.
+        deprecateStreamTypeForPlayback(streamType, "AudioTrack", "AudioTrack()");
+    }
+
+ /**
+ * Class constructor with {@link AudioAttributes} and {@link AudioFormat}.
+ * @param attributes a non-null {@link AudioAttributes} instance.
+ * @param format a non-null {@link AudioFormat} instance describing the format of the data
+ * that will be played through this AudioTrack. See {@link AudioFormat.Builder} for
+ * configuring the audio format parameters such as encoding, channel mask and sample rate.
+ * @param bufferSizeInBytes the total size (in bytes) of the internal buffer where audio data is
+ * read from for playback. This should be a nonzero multiple of the frame size in bytes.
+ * <p> If the track's creation mode is {@link #MODE_STATIC},
+ * this is the maximum length sample, or audio clip, that can be played by this instance.
+ * <p> If the track's creation mode is {@link #MODE_STREAM},
+ * this should be the desired buffer size
+ * for the <code>AudioTrack</code> to satisfy the application's
+ * latency requirements.
+ * If <code>bufferSizeInBytes</code> is less than the
+ * minimum buffer size for the output sink, it is increased to the minimum
+ * buffer size.
+ * The method {@link #getBufferSizeInFrames()} returns the
+ * actual size in frames of the buffer created, which
+ * determines the minimum frequency to write
+ * to the streaming <code>AudioTrack</code> to avoid underrun.
+ * See {@link #getMinBufferSize(int, int, int)} to determine the estimated minimum buffer size
+ * for an AudioTrack instance in streaming mode.
+ * @param mode streaming or static buffer. See {@link #MODE_STATIC} and {@link #MODE_STREAM}.
+ * @param sessionId ID of audio session the AudioTrack must be attached to, or
+ * {@link AudioManager#AUDIO_SESSION_ID_GENERATE} if the session isn't known at construction
+ * time. See also {@link AudioManager#generateAudioSessionId()} to obtain a session ID before
+ * construction.
+ * @throws IllegalArgumentException
+ */
+    public AudioTrack(AudioAttributes attributes, AudioFormat format, int bufferSizeInBytes,
+            int mode, int sessionId)
+                    throws IllegalArgumentException {
+        super(attributes, AudioPlaybackConfiguration.PLAYER_TYPE_JAM_AUDIOTRACK);
+        // mState already == STATE_UNINITIALIZED
+
+        if (format == null) {
+            throw new IllegalArgumentException("Illegal null AudioFormat");
+        }
+
+        // Check if we should enable deep buffer mode: if so, set FLAG_DEEP_BUFFER
+        // and clear FLAG_LOW_LATENCY on the inherited mAttributes.
+        if (shouldEnablePowerSaving(mAttributes, format, bufferSizeInBytes, mode)) {
+            mAttributes = new AudioAttributes.Builder(mAttributes)
+                .replaceFlags((mAttributes.getAllFlags()
+                        | AudioAttributes.FLAG_DEEP_BUFFER)
+                        & ~AudioAttributes.FLAG_LOW_LATENCY)
+                .build();
+        }
+
+        // remember which looper is associated with the AudioTrack instantiation,
+        // falling back to the main looper when created from a non-looper thread
+        Looper looper;
+        if ((looper = Looper.myLooper()) == null) {
+            looper = Looper.getMainLooper();
+        }
+
+        // SAMPLE_RATE_UNSPECIFIED is conveyed to the lower layers as 0
+        int rate = format.getSampleRate();
+        if (rate == AudioFormat.SAMPLE_RATE_UNSPECIFIED) {
+            rate = 0;
+        }
+
+        // Extract only the properties the caller actually set on the AudioFormat;
+        // unset masks stay 0 and an unset encoding falls back to ENCODING_DEFAULT.
+        int channelIndexMask = 0;
+        if ((format.getPropertySetMask()
+                & AudioFormat.AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_INDEX_MASK) != 0) {
+            channelIndexMask = format.getChannelIndexMask();
+        }
+        int channelMask = 0;
+        if ((format.getPropertySetMask()
+                & AudioFormat.AUDIO_FORMAT_HAS_PROPERTY_CHANNEL_MASK) != 0) {
+            channelMask = format.getChannelMask();
+        } else if (channelIndexMask == 0) { // if no masks at all, use stereo
+            channelMask = AudioFormat.CHANNEL_OUT_FRONT_LEFT
+                    | AudioFormat.CHANNEL_OUT_FRONT_RIGHT;
+        }
+        int encoding = AudioFormat.ENCODING_DEFAULT;
+        if ((format.getPropertySetMask() & AudioFormat.AUDIO_FORMAT_HAS_PROPERTY_ENCODING) != 0) {
+            encoding = format.getEncoding();
+        }
+        // Validates the parameters and initializes fields such as mSampleRate and
+        // mAudioFormat (see field declarations); throws IllegalArgumentException
+        // on invalid input.
+        audioParamCheck(rate, channelMask, channelIndexMask, encoding, mode);
+        mStreamType = AudioSystem.STREAM_DEFAULT;
+
+        audioBuffSizeCheck(bufferSizeInBytes);
+
+        mInitializationLooper = looper;
+
+        if (sessionId < 0) {
+            throw new IllegalArgumentException("Invalid audio session ID: "+sessionId);
+        }
+
+        // sampleRate and session are in/out parameters: on success the native
+        // layer writes back the actual values it selected (read below).
+        int[] sampleRate = new int[] {mSampleRate};
+        int[] session = new int[1];
+        session[0] = sessionId;
+        // native initialization
+        int initResult = native_setup(new WeakReference<AudioTrack>(this), mAttributes,
+                sampleRate, mChannelMask, mChannelIndexMask, mAudioFormat,
+                mNativeBufferSizeInBytes, mDataLoadMode, session, 0 /*nativeTrackInJavaObj*/);
+        if (initResult != SUCCESS) {
+            loge("Error code "+initResult+" when initializing AudioTrack.");
+            return; // with mState == STATE_UNINITIALIZED
+        }
+
+        mSampleRate = sampleRate[0];
+        mSessionId = session[0];
+
+        if ((mAttributes.getFlags() & AudioAttributes.FLAG_HW_AV_SYNC) != 0) {
+            int frameSizeInBytes;
+            if (AudioFormat.isEncodingLinearFrames(mAudioFormat)) {
+                frameSizeInBytes = mChannelCount * AudioFormat.getBytesPerSample(mAudioFormat);
+            } else {
+                frameSizeInBytes = 1;
+            }
+            // First audio sample starts after the AV sync header, rounded up to a
+            // whole number of frames (see HEADER_V2_SIZE_BYTES).
+            mOffset = ((int) Math.ceil(HEADER_V2_SIZE_BYTES / frameSizeInBytes)) * frameSizeInBytes;
+        }
+
+        // A static track stays in STATE_NO_STATIC_DATA until its buffer is filled.
+        if (mDataLoadMode == MODE_STATIC) {
+            mState = STATE_NO_STATIC_DATA;
+        } else {
+            mState = STATE_INITIALIZED;
+        }
+
+        baseRegisterPlayer();
+    }
+
+ /**
+ * A constructor which explicitly connects a Native (C++) AudioTrack. For use by
+ * the AudioTrackRoutingProxy subclass.
+ * @param nativeTrackInJavaObj a C/C++ pointer to a native AudioTrack
+ * (associated with an OpenSL ES player).
+ * IMPORTANT: For "N", this method is ONLY called to setup a Java routing proxy,
+ * i.e. IAndroidConfiguration::AcquireJavaProxy(). If we call with a 0 in nativeTrackInJavaObj
+ * it means that the OpenSL player interface hasn't been realized, so there is no native
+ * Audiotrack to connect to. In this case wait to call deferred_connect() until the
+ * OpenSLES interface is realized.
+ */
+    /*package*/ AudioTrack(long nativeTrackInJavaObj) {
+        // Register with PlayerBase using default (empty) attributes; this routing
+        // proxy carries no audio parameters of its own.
+        super(new AudioAttributes.Builder().build(),
+                AudioPlaybackConfiguration.PLAYER_TYPE_JAM_AUDIOTRACK);
+        // "final"s
+        mNativeTrackInJavaObj = 0;
+        mJniData = 0;
+
+        // remember which looper is associated with the AudioTrack instantiation
+        Looper looper;
+        if ((looper = Looper.myLooper()) == null) {
+            looper = Looper.getMainLooper();
+        }
+        mInitializationLooper = looper;
+
+        // other initialization...
+        if (nativeTrackInJavaObj != 0) {
+            baseRegisterPlayer();
+            deferred_connect(nativeTrackInJavaObj);
+        } else {
+            // 0 means there is no native AudioTrack to attach yet (see constructor
+            // doc); remain uninitialized until deferred_connect() is called later.
+            mState = STATE_UNINITIALIZED;
+        }
+    }
+
+ /**
+ * @hide
+ */
+    /* package */ void deferred_connect(long nativeTrackInJavaObj) {
+        // Idempotent: a second call on an already-initialized track is a no-op.
+        if (mState != STATE_INITIALIZED) {
+            // Note that for this native_setup, we are providing an already created/initialized
+            // *Native* AudioTrack, so the attributes parameters to native_setup() are ignored.
+            int[] session = { 0 };
+            int[] rates = { 0 };
+            int initResult = native_setup(new WeakReference<AudioTrack>(this),
+                    null /*mAttributes - NA*/,
+                    rates /*sampleRate - NA*/,
+                    0 /*mChannelMask - NA*/,
+                    0 /*mChannelIndexMask - NA*/,
+                    0 /*mAudioFormat - NA*/,
+                    0 /*mNativeBufferSizeInBytes - NA*/,
+                    0 /*mDataLoadMode - NA*/,
+                    session,
+                    nativeTrackInJavaObj);
+            if (initResult != SUCCESS) {
+                loge("Error code "+initResult+" when initializing AudioTrack.");
+                return; // with mState == STATE_UNINITIALIZED
+            }
+
+            // The native layer reports the session ID actually in use.
+            mSessionId = session[0];
+
+            mState = STATE_INITIALIZED;
+        }
+    }
+
+ /**
+ * Builder class for {@link AudioTrack} objects.
+ * Use this class to configure and create an <code>AudioTrack</code> instance. By setting audio
+ * attributes and audio format parameters, you indicate which of those vary from the default
+ * behavior on the device.
+ * <p> Here is an example where <code>Builder</code> is used to specify all {@link AudioFormat}
+ * parameters, to be used by a new <code>AudioTrack</code> instance:
+ *
+ * <pre class="prettyprint">
+ * AudioTrack player = new AudioTrack.Builder()
+ * .setAudioAttributes(new AudioAttributes.Builder()
+ * .setUsage(AudioAttributes.USAGE_ALARM)
+ * .setContentType(AudioAttributes.CONTENT_TYPE_MUSIC)
+ * .build())
+ * .setAudioFormat(new AudioFormat.Builder()
+ * .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
+ * .setSampleRate(44100)
+ * .setChannelMask(AudioFormat.CHANNEL_OUT_STEREO)
+ * .build())
+ * .setBufferSizeInBytes(minBuffSize)
+ * .build();
+ * </pre>
+ * <p>
+ * If the audio attributes are not set with {@link #setAudioAttributes(AudioAttributes)},
+ * attributes comprising {@link AudioAttributes#USAGE_MEDIA} will be used.
+ * <br>If the audio format is not specified or is incomplete, its channel configuration will be
+ * {@link AudioFormat#CHANNEL_OUT_STEREO} and the encoding will be
+ * {@link AudioFormat#ENCODING_PCM_16BIT}.
+ * The sample rate will depend on the device actually selected for playback and can be queried
+ * with {@link #getSampleRate()} method.
+ * <br>If the buffer size is not specified with {@link #setBufferSizeInBytes(int)},
+ * and the mode is {@link AudioTrack#MODE_STREAM}, the minimum buffer size is used.
+ * <br>If the transfer mode is not specified with {@link #setTransferMode(int)},
+ * <code>MODE_STREAM</code> will be used.
+ * <br>If the session ID is not specified with {@link #setSessionId(int)}, a new one will
+ * be generated.
+ */
+ public static class Builder {
+ private AudioAttributes mAttributes;
+ private AudioFormat mFormat;
+ private int mBufferSizeInBytes;
+ private int mSessionId = AudioManager.AUDIO_SESSION_ID_GENERATE;
+ private int mMode = MODE_STREAM;
+ private int mPerformanceMode = PERFORMANCE_MODE_NONE;
+
+ /**
+ * Constructs a new Builder with the default values as described above.
+ */
+        public Builder() {
+            // Nothing to do: fields keep the defaults declared above
+            // (MODE_STREAM, PERFORMANCE_MODE_NONE, AUDIO_SESSION_ID_GENERATE).
+        }
+
+ /**
+ * Sets the {@link AudioAttributes}.
+ * @param attributes a non-null {@link AudioAttributes} instance that describes the audio
+ * data to be played.
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+        public @NonNull Builder setAudioAttributes(@NonNull AudioAttributes attributes)
+                throws IllegalArgumentException {
+            // Guard-clause style: accept and return early, reject null afterwards.
+            if (attributes != null) {
+                // Keep the caller's reference; the data is only copied at build().
+                mAttributes = attributes;
+                return this;
+            }
+            throw new IllegalArgumentException("Illegal null AudioAttributes argument");
+        }
+
+ /**
+ * Sets the format of the audio data to be played by the {@link AudioTrack}.
+ * See {@link AudioFormat.Builder} for configuring the audio format parameters such
+ * as encoding, channel mask and sample rate.
+ * @param format a non-null {@link AudioFormat} instance.
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+        public @NonNull Builder setAudioFormat(@NonNull AudioFormat format)
+                throws IllegalArgumentException {
+            // Guard-clause style: accept and return early, reject null afterwards.
+            if (format != null) {
+                // Keep the caller's reference; the data is only copied at build().
+                mFormat = format;
+                return this;
+            }
+            throw new IllegalArgumentException("Illegal null AudioFormat argument");
+        }
+
+ /**
+ * Sets the total size (in bytes) of the buffer where audio data is read from for playback.
+ * If using the {@link AudioTrack} in streaming mode
+ * (see {@link AudioTrack#MODE_STREAM}, you can write data into this buffer in smaller
+ * chunks than this size. See {@link #getMinBufferSize(int, int, int)} to determine
+ * the estimated minimum buffer size for the creation of an AudioTrack instance
+ * in streaming mode.
+ * <br>If using the <code>AudioTrack</code> in static mode (see
+ * {@link AudioTrack#MODE_STATIC}), this is the maximum size of the sound that will be
+ * played by this instance.
+ * @param bufferSizeInBytes
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ public @NonNull Builder setBufferSizeInBytes(int bufferSizeInBytes)
+ throws IllegalArgumentException {
+ if (bufferSizeInBytes <= 0) {
+ throw new IllegalArgumentException("Invalid buffer size " + bufferSizeInBytes);
+ }
+ mBufferSizeInBytes = bufferSizeInBytes;
+ return this;
+ }
+
+ /**
+ * Sets the mode under which buffers of audio data are transferred from the
+ * {@link AudioTrack} to the framework.
+ * @param mode one of {@link AudioTrack#MODE_STREAM}, {@link AudioTrack#MODE_STATIC}.
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ public @NonNull Builder setTransferMode(@TransferMode int mode)
+ throws IllegalArgumentException {
+ switch(mode) {
+ case MODE_STREAM:
+ case MODE_STATIC:
+ mMode = mode;
+ break;
+ default:
+ throw new IllegalArgumentException("Invalid transfer mode " + mode);
+ }
+ return this;
+ }
+
+ /**
+ * Sets the session ID the {@link AudioTrack} will be attached to.
+ * @param sessionId a strictly positive ID number retrieved from another
+ * <code>AudioTrack</code> via {@link AudioTrack#getAudioSessionId()} or allocated by
+ * {@link AudioManager} via {@link AudioManager#generateAudioSessionId()}, or
+ * {@link AudioManager#AUDIO_SESSION_ID_GENERATE}.
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ public @NonNull Builder setSessionId(int sessionId)
+ throws IllegalArgumentException {
+ if ((sessionId != AudioManager.AUDIO_SESSION_ID_GENERATE) && (sessionId < 1)) {
+ throw new IllegalArgumentException("Invalid audio session ID " + sessionId);
+ }
+ mSessionId = sessionId;
+ return this;
+ }
+
+ /**
+ * Sets the {@link AudioTrack} performance mode. This is an advisory request which
+ * may not be supported by the particular device, and the framework is free
+ * to ignore such request if it is incompatible with other requests or hardware.
+ *
+ * @param performanceMode one of
+ * {@link AudioTrack#PERFORMANCE_MODE_NONE},
+ * {@link AudioTrack#PERFORMANCE_MODE_LOW_LATENCY},
+ * or {@link AudioTrack#PERFORMANCE_MODE_POWER_SAVING}.
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException if {@code performanceMode} is not valid.
+ */
+ public @NonNull Builder setPerformanceMode(@PerformanceMode int performanceMode) {
+ switch (performanceMode) {
+ case PERFORMANCE_MODE_NONE:
+ case PERFORMANCE_MODE_LOW_LATENCY:
+ case PERFORMANCE_MODE_POWER_SAVING:
+ mPerformanceMode = performanceMode;
+ break;
+ default:
+ throw new IllegalArgumentException(
+ "Invalid performance mode " + performanceMode);
+ }
+ return this;
+ }
+
+ /**
+ * Builds an {@link AudioTrack} instance initialized with all the parameters set
+ * on this <code>Builder</code>.
+ * @return a new successfully initialized {@link AudioTrack} instance.
+ * @throws UnsupportedOperationException if the parameters set on the <code>Builder</code>
+ * were incompatible, or if they are not supported by the device,
+ * or if the device was not available.
+ */
+ public @NonNull AudioTrack build() throws UnsupportedOperationException {
+ if (mAttributes == null) {
+ mAttributes = new AudioAttributes.Builder()
+ .setUsage(AudioAttributes.USAGE_MEDIA)
+ .build();
+ }
+ switch (mPerformanceMode) {
+ case PERFORMANCE_MODE_LOW_LATENCY:
+ mAttributes = new AudioAttributes.Builder(mAttributes)
+ .replaceFlags((mAttributes.getAllFlags()
+ | AudioAttributes.FLAG_LOW_LATENCY)
+ & ~AudioAttributes.FLAG_DEEP_BUFFER)
+ .build();
+ break;
+ case PERFORMANCE_MODE_NONE:
+ if (!shouldEnablePowerSaving(mAttributes, mFormat, mBufferSizeInBytes, mMode)) {
+ break; // do not enable deep buffer mode.
+ }
+ // permitted to fall through to enable deep buffer
+ case PERFORMANCE_MODE_POWER_SAVING:
+ mAttributes = new AudioAttributes.Builder(mAttributes)
+ .replaceFlags((mAttributes.getAllFlags()
+ | AudioAttributes.FLAG_DEEP_BUFFER)
+ & ~AudioAttributes.FLAG_LOW_LATENCY)
+ .build();
+ break;
+ }
+
+ if (mFormat == null) {
+ mFormat = new AudioFormat.Builder()
+ .setChannelMask(AudioFormat.CHANNEL_OUT_STEREO)
+ //.setSampleRate(AudioFormat.SAMPLE_RATE_UNSPECIFIED)
+ .setEncoding(AudioFormat.ENCODING_DEFAULT)
+ .build();
+ }
+ try {
+ // If the buffer size is not specified in streaming mode,
+ // use a single frame for the buffer size and let the
+ // native code figure out the minimum buffer size.
+ if (mMode == MODE_STREAM && mBufferSizeInBytes == 0) {
+ mBufferSizeInBytes = mFormat.getChannelCount()
+ * mFormat.getBytesPerSample(mFormat.getEncoding());
+ }
+ final AudioTrack track = new AudioTrack(
+ mAttributes, mFormat, mBufferSizeInBytes, mMode, mSessionId);
+ if (track.getState() == STATE_UNINITIALIZED) {
+ // release is not necessary
+ throw new UnsupportedOperationException("Cannot create AudioTrack");
+ }
+ return track;
+ } catch (IllegalArgumentException e) {
+ throw new UnsupportedOperationException(e.getMessage());
+ }
+ }
+ }
+
    // Mask of all the positional channels supported, however the allowed combinations
    // are further restricted by the matching left/right rule (see
    // isMultichannelConfigSupported()) and CHANNEL_COUNT_MAX.
    private static final int SUPPORTED_OUT_CHANNELS =
            AudioFormat.CHANNEL_OUT_FRONT_LEFT |
            AudioFormat.CHANNEL_OUT_FRONT_RIGHT |
            AudioFormat.CHANNEL_OUT_FRONT_CENTER |
            AudioFormat.CHANNEL_OUT_LOW_FREQUENCY |
            AudioFormat.CHANNEL_OUT_BACK_LEFT |
            AudioFormat.CHANNEL_OUT_BACK_RIGHT |
            AudioFormat.CHANNEL_OUT_BACK_CENTER |
            AudioFormat.CHANNEL_OUT_SIDE_LEFT |
            AudioFormat.CHANNEL_OUT_SIDE_RIGHT;
+
+ // Returns a boolean whether the attributes, format, bufferSizeInBytes, mode allow
+ // power saving to be automatically enabled for an AudioTrack. Returns false if
+ // power saving is already enabled in the attributes parameter.
+ private static boolean shouldEnablePowerSaving(
+ @Nullable AudioAttributes attributes, @Nullable AudioFormat format,
+ int bufferSizeInBytes, int mode) {
+ // If no attributes, OK
+ // otherwise check attributes for USAGE_MEDIA and CONTENT_UNKNOWN, MUSIC, or MOVIE.
+ if (attributes != null &&
+ (attributes.getAllFlags() != 0 // cannot have any special flags
+ || attributes.getUsage() != AudioAttributes.USAGE_MEDIA
+ || (attributes.getContentType() != AudioAttributes.CONTENT_TYPE_UNKNOWN
+ && attributes.getContentType() != AudioAttributes.CONTENT_TYPE_MUSIC
+ && attributes.getContentType() != AudioAttributes.CONTENT_TYPE_MOVIE))) {
+ return false;
+ }
+
+ // Format must be fully specified and be linear pcm
+ if (format == null
+ || format.getSampleRate() == AudioFormat.SAMPLE_RATE_UNSPECIFIED
+ || !AudioFormat.isEncodingLinearPcm(format.getEncoding())
+ || !AudioFormat.isValidEncoding(format.getEncoding())
+ || format.getChannelCount() < 1) {
+ return false;
+ }
+
+ // Mode must be streaming
+ if (mode != MODE_STREAM) {
+ return false;
+ }
+
+ // A buffer size of 0 is always compatible with deep buffer (when called from the Builder)
+ // but for app compatibility we only use deep buffer power saving for large buffer sizes.
+ if (bufferSizeInBytes != 0) {
+ final long BUFFER_TARGET_MODE_STREAM_MS = 100;
+ final int MILLIS_PER_SECOND = 1000;
+ final long bufferTargetSize =
+ BUFFER_TARGET_MODE_STREAM_MS
+ * format.getChannelCount()
+ * format.getBytesPerSample(format.getEncoding())
+ * format.getSampleRate()
+ / MILLIS_PER_SECOND;
+ if (bufferSizeInBytes < bufferTargetSize) {
+ return false;
+ }
+ }
+
+ return true;
+ }
+
    // Convenience method for the constructor's parameter checks.
    // This is where constructor IllegalArgumentException-s are thrown
    // postconditions:
    //    mChannelCount is valid
    //    mChannelMask is valid
    //    mAudioFormat is valid
    //    mSampleRate is valid
    //    mDataLoadMode is valid
    private void audioParamCheck(int sampleRateInHz, int channelConfig, int channelIndexMask,
                                 int audioFormat, int mode) {
        //--------------
        // sample rate, note these values are subject to change
        // (SAMPLE_RATE_UNSPECIFIED is explicitly allowed through)
        if ((sampleRateInHz < AudioFormat.SAMPLE_RATE_HZ_MIN ||
                sampleRateInHz > AudioFormat.SAMPLE_RATE_HZ_MAX) &&
                sampleRateInHz != AudioFormat.SAMPLE_RATE_UNSPECIFIED) {
            throw new IllegalArgumentException(sampleRateInHz
                    + "Hz is not a supported sample rate.");
        }
        mSampleRate = sampleRateInHz;

        // IEC61937 is based on stereo. We could coerce it to stereo.
        // But the application needs to know the stream is stereo so that
        // it is encoded and played correctly. So better to just reject it.
        if (audioFormat == AudioFormat.ENCODING_IEC61937
                && channelConfig != AudioFormat.CHANNEL_OUT_STEREO) {
            throw new IllegalArgumentException(
                    "ENCODING_IEC61937 must be configured as CHANNEL_OUT_STEREO");
        }

        //--------------
        // channel config
        mChannelConfiguration = channelConfig;

        switch (channelConfig) {
        case AudioFormat.CHANNEL_OUT_DEFAULT: //AudioFormat.CHANNEL_CONFIGURATION_DEFAULT
        case AudioFormat.CHANNEL_OUT_MONO:
        case AudioFormat.CHANNEL_CONFIGURATION_MONO:
            mChannelCount = 1;
            mChannelMask = AudioFormat.CHANNEL_OUT_MONO;
            break;
        case AudioFormat.CHANNEL_OUT_STEREO:
        case AudioFormat.CHANNEL_CONFIGURATION_STEREO:
            mChannelCount = 2;
            mChannelMask = AudioFormat.CHANNEL_OUT_STEREO;
            break;
        default:
            if (channelConfig == AudioFormat.CHANNEL_INVALID && channelIndexMask != 0) {
                // channel count stays 0 here and is filled in from the index mask below
                mChannelCount = 0;
                break; // channel index configuration only
            }
            if (!isMultichannelConfigSupported(channelConfig)) {
                // input channel configuration features unsupported channels
                throw new IllegalArgumentException("Unsupported channel configuration.");
            }
            mChannelMask = channelConfig;
            mChannelCount = AudioFormat.channelCountFromOutChannelMask(channelConfig);
        }
        // check the channel index configuration (if present)
        mChannelIndexMask = channelIndexMask;
        if (mChannelIndexMask != 0) {
            // restrictive: indexMask could allow up to AUDIO_CHANNEL_BITS_LOG2
            final int indexMask = (1 << CHANNEL_COUNT_MAX) - 1;
            if ((channelIndexMask & ~indexMask) != 0) {
                throw new IllegalArgumentException("Unsupported channel index configuration "
                        + channelIndexMask);
            }
            int channelIndexCount = Integer.bitCount(channelIndexMask);
            if (mChannelCount == 0) {
                // index-mask-only configuration: the index mask alone defines the count
                mChannelCount = channelIndexCount;
            } else if (mChannelCount != channelIndexCount) {
                // when both masks are given, they must agree on the channel count
                throw new IllegalArgumentException("Channel count must match");
            }
        }

        //--------------
        // audio format
        if (audioFormat == AudioFormat.ENCODING_DEFAULT) {
            audioFormat = AudioFormat.ENCODING_PCM_16BIT;
        }

        if (!AudioFormat.isPublicEncoding(audioFormat)) {
            throw new IllegalArgumentException("Unsupported audio encoding.");
        }
        mAudioFormat = audioFormat;

        //--------------
        // audio load mode
        // (MODE_STATIC additionally requires a linear PCM encoding)
        if (((mode != MODE_STREAM) && (mode != MODE_STATIC)) ||
                ((mode != MODE_STREAM) && !AudioFormat.isEncodingLinearPcm(mAudioFormat))) {
            throw new IllegalArgumentException("Invalid mode.");
        }
        mDataLoadMode = mode;
    }
+
+ /**
+ * Convenience method to check that the channel configuration (a.k.a channel mask) is supported
+ * @param channelConfig the mask to validate
+ * @return false if the AudioTrack can't be used with such a mask
+ */
+ private static boolean isMultichannelConfigSupported(int channelConfig) {
+ // check for unsupported channels
+ if ((channelConfig & SUPPORTED_OUT_CHANNELS) != channelConfig) {
+ loge("Channel configuration features unsupported channels");
+ return false;
+ }
+ final int channelCount = AudioFormat.channelCountFromOutChannelMask(channelConfig);
+ if (channelCount > CHANNEL_COUNT_MAX) {
+ loge("Channel configuration contains too many channels " +
+ channelCount + ">" + CHANNEL_COUNT_MAX);
+ return false;
+ }
+ // check for unsupported multichannel combinations:
+ // - FL/FR must be present
+ // - L/R channels must be paired (e.g. no single L channel)
+ final int frontPair =
+ AudioFormat.CHANNEL_OUT_FRONT_LEFT | AudioFormat.CHANNEL_OUT_FRONT_RIGHT;
+ if ((channelConfig & frontPair) != frontPair) {
+ loge("Front channels must be present in multichannel configurations");
+ return false;
+ }
+ final int backPair =
+ AudioFormat.CHANNEL_OUT_BACK_LEFT | AudioFormat.CHANNEL_OUT_BACK_RIGHT;
+ if ((channelConfig & backPair) != 0) {
+ if ((channelConfig & backPair) != backPair) {
+ loge("Rear channels can't be used independently");
+ return false;
+ }
+ }
+ final int sidePair =
+ AudioFormat.CHANNEL_OUT_SIDE_LEFT | AudioFormat.CHANNEL_OUT_SIDE_RIGHT;
+ if ((channelConfig & sidePair) != 0
+ && (channelConfig & sidePair) != sidePair) {
+ loge("Side channels can't be used independently");
+ return false;
+ }
+ return true;
+ }
+
+
+ // Convenience method for the constructor's audio buffer size check.
+ // preconditions:
+ // mChannelCount is valid
+ // mAudioFormat is valid
+ // postcondition:
+ // mNativeBufferSizeInBytes is valid (multiple of frame size, positive)
+ private void audioBuffSizeCheck(int audioBufferSize) {
+ // NB: this section is only valid with PCM or IEC61937 data.
+ // To update when supporting compressed formats
+ int frameSizeInBytes;
+ if (AudioFormat.isEncodingLinearFrames(mAudioFormat)) {
+ frameSizeInBytes = mChannelCount * AudioFormat.getBytesPerSample(mAudioFormat);
+ } else {
+ frameSizeInBytes = 1;
+ }
+ if ((audioBufferSize % frameSizeInBytes != 0) || (audioBufferSize < 1)) {
+ throw new IllegalArgumentException("Invalid audio buffer size.");
+ }
+
+ mNativeBufferSizeInBytes = audioBufferSize;
+ mNativeBufferSizeInFrames = audioBufferSize / frameSizeInBytes;
+ }
+
+
+ /**
+ * Releases the native AudioTrack resources.
+ */
+ public void release() {
+ // even though native_release() stops the native AudioTrack, we need to stop
+ // AudioTrack subclasses too.
+ try {
+ stop();
+ } catch(IllegalStateException ise) {
+ // don't raise an exception, we're releasing the resources.
+ }
+ baseRelease();
+ native_release();
+ mState = STATE_UNINITIALIZED;
+ }
+
    @Override
    protected void finalize() {
        // Release the player-base resources before tearing down the native track.
        baseRelease();
        native_finalize();
    }
+
+ //--------------------------------------------------------------------------
+ // Getters
+ //--------------------
+ /**
+ * Returns the minimum gain value, which is the constant 0.0.
+ * Gain values less than 0.0 will be clamped to 0.0.
+ * <p>The word "volume" in the API name is historical; this is actually a linear gain.
+ * @return the minimum value, which is the constant 0.0.
+ */
+ static public float getMinVolume() {
+ return GAIN_MIN;
+ }
+
+ /**
+ * Returns the maximum gain value, which is greater than or equal to 1.0.
+ * Gain values greater than the maximum will be clamped to the maximum.
+ * <p>The word "volume" in the API name is historical; this is actually a gain.
+ * expressed as a linear multiplier on sample values, where a maximum value of 1.0
+ * corresponds to a gain of 0 dB (sample values left unmodified).
+ * @return the maximum value, which is greater than or equal to 1.0.
+ */
+ static public float getMaxVolume() {
+ return GAIN_MAX;
+ }
+
    /**
     * Returns the configured audio source sample rate in Hz.
     * The initial source sample rate depends on the constructor parameters,
     * but the source sample rate may change if {@link #setPlaybackRate(int)} is called.
     * If the constructor had a specific sample rate, then the initial sink sample rate is that
     * value.
     * If the constructor had {@link AudioFormat#SAMPLE_RATE_UNSPECIFIED},
     * then the initial sink sample rate is a route-dependent default value based on the source [sic].
     * @return the source sample rate in Hz recorded at configuration time.
     */
    public int getSampleRate() {
        return mSampleRate;
    }
+
    /**
     * Returns the current playback sample rate in Hz.
     * This is queried from the native layer, so it reflects any change made
     * through {@link #setPlaybackRate(int)}.
     */
    public int getPlaybackRate() {
        return native_get_playback_rate();
    }
+
    /**
     * Returns the current playback parameters.
     * See {@link #setPlaybackParams(PlaybackParams)} to set playback parameters.
     * @return current {@link PlaybackParams}.
     * @throws IllegalStateException if track is not initialized.
     */
    public @NonNull PlaybackParams getPlaybackParams() {
        return native_get_playback_params();
    }
+
    /**
     * Returns the configured audio data encoding. See {@link AudioFormat#ENCODING_PCM_8BIT},
     * {@link AudioFormat#ENCODING_PCM_16BIT}, and {@link AudioFormat#ENCODING_PCM_FLOAT}.
     * <p>Never returns {@link AudioFormat#ENCODING_DEFAULT}: that value is resolved to
     * PCM 16-bit during parameter checking at construction.
     */
    public int getAudioFormat() {
        return mAudioFormat;
    }
+
    /**
     * Returns the volume stream type of this AudioTrack.
     * Compare the result against {@link AudioManager#STREAM_VOICE_CALL},
     * {@link AudioManager#STREAM_SYSTEM}, {@link AudioManager#STREAM_RING},
     * {@link AudioManager#STREAM_MUSIC}, {@link AudioManager#STREAM_ALARM},
     * {@link AudioManager#STREAM_NOTIFICATION}, {@link AudioManager#STREAM_DTMF} or
     * {@link AudioManager#STREAM_ACCESSIBILITY}.
     * @return the stream type recorded when this track was configured.
     */
    public int getStreamType() {
        return mStreamType;
    }
+
    /**
     * Returns the configured channel position mask.
     * <p> For example, refer to {@link AudioFormat#CHANNEL_OUT_MONO},
     * {@link AudioFormat#CHANNEL_OUT_STEREO}, {@link AudioFormat#CHANNEL_OUT_5POINT1}.
     * This method may return {@link AudioFormat#CHANNEL_INVALID} if
     * a channel index mask was used. Consider
     * {@link #getFormat()} instead, to obtain an {@link AudioFormat},
     * which contains both the channel position mask and the channel index mask.
     * @return the channel position mask supplied at construction.
     */
    public int getChannelConfiguration() {
        return mChannelConfiguration;
    }
+
+ /**
+ * Returns the configured <code>AudioTrack</code> format.
+ * @return an {@link AudioFormat} containing the
+ * <code>AudioTrack</code> parameters at the time of configuration.
+ */
+ public @NonNull AudioFormat getFormat() {
+ AudioFormat.Builder builder = new AudioFormat.Builder()
+ .setSampleRate(mSampleRate)
+ .setEncoding(mAudioFormat);
+ if (mChannelConfiguration != AudioFormat.CHANNEL_INVALID) {
+ builder.setChannelMask(mChannelConfiguration);
+ }
+ if (mChannelIndexMask != AudioFormat.CHANNEL_INVALID /* 0 */) {
+ builder.setChannelIndexMask(mChannelIndexMask);
+ }
+ return builder.build();
+ }
+
    /**
     * Returns the configured number of channels.
     * The count is derived from the channel position and/or index masks at construction.
     */
    public int getChannelCount() {
        return mChannelCount;
    }
+
    /**
     * Returns the state of the AudioTrack instance. This is useful after the
     * AudioTrack instance has been created to check if it was initialized
     * properly. This ensures that the appropriate resources have been acquired.
     * @return one of the STATE_* constants listed below.
     * @see #STATE_UNINITIALIZED
     * @see #STATE_INITIALIZED
     * @see #STATE_NO_STATIC_DATA
     */
    public int getState() {
        return mState;
    }
+
    /**
     * Returns the playback state of the AudioTrack instance.
     * @see #PLAYSTATE_STOPPED
     * @see #PLAYSTATE_PAUSED
     * @see #PLAYSTATE_PLAYING
     */
    public int getPlayState() {
        // mPlayState is guarded by mPlayStateLock; read under the same lock
        // for a consistent view.
        synchronized (mPlayStateLock) {
            return mPlayState;
        }
    }
+
+
    /**
     * Returns the effective size of the <code>AudioTrack</code> buffer
     * that the application writes to.
     * <p> This will be less than or equal to the result of
     * {@link #getBufferCapacityInFrames()}.
     * It will be equal if {@link #setBufferSizeInFrames(int)} has never been called.
     * <p> If the track is subsequently routed to a different output sink, the buffer
     * size and capacity may enlarge to accommodate.
     * <p> If the <code>AudioTrack</code> encoding indicates compressed data,
     * e.g. {@link AudioFormat#ENCODING_AC3}, then the frame count returned is
     * the size of the <code>AudioTrack</code> buffer in bytes.
     * <p> See also {@link AudioManager#getProperty(String)} for key
     * {@link AudioManager#PROPERTY_OUTPUT_FRAMES_PER_BUFFER}.
     * @return current size in frames of the <code>AudioTrack</code> buffer.
     * @throws IllegalStateException if track is not initialized.
     * @see #setBufferSizeInFrames(int)
     */
    public int getBufferSizeInFrames() {
        return native_get_buffer_size_frames();
    }
+
+ /**
+ * Limits the effective size of the <code>AudioTrack</code> buffer
+ * that the application writes to.
+ * <p> A write to this AudioTrack will not fill the buffer beyond this limit.
+ * If a blocking write is used then the write will block until the data
+ * can fit within this limit.
+ * <p>Changing this limit modifies the latency associated with
+ * the buffer for this track. A smaller size will give lower latency
+ * but there may be more glitches due to buffer underruns.
+ * <p>The actual size used may not be equal to this requested size.
+ * It will be limited to a valid range with a maximum of
+ * {@link #getBufferCapacityInFrames()}.
+ * It may also be adjusted slightly for internal reasons.
+ * If bufferSizeInFrames is less than zero then {@link #ERROR_BAD_VALUE}
+ * will be returned.
+ * <p>This method is only supported for PCM audio.
+ * It is not supported for compressed audio tracks.
+ *
+ * @param bufferSizeInFrames requested buffer size in frames
+ * @return the actual buffer size in frames or an error code,
+ * {@link #ERROR_BAD_VALUE}, {@link #ERROR_INVALID_OPERATION}
+ * @throws IllegalStateException if track is not initialized.
+ */
+ public int setBufferSizeInFrames(int bufferSizeInFrames) {
+ if (mDataLoadMode == MODE_STATIC || mState == STATE_UNINITIALIZED) {
+ return ERROR_INVALID_OPERATION;
+ }
+ if (bufferSizeInFrames < 0) {
+ return ERROR_BAD_VALUE;
+ }
+ return native_set_buffer_size_frames(bufferSizeInFrames);
+ }
+
    /**
     * Returns the maximum size of the <code>AudioTrack</code> buffer in frames.
     * <p> If the track's creation mode is {@link #MODE_STATIC},
     * it is equal to the specified bufferSizeInBytes on construction, converted to frame units.
     * A static track's frame count will not change.
     * <p> If the track's creation mode is {@link #MODE_STREAM},
     * it is greater than or equal to the specified bufferSizeInBytes converted to frame units.
     * For streaming tracks, this value may be rounded up to a larger value if needed by
     * the target output sink, and
     * if the track is subsequently routed to a different output sink, the
     * frame count may enlarge to accommodate.
     * <p> If the <code>AudioTrack</code> encoding indicates compressed data,
     * e.g. {@link AudioFormat#ENCODING_AC3}, then the frame count returned is
     * the size of the <code>AudioTrack</code> buffer in bytes.
     * <p> See also {@link AudioManager#getProperty(String)} for key
     * {@link AudioManager#PROPERTY_OUTPUT_FRAMES_PER_BUFFER}.
     * @return maximum size in frames of the <code>AudioTrack</code> buffer.
     * @throws IllegalStateException if track is not initialized.
     * @see #getBufferSizeInFrames()
     */
    public int getBufferCapacityInFrames() {
        return native_get_buffer_capacity_frames();
    }
+
    /**
     * Returns the frame count of the native <code>AudioTrack</code> buffer.
     * Note this returns the buffer <i>capacity</i> (same native query as
     * {@link #getBufferCapacityInFrames()}), not the effective buffer size.
     * @return current size in frames of the <code>AudioTrack</code> buffer.
     * @throws IllegalStateException
     * @deprecated Use the identical public method {@link #getBufferSizeInFrames()} instead.
     */
    @Deprecated
    protected int getNativeFrameCount() {
        return native_get_buffer_capacity_frames();
    }
+
    /**
     * Returns marker position expressed in frames.
     * Marker state lives on the native side; this is a pass-through query.
     * @return marker position in wrapping frame units similar to {@link #getPlaybackHeadPosition},
     * or zero if marker is disabled.
     */
    public int getNotificationMarkerPosition() {
        return native_get_marker_pos();
    }
+
    /**
     * Returns the notification update period expressed in frames.
     * Zero means that no position update notifications are being delivered.
     * The period is maintained on the native side; this is a pass-through query.
     */
    public int getPositionNotificationPeriod() {
        return native_get_pos_update_period();
    }
+
    /**
     * Returns the playback head position expressed in frames.
     * Though the "int" type is signed 32-bits, the value should be reinterpreted as if it is
     * unsigned 32-bits. That is, the next position after 0x7FFFFFFF is (int) 0x80000000.
     * This is a continuously advancing counter. It will wrap (overflow) periodically,
     * for example approximately once every 27:03:11 hours:minutes:seconds at 44.1 kHz.
     * It is reset to zero by {@link #flush()}, {@link #reloadStaticData()}, and {@link #stop()}.
     * If the track's creation mode is {@link #MODE_STATIC}, the return value indicates
     * the total number of frames played since reset,
     * <i>not</i> the current offset within the buffer.
     * @return the playback head position in (wrapping, unsigned) frame units.
     */
    public int getPlaybackHeadPosition() {
        return native_get_position();
    }
+
    /**
     * Returns this track's estimated latency in milliseconds. This includes the latency due
     * to AudioTrack buffer size, AudioMixer (if any) and audio hardware driver.
     *
     * DO NOT UNHIDE. The existing approach for doing A/V sync has too many problems. We need
     * a better solution.
     * @hide
     */
    public int getLatency() {
        return native_get_latency();
    }
+
    /**
     * Returns the number of underrun occurrences in the application-level write buffer
     * since the AudioTrack was created.
     * An underrun occurs if the application does not write audio
     * data quickly enough, causing the buffer to underflow
     * and a potential audio glitch or pop.
     * <p>
     * Underruns are less likely when buffer sizes are large.
     * It may be possible to eliminate underruns by recreating the AudioTrack with
     * a larger buffer.
     * Or by using {@link #setBufferSizeInFrames(int)} to dynamically increase the
     * effective size of the buffer.
     * @return the cumulative underrun count reported by the native layer.
     */
    public int getUnderrunCount() {
        return native_get_underrun_count();
    }
+
+ /**
+ * Returns the current performance mode of the {@link AudioTrack}.
+ *
+ * @return one of {@link AudioTrack#PERFORMANCE_MODE_NONE},
+ * {@link AudioTrack#PERFORMANCE_MODE_LOW_LATENCY},
+ * or {@link AudioTrack#PERFORMANCE_MODE_POWER_SAVING}.
+ * Use {@link AudioTrack.Builder#setPerformanceMode}
+ * in the {@link AudioTrack.Builder} to enable a performance mode.
+ * @throws IllegalStateException if track is not initialized.
+ */
+ public @PerformanceMode int getPerformanceMode() {
+ final int flags = native_get_flags();
+ if ((flags & AUDIO_OUTPUT_FLAG_FAST) != 0) {
+ return PERFORMANCE_MODE_LOW_LATENCY;
+ } else if ((flags & AUDIO_OUTPUT_FLAG_DEEP_BUFFER) != 0) {
+ return PERFORMANCE_MODE_POWER_SAVING;
+ } else {
+ return PERFORMANCE_MODE_NONE;
+ }
+ }
+
+ /**
+ * Returns the output sample rate in Hz for the specified stream type.
+ */
+ static public int getNativeOutputSampleRate(int streamType) {
+ return native_get_output_sample_rate(streamType);
+ }
+
+ /**
+ * Returns the estimated minimum buffer size required for an AudioTrack
+ * object to be created in the {@link #MODE_STREAM} mode.
+ * The size is an estimate because it does not consider either the route or the sink,
+ * since neither is known yet. Note that this size doesn't
+ * guarantee a smooth playback under load, and higher values should be chosen according to
+ * the expected frequency at which the buffer will be refilled with additional data to play.
+ * For example, if you intend to dynamically set the source sample rate of an AudioTrack
+ * to a higher value than the initial source sample rate, be sure to configure the buffer size
+ * based on the highest planned sample rate.
+ * @param sampleRateInHz the source sample rate expressed in Hz.
+ * {@link AudioFormat#SAMPLE_RATE_UNSPECIFIED} is not permitted.
+ * @param channelConfig describes the configuration of the audio channels.
+ * See {@link AudioFormat#CHANNEL_OUT_MONO} and
+ * {@link AudioFormat#CHANNEL_OUT_STEREO}
+ * @param audioFormat the format in which the audio data is represented.
+ * See {@link AudioFormat#ENCODING_PCM_16BIT} and
+ * {@link AudioFormat#ENCODING_PCM_8BIT},
+ * and {@link AudioFormat#ENCODING_PCM_FLOAT}.
+ * @return {@link #ERROR_BAD_VALUE} if an invalid parameter was passed,
+ * or {@link #ERROR} if unable to query for output properties,
+ * or the minimum buffer size expressed in bytes.
+ */
+ static public int getMinBufferSize(int sampleRateInHz, int channelConfig, int audioFormat) {
+ int channelCount = 0;
+ switch(channelConfig) {
+ case AudioFormat.CHANNEL_OUT_MONO:
+ case AudioFormat.CHANNEL_CONFIGURATION_MONO:
+ channelCount = 1;
+ break;
+ case AudioFormat.CHANNEL_OUT_STEREO:
+ case AudioFormat.CHANNEL_CONFIGURATION_STEREO:
+ channelCount = 2;
+ break;
+ default:
+ if (!isMultichannelConfigSupported(channelConfig)) {
+ loge("getMinBufferSize(): Invalid channel configuration.");
+ return ERROR_BAD_VALUE;
+ } else {
+ channelCount = AudioFormat.channelCountFromOutChannelMask(channelConfig);
+ }
+ }
+
+ if (!AudioFormat.isPublicEncoding(audioFormat)) {
+ loge("getMinBufferSize(): Invalid audio format.");
+ return ERROR_BAD_VALUE;
+ }
+
+ // sample rate, note these values are subject to change
+ // Note: AudioFormat.SAMPLE_RATE_UNSPECIFIED is not allowed
+ if ( (sampleRateInHz < AudioFormat.SAMPLE_RATE_HZ_MIN) ||
+ (sampleRateInHz > AudioFormat.SAMPLE_RATE_HZ_MAX) ) {
+ loge("getMinBufferSize(): " + sampleRateInHz + " Hz is not a supported sample rate.");
+ return ERROR_BAD_VALUE;
+ }
+
+ int size = native_get_min_buff_size(sampleRateInHz, channelCount, audioFormat);
+ if (size <= 0) {
+ loge("getMinBufferSize(): error querying hardware");
+ return ERROR;
+ }
+ else {
+ return size;
+ }
+ }
+
    /**
     * Returns the audio session ID.
     * The ID is fixed at construction (either caller-supplied or generated).
     *
     * @return the ID of the audio session this AudioTrack belongs to.
     */
    public int getAudioSessionId() {
        return mSessionId;
    }
+
+ /**
+ * Poll for a timestamp on demand.
+ * <p>
+ * If you need to track timestamps during initial warmup or after a routing or mode change,
+ * you should request a new timestamp periodically until the reported timestamps
+ * show that the frame position is advancing, or until it becomes clear that
+ * timestamps are unavailable for this route.
+ * <p>
+ * After the clock is advancing at a stable rate,
+ * query for a new timestamp approximately once every 10 seconds to once per minute.
+ * Calling this method more often is inefficient.
+ * It is also counter-productive to call this method more often than recommended,
+ * because the short-term differences between successive timestamp reports are not meaningful.
+ * If you need a high-resolution mapping between frame position and presentation time,
+ * consider implementing that at application level, based on low-resolution timestamps.
+ * <p>
+ * The audio data at the returned position may either already have been
+ * presented, or may have not yet been presented but is committed to be presented.
+ * It is not possible to request the time corresponding to a particular position,
+ * or to request the (fractional) position corresponding to a particular time.
+ * If you need such features, consider implementing them at application level.
+ *
+ * @param timestamp a reference to a non-null AudioTimestamp instance allocated
+ * and owned by caller.
+ * @return true if a timestamp is available, or false if no timestamp is available.
+ * If a timestamp if available,
+ * the AudioTimestamp instance is filled in with a position in frame units, together
+ * with the estimated time when that frame was presented or is committed to
+ * be presented.
+ * In the case that no timestamp is available, any supplied instance is left unaltered.
+ * A timestamp may be temporarily unavailable while the audio clock is stabilizing,
+ * or during and immediately after a route change.
+ * A timestamp is permanently unavailable for a given route if the route does not support
+ * timestamps. In this case, the approximate frame position can be obtained
+ * using {@link #getPlaybackHeadPosition}.
+ * However, it may be useful to continue to query for
+ * timestamps occasionally, to recover after a route change.
+ */
+ // Add this text when the "on new timestamp" API is added:
+ // Use if you need to get the most recent timestamp outside of the event callback handler.
+ public boolean getTimestamp(AudioTimestamp timestamp)
+ {
+ if (timestamp == null) {
+ throw new IllegalArgumentException();
+ }
+ // It's unfortunate, but we have to either create garbage every time or use synchronized
+ long[] longArray = new long[2];
+ int ret = native_get_timestamp(longArray);
+ if (ret != SUCCESS) {
+ return false;
+ }
+ timestamp.framePosition = longArray[0];
+ timestamp.nanoTime = longArray[1];
+ return true;
+ }
+
+ /**
+ * Poll for a timestamp on demand.
+ * <p>
+ * Same as {@link #getTimestamp(AudioTimestamp)} but with a more useful return code.
+ *
+ * @param timestamp a reference to a non-null AudioTimestamp instance allocated
+ * and owned by caller.
+ * @return {@link #SUCCESS} if a timestamp is available
+ * {@link #ERROR_WOULD_BLOCK} if called in STOPPED or FLUSHED state, or if called
+ * immediately after start/ACTIVE, when the number of frames consumed is less than the
+ * overall hardware latency to physical output. In WOULD_BLOCK cases, one might poll
+ * again, or use {@link #getPlaybackHeadPosition}, or use 0 position and current time
+ * for the timestamp.
+ * {@link #ERROR_DEAD_OBJECT} if the AudioTrack is not valid anymore and
+ * needs to be recreated.
+ * {@link #ERROR_INVALID_OPERATION} if current route does not support
+ * timestamps. In this case, the approximate frame position can be obtained
+ * using {@link #getPlaybackHeadPosition}.
+ *
+ * The AudioTimestamp instance is filled in with a position in frame units, together
+ * with the estimated time when that frame was presented or is committed to
+ * be presented.
+ * @hide
+ */
+ // Add this text when the "on new timestamp" API is added:
+ // Use if you need to get the most recent timestamp outside of the event callback handler.
+ public int getTimestampWithStatus(AudioTimestamp timestamp)
+ {
+ if (timestamp == null) {
+ throw new IllegalArgumentException();
+ }
+ // It's unfortunate, but we have to either create garbage every time or use synchronized
+ long[] longArray = new long[2];
+ int ret = native_get_timestamp(longArray);
+ timestamp.framePosition = longArray[0];
+ timestamp.nanoTime = longArray[1];
+ return ret;
+ }
+
+ //--------------------------------------------------------------------------
+ // Initialization / configuration
+ //--------------------
+ /**
+ * Sets the listener the AudioTrack notifies when a previously set marker is reached or
+ * for each periodic playback head position update.
+ * Notifications will be received in the same thread as the one in which the AudioTrack
+ * instance was created.
+ * @param listener
+ */
+ public void setPlaybackPositionUpdateListener(OnPlaybackPositionUpdateListener listener) {
+ setPlaybackPositionUpdateListener(listener, null);
+ }
+
+ /**
+ * Sets the listener the AudioTrack notifies when a previously set marker is reached or
+ * for each periodic playback head position update.
+ * Use this method to receive AudioTrack events in the Handler associated with another
+ * thread than the one in which you created the AudioTrack instance.
+ * @param listener
+ * @param handler the Handler that will receive the event notification messages.
+ */
+ public void setPlaybackPositionUpdateListener(OnPlaybackPositionUpdateListener listener,
+ Handler handler) {
+ if (listener != null) {
+ mEventHandlerDelegate = new NativePositionEventHandlerDelegate(this, listener, handler);
+ } else {
+ mEventHandlerDelegate = null;
+ }
+ }
+
+
+ private static float clampGainOrLevel(float gainOrLevel) {
+ if (Float.isNaN(gainOrLevel)) {
+ throw new IllegalArgumentException();
+ }
+ if (gainOrLevel < GAIN_MIN) {
+ gainOrLevel = GAIN_MIN;
+ } else if (gainOrLevel > GAIN_MAX) {
+ gainOrLevel = GAIN_MAX;
+ }
+ return gainOrLevel;
+ }
+
+
+ /**
+ * Sets the specified left and right output gain values on the AudioTrack.
+ * <p>Gain values are clamped to the closed interval [0.0, max] where
+ * max is the value of {@link #getMaxVolume}.
+ * A value of 0.0 results in zero gain (silence), and
+ * a value of 1.0 means unity gain (signal unchanged).
+ * The default value is 1.0 meaning unity gain.
+ * <p>The word "volume" in the API name is historical; this is actually a linear gain.
+ * @param leftGain output gain for the left channel.
+ * @param rightGain output gain for the right channel
+ * @return error code or success, see {@link #SUCCESS},
+ * {@link #ERROR_INVALID_OPERATION}
+ * @deprecated Applications should use {@link #setVolume} instead, as it
+ * more gracefully scales down to mono, and up to multi-channel content beyond stereo.
+ */
+ @Deprecated
+ public int setStereoVolume(float leftGain, float rightGain) {
+ if (mState == STATE_UNINITIALIZED) {
+ return ERROR_INVALID_OPERATION;
+ }
+
+ baseSetVolume(leftGain, rightGain);
+ return SUCCESS;
+ }
+
+ @Override
+ void playerSetVolume(boolean muting, float leftVolume, float rightVolume) {
+ leftVolume = clampGainOrLevel(muting ? 0.0f : leftVolume);
+ rightVolume = clampGainOrLevel(muting ? 0.0f : rightVolume);
+
+ native_setVolume(leftVolume, rightVolume);
+ }
+
+
+ /**
+ * Sets the specified output gain value on all channels of this track.
+ * <p>Gain values are clamped to the closed interval [0.0, max] where
+ * max is the value of {@link #getMaxVolume}.
+ * A value of 0.0 results in zero gain (silence), and
+ * a value of 1.0 means unity gain (signal unchanged).
+ * The default value is 1.0 meaning unity gain.
+ * <p>This API is preferred over {@link #setStereoVolume}, as it
+ * more gracefully scales down to mono, and up to multi-channel content beyond stereo.
+ * <p>The word "volume" in the API name is historical; this is actually a linear gain.
+ * @param gain output gain for all channels.
+ * @return error code or success, see {@link #SUCCESS},
+ * {@link #ERROR_INVALID_OPERATION}
+ */
+ public int setVolume(float gain) {
+ return setStereoVolume(gain, gain);
+ }
+
    // Bridges the player-level VolumeShaper plumbing to this track's native layer.
    // NOTE(review): the int result comes straight from the native call; presumably a
    // status code or shaper id -- confirm against the VolumeShaper implementation.
    @Override
    /* package */ int playerApplyVolumeShaper(
            @NonNull VolumeShaper.Configuration configuration,
            @NonNull VolumeShaper.Operation operation) {
        return native_applyVolumeShaper(configuration, operation);
    }
+
    // Fetches the current state of the VolumeShaper identified by id from the native
    // layer. May return null (hence @Nullable) when no state is available for that id.
    @Override
    /* package */ @Nullable VolumeShaper.State playerGetVolumeShaperState(int id) {
        return native_getVolumeShaperState(id);
    }
+
    /**
     * Creates a {@link VolumeShaper} with the given configuration, bound to this
     * AudioTrack: the returned shaper delegates its operations back to this player.
     */
    @Override
    public @NonNull VolumeShaper createVolumeShaper(
            @NonNull VolumeShaper.Configuration configuration) {
        return new VolumeShaper(configuration, this);
    }
+
+ /**
+ * Sets the playback sample rate for this track. This sets the sampling rate at which
+ * the audio data will be consumed and played back
+ * (as set by the sampleRateInHz parameter in the
+ * {@link #AudioTrack(int, int, int, int, int, int)} constructor),
+ * not the original sampling rate of the
+ * content. For example, setting it to half the sample rate of the content will cause the
+ * playback to last twice as long, but will also result in a pitch shift down by one octave.
+ * The valid sample rate range is from 1 Hz to twice the value returned by
+ * {@link #getNativeOutputSampleRate(int)}.
+ * Use {@link #setPlaybackParams(PlaybackParams)} for speed control.
+ * <p> This method may also be used to repurpose an existing <code>AudioTrack</code>
+ * for playback of content of differing sample rate,
+ * but with identical encoding and channel mask.
+ * @param sampleRateInHz the sample rate expressed in Hz
+ * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
+ * {@link #ERROR_INVALID_OPERATION}
+ */
+ public int setPlaybackRate(int sampleRateInHz) {
+ if (mState != STATE_INITIALIZED) {
+ return ERROR_INVALID_OPERATION;
+ }
+ if (sampleRateInHz <= 0) {
+ return ERROR_BAD_VALUE;
+ }
+ return native_set_playback_rate(sampleRateInHz);
+ }
+
+
+ /**
+ * Sets the playback parameters.
+ * This method returns failure if it cannot apply the playback parameters.
+ * One possible cause is that the parameters for speed or pitch are out of range.
+ * Another possible cause is that the <code>AudioTrack</code> is streaming
+ * (see {@link #MODE_STREAM}) and the
+ * buffer size is too small. For speeds greater than 1.0f, the <code>AudioTrack</code> buffer
+ * on configuration must be larger than the speed multiplied by the minimum size
+ * {@link #getMinBufferSize(int, int, int)}) to allow proper playback.
+ * @param params see {@link PlaybackParams}. In particular,
+ * speed, pitch, and audio mode should be set.
+ * @throws IllegalArgumentException if the parameters are invalid or not accepted.
+ * @throws IllegalStateException if track is not initialized.
+ */
+ public void setPlaybackParams(@NonNull PlaybackParams params) {
+ if (params == null) {
+ throw new IllegalArgumentException("params is null");
+ }
+ native_set_playback_params(params);
+ }
+
+
+ /**
+ * Sets the position of the notification marker. At most one marker can be active.
+ * @param markerInFrames marker position in wrapping frame units similar to
+ * {@link #getPlaybackHeadPosition}, or zero to disable the marker.
+ * To set a marker at a position which would appear as zero due to wraparound,
+ * a workaround is to use a non-zero position near zero, such as -1 or 1.
+ * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
+ * {@link #ERROR_INVALID_OPERATION}
+ */
+ public int setNotificationMarkerPosition(int markerInFrames) {
+ if (mState == STATE_UNINITIALIZED) {
+ return ERROR_INVALID_OPERATION;
+ }
+ return native_set_marker_pos(markerInFrames);
+ }
+
+
+ /**
+ * Sets the period for the periodic notification event.
+ * @param periodInFrames update period expressed in frames.
+ * Zero period means no position updates. A negative period is not allowed.
+ * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_INVALID_OPERATION}
+ */
+ public int setPositionNotificationPeriod(int periodInFrames) {
+ if (mState == STATE_UNINITIALIZED) {
+ return ERROR_INVALID_OPERATION;
+ }
+ return native_set_pos_update_period(periodInFrames);
+ }
+
+
+ /**
+ * Sets the playback head position within the static buffer.
+ * The track must be stopped or paused for the position to be changed,
+ * and must use the {@link #MODE_STATIC} mode.
+ * @param positionInFrames playback head position within buffer, expressed in frames.
+ * Zero corresponds to start of buffer.
+ * The position must not be greater than the buffer size in frames, or negative.
+ * Though this method and {@link #getPlaybackHeadPosition()} have similar names,
+ * the position values have different meanings.
+ * <br>
+ * If looping is currently enabled and the new position is greater than or equal to the
+ * loop end marker, the behavior varies by API level:
+ * as of {@link android.os.Build.VERSION_CODES#M},
+ * the looping is first disabled and then the position is set.
+ * For earlier API levels, the behavior is unspecified.
+ * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
+ * {@link #ERROR_INVALID_OPERATION}
+ */
+ public int setPlaybackHeadPosition(int positionInFrames) {
+ if (mDataLoadMode == MODE_STREAM || mState == STATE_UNINITIALIZED ||
+ getPlayState() == PLAYSTATE_PLAYING) {
+ return ERROR_INVALID_OPERATION;
+ }
+ if (!(0 <= positionInFrames && positionInFrames <= mNativeBufferSizeInFrames)) {
+ return ERROR_BAD_VALUE;
+ }
+ return native_set_position(positionInFrames);
+ }
+
+ /**
+ * Sets the loop points and the loop count. The loop can be infinite.
+ * Similarly to setPlaybackHeadPosition,
+ * the track must be stopped or paused for the loop points to be changed,
+ * and must use the {@link #MODE_STATIC} mode.
+ * @param startInFrames loop start marker expressed in frames.
+ * Zero corresponds to start of buffer.
+ * The start marker must not be greater than or equal to the buffer size in frames, or negative.
+ * @param endInFrames loop end marker expressed in frames.
+ * The total buffer size in frames corresponds to end of buffer.
+ * The end marker must not be greater than the buffer size in frames.
+ * For looping, the end marker must not be less than or equal to the start marker,
+ * but to disable looping
+ * it is permitted for start marker, end marker, and loop count to all be 0.
+ * If any input parameters are out of range, this method returns {@link #ERROR_BAD_VALUE}.
+ * If the loop period (endInFrames - startInFrames) is too small for the implementation to
+ * support,
+ * {@link #ERROR_BAD_VALUE} is returned.
+ * The loop range is the interval [startInFrames, endInFrames).
+ * <br>
+ * As of {@link android.os.Build.VERSION_CODES#M}, the position is left unchanged,
+ * unless it is greater than or equal to the loop end marker, in which case
+ * it is forced to the loop start marker.
+ * For earlier API levels, the effect on position is unspecified.
+ * @param loopCount the number of times the loop is looped; must be greater than or equal to -1.
+ * A value of -1 means infinite looping, and 0 disables looping.
+ * A value of positive N means to "loop" (go back) N times. For example,
+ * a value of one means to play the region two times in total.
+ * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
+ * {@link #ERROR_INVALID_OPERATION}
+ */
+ public int setLoopPoints(int startInFrames, int endInFrames, int loopCount) {
+ if (mDataLoadMode == MODE_STREAM || mState == STATE_UNINITIALIZED ||
+ getPlayState() == PLAYSTATE_PLAYING) {
+ return ERROR_INVALID_OPERATION;
+ }
+ if (loopCount == 0) {
+ ; // explicitly allowed as an exception to the loop region range check
+ } else if (!(0 <= startInFrames && startInFrames < mNativeBufferSizeInFrames &&
+ startInFrames < endInFrames && endInFrames <= mNativeBufferSizeInFrames)) {
+ return ERROR_BAD_VALUE;
+ }
+ return native_set_loop(startInFrames, endInFrames, loopCount);
+ }
+
    /**
     * Sets the initialization state of the instance. This method was originally intended to be used
     * in an AudioTrack subclass constructor to set a subclass-specific post-initialization state.
     * However, subclasses of AudioTrack are no longer recommended, so this method is obsolete.
     * @param state the state of the AudioTrack instance
     * @deprecated Only accessible by subclasses, which are not recommended for AudioTrack.
     */
    @Deprecated
    protected void setState(int state) {
        // Direct write; no validation is performed on the supplied state value.
        mState = state;
    }
+
+
+ //---------------------------------------------------------
+ // Transport control methods
+ //--------------------
+ /**
+ * Starts playing an AudioTrack.
+ * <p>
+ * If track's creation mode is {@link #MODE_STATIC}, you must have called one of
+ * the write methods ({@link #write(byte[], int, int)}, {@link #write(byte[], int, int, int)},
+ * {@link #write(short[], int, int)}, {@link #write(short[], int, int, int)},
+ * {@link #write(float[], int, int, int)}, or {@link #write(ByteBuffer, int, int)}) prior to
+ * play().
+ * <p>
+ * If the mode is {@link #MODE_STREAM}, you can optionally prime the data path prior to
+ * calling play(), by writing up to <code>bufferSizeInBytes</code> (from constructor).
+ * If you don't call write() first, or if you call write() but with an insufficient amount of
+ * data, then the track will be in underrun state at play(). In this case,
+ * playback will not actually start playing until the data path is filled to a
+ * device-specific minimum level. This requirement for the path to be filled
+ * to a minimum level is also true when resuming audio playback after calling stop().
+ * Similarly the buffer will need to be filled up again after
+ * the track underruns due to failure to call write() in a timely manner with sufficient data.
+ * For portability, an application should prime the data path to the maximum allowed
+ * by writing data until the write() method returns a short transfer count.
+ * This allows play() to start immediately, and reduces the chance of underrun.
+ *
+ * @throws IllegalStateException if the track isn't properly initialized
+ */
+ public void play()
+ throws IllegalStateException {
+ if (mState != STATE_INITIALIZED) {
+ throw new IllegalStateException("play() called on uninitialized AudioTrack.");
+ }
+ //FIXME use lambda to pass startImpl to superclass
+ final int delay = getStartDelayMs();
+ if (delay == 0) {
+ startImpl();
+ } else {
+ new Thread() {
+ public void run() {
+ try {
+ Thread.sleep(delay);
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ baseSetStartDelayMs(0);
+ try {
+ startImpl();
+ } catch (IllegalStateException e) {
+ // fail silently for a state exception when it is happening after
+ // a delayed start, as the player state could have changed between the
+ // call to start() and the execution of startImpl()
+ }
+ }
+ }.start();
+ }
+ }
+
    // Common start path: transitions the base player state and the native track to
    // PLAYING under mPlayStateLock. Called directly for an immediate start, or from
    // the delayed-start worker thread in play().
    private void startImpl() {
        synchronized(mPlayStateLock) {
            baseStart();
            native_start();
            mPlayState = PLAYSTATE_PLAYING;
        }
    }
+
    /**
     * Stops playing the audio data.
     * When used on an instance created in {@link #MODE_STREAM} mode, audio will stop playing
     * after the last buffer that was written has been played. For an immediate stop, use
     * {@link #pause()}, followed by {@link #flush()} to discard audio data that hasn't been played
     * back yet.
     * @throws IllegalStateException if the track isn't properly initialized
     */
    public void stop()
    throws IllegalStateException {
        if (mState != STATE_INITIALIZED) {
            throw new IllegalStateException("stop() called on uninitialized AudioTrack.");
        }

        // stop playing
        synchronized(mPlayStateLock) {
            // NOTE(review): native_stop() precedes baseStop() here, the reverse of the
            // start path (baseStart() then native_start()); the ordering appears deliberate.
            native_stop();
            baseStop();
            mPlayState = PLAYSTATE_STOPPED;
            // Reset A/V sync bookkeeping so a later write starts a fresh sync stream.
            mAvSyncHeader = null;
            mAvSyncBytesRemaining = 0;
        }
    }
+
    /**
     * Pauses the playback of the audio data. Data that has not been played
     * back will not be discarded. Subsequent calls to {@link #play} will play
     * this data back. See {@link #flush()} to discard this data.
     *
     * @throws IllegalStateException if the track isn't properly initialized
     */
    public void pause()
    throws IllegalStateException {
        if (mState != STATE_INITIALIZED) {
            throw new IllegalStateException("pause() called on uninitialized AudioTrack.");
        }

        // pause playback
        synchronized(mPlayStateLock) {
            // Native pause first, then base player bookkeeping; mirrors stop().
            native_pause();
            basePause();
            mPlayState = PLAYSTATE_PAUSED;
        }
    }
+
+
+ //---------------------------------------------------------
+ // Audio data supply
+ //--------------------
+
+ /**
+ * Flushes the audio data currently queued for playback. Any data that has
+ * been written but not yet presented will be discarded. No-op if not stopped or paused,
+ * or if the track's creation mode is not {@link #MODE_STREAM}.
+ * <BR> Note that although data written but not yet presented is discarded, there is no
+ * guarantee that all of the buffer space formerly used by that data
+ * is available for a subsequent write.
+ * For example, a call to {@link #write(byte[], int, int)} with <code>sizeInBytes</code>
+ * less than or equal to the total buffer size
+ * may return a short actual transfer count.
+ */
+ public void flush() {
+ if (mState == STATE_INITIALIZED) {
+ // flush the data in native layer
+ native_flush();
+ mAvSyncHeader = null;
+ mAvSyncBytesRemaining = 0;
+ }
+
+ }
+
+ /**
+ * Writes the audio data to the audio sink for playback (streaming mode),
+ * or copies audio data for later playback (static buffer mode).
+ * The format specified in the AudioTrack constructor should be
+ * {@link AudioFormat#ENCODING_PCM_8BIT} to correspond to the data in the array.
+ * The format can be {@link AudioFormat#ENCODING_PCM_16BIT}, but this is deprecated.
+ * <p>
+ * In streaming mode, the write will normally block until all the data has been enqueued for
+ * playback, and will return a full transfer count. However, if the track is stopped or paused
+ * on entry, or another thread interrupts the write by calling stop or pause, or an I/O error
+ * occurs during the write, then the write may return a short transfer count.
+ * <p>
+ * In static buffer mode, copies the data to the buffer starting at offset 0.
+ * Note that the actual playback of this data might occur after this function returns.
+ *
+ * @param audioData the array that holds the data to play.
+ * @param offsetInBytes the offset expressed in bytes in audioData where the data to write
+ * starts.
+ * Must not be negative, or cause the data access to go out of bounds of the array.
+ * @param sizeInBytes the number of bytes to write in audioData after the offset.
+ * Must not be negative, or cause the data access to go out of bounds of the array.
+ * @return zero or the positive number of bytes that were written, or one of the following
+ * error codes. The number of bytes will be a multiple of the frame size in bytes
+ * not to exceed sizeInBytes.
+ * <ul>
+ * <li>{@link #ERROR_INVALID_OPERATION} if the track isn't properly initialized</li>
+ * <li>{@link #ERROR_BAD_VALUE} if the parameters don't resolve to valid data and indexes</li>
+ * <li>{@link #ERROR_DEAD_OBJECT} if the AudioTrack is not valid anymore and
+ * needs to be recreated. The dead object error code is not returned if some data was
+ * successfully transferred. In this case, the error is returned at the next write()</li>
+ * <li>{@link #ERROR} in case of other error</li>
+ * </ul>
+ * This is equivalent to {@link #write(byte[], int, int, int)} with <code>writeMode</code>
+ * set to {@link #WRITE_BLOCKING}.
+ */
+ public int write(@NonNull byte[] audioData, int offsetInBytes, int sizeInBytes) {
+ return write(audioData, offsetInBytes, sizeInBytes, WRITE_BLOCKING);
+ }
+
+ /**
+ * Writes the audio data to the audio sink for playback (streaming mode),
+ * or copies audio data for later playback (static buffer mode).
+ * The format specified in the AudioTrack constructor should be
+ * {@link AudioFormat#ENCODING_PCM_8BIT} to correspond to the data in the array.
+ * The format can be {@link AudioFormat#ENCODING_PCM_16BIT}, but this is deprecated.
+ * <p>
+ * In streaming mode, the blocking behavior depends on the write mode. If the write mode is
+ * {@link #WRITE_BLOCKING}, the write will normally block until all the data has been enqueued
+ * for playback, and will return a full transfer count. However, if the write mode is
+ * {@link #WRITE_NON_BLOCKING}, or the track is stopped or paused on entry, or another thread
+ * interrupts the write by calling stop or pause, or an I/O error
+ * occurs during the write, then the write may return a short transfer count.
+ * <p>
+ * In static buffer mode, copies the data to the buffer starting at offset 0,
+ * and the write mode is ignored.
+ * Note that the actual playback of this data might occur after this function returns.
+ *
+ * @param audioData the array that holds the data to play.
+ * @param offsetInBytes the offset expressed in bytes in audioData where the data to write
+ * starts.
+ * Must not be negative, or cause the data access to go out of bounds of the array.
+ * @param sizeInBytes the number of bytes to write in audioData after the offset.
+ * Must not be negative, or cause the data access to go out of bounds of the array.
+ * @param writeMode one of {@link #WRITE_BLOCKING}, {@link #WRITE_NON_BLOCKING}. It has no
+ * effect in static mode.
+ * <br>With {@link #WRITE_BLOCKING}, the write will block until all data has been written
+ * to the audio sink.
+ * <br>With {@link #WRITE_NON_BLOCKING}, the write will return immediately after
+ * queuing as much audio data for playback as possible without blocking.
+ * @return zero or the positive number of bytes that were written, or one of the following
+ * error codes. The number of bytes will be a multiple of the frame size in bytes
+ * not to exceed sizeInBytes.
+ * <ul>
+ * <li>{@link #ERROR_INVALID_OPERATION} if the track isn't properly initialized</li>
+ * <li>{@link #ERROR_BAD_VALUE} if the parameters don't resolve to valid data and indexes</li>
+ * <li>{@link #ERROR_DEAD_OBJECT} if the AudioTrack is not valid anymore and
+ * needs to be recreated. The dead object error code is not returned if some data was
+ * successfully transferred. In this case, the error is returned at the next write()</li>
+ * <li>{@link #ERROR} in case of other error</li>
+ * </ul>
+ */
+ public int write(@NonNull byte[] audioData, int offsetInBytes, int sizeInBytes,
+ @WriteMode int writeMode) {
+
+ if (mState == STATE_UNINITIALIZED || mAudioFormat == AudioFormat.ENCODING_PCM_FLOAT) {
+ return ERROR_INVALID_OPERATION;
+ }
+
+ if ((writeMode != WRITE_BLOCKING) && (writeMode != WRITE_NON_BLOCKING)) {
+ Log.e(TAG, "AudioTrack.write() called with invalid blocking mode");
+ return ERROR_BAD_VALUE;
+ }
+
+ if ( (audioData == null) || (offsetInBytes < 0 ) || (sizeInBytes < 0)
+ || (offsetInBytes + sizeInBytes < 0) // detect integer overflow
+ || (offsetInBytes + sizeInBytes > audioData.length)) {
+ return ERROR_BAD_VALUE;
+ }
+
+ int ret = native_write_byte(audioData, offsetInBytes, sizeInBytes, mAudioFormat,
+ writeMode == WRITE_BLOCKING);
+
+ if ((mDataLoadMode == MODE_STATIC)
+ && (mState == STATE_NO_STATIC_DATA)
+ && (ret > 0)) {
+ // benign race with respect to other APIs that read mState
+ mState = STATE_INITIALIZED;
+ }
+
+ return ret;
+ }
+
+ /**
+ * Writes the audio data to the audio sink for playback (streaming mode),
+ * or copies audio data for later playback (static buffer mode).
+ * The format specified in the AudioTrack constructor should be
+ * {@link AudioFormat#ENCODING_PCM_16BIT} to correspond to the data in the array.
+ * <p>
+ * In streaming mode, the write will normally block until all the data has been enqueued for
+ * playback, and will return a full transfer count. However, if the track is stopped or paused
+ * on entry, or another thread interrupts the write by calling stop or pause, or an I/O error
+ * occurs during the write, then the write may return a short transfer count.
+ * <p>
+ * In static buffer mode, copies the data to the buffer starting at offset 0.
+ * Note that the actual playback of this data might occur after this function returns.
+ *
+ * @param audioData the array that holds the data to play.
+ * @param offsetInShorts the offset expressed in shorts in audioData where the data to play
+ * starts.
+ * Must not be negative, or cause the data access to go out of bounds of the array.
+ * @param sizeInShorts the number of shorts to read in audioData after the offset.
+ * Must not be negative, or cause the data access to go out of bounds of the array.
+ * @return zero or the positive number of shorts that were written, or one of the following
+ * error codes. The number of shorts will be a multiple of the channel count not to
+ * exceed sizeInShorts.
+ * <ul>
+ * <li>{@link #ERROR_INVALID_OPERATION} if the track isn't properly initialized</li>
+ * <li>{@link #ERROR_BAD_VALUE} if the parameters don't resolve to valid data and indexes</li>
+ * <li>{@link #ERROR_DEAD_OBJECT} if the AudioTrack is not valid anymore and
+ * needs to be recreated. The dead object error code is not returned if some data was
+ * successfully transferred. In this case, the error is returned at the next write()</li>
+ * <li>{@link #ERROR} in case of other error</li>
+ * </ul>
+ * This is equivalent to {@link #write(short[], int, int, int)} with <code>writeMode</code>
+ * set to {@link #WRITE_BLOCKING}.
+ */
+ public int write(@NonNull short[] audioData, int offsetInShorts, int sizeInShorts) {
+ return write(audioData, offsetInShorts, sizeInShorts, WRITE_BLOCKING);
+ }
+
+ /**
+ * Writes the audio data to the audio sink for playback (streaming mode),
+ * or copies audio data for later playback (static buffer mode).
+ * The format specified in the AudioTrack constructor should be
+ * {@link AudioFormat#ENCODING_PCM_16BIT} to correspond to the data in the array.
+ * <p>
+ * In streaming mode, the blocking behavior depends on the write mode. If the write mode is
+ * {@link #WRITE_BLOCKING}, the write will normally block until all the data has been enqueued
+ * for playback, and will return a full transfer count. However, if the write mode is
+ * {@link #WRITE_NON_BLOCKING}, or the track is stopped or paused on entry, or another thread
+ * interrupts the write by calling stop or pause, or an I/O error
+ * occurs during the write, then the write may return a short transfer count.
+ * <p>
+ * In static buffer mode, copies the data to the buffer starting at offset 0.
+ * Note that the actual playback of this data might occur after this function returns.
+ *
+ * @param audioData the array that holds the data to write.
+ * @param offsetInShorts the offset expressed in shorts in audioData where the data to write
+ * starts.
+ * Must not be negative, or cause the data access to go out of bounds of the array.
+ * @param sizeInShorts the number of shorts to read in audioData after the offset.
+ * Must not be negative, or cause the data access to go out of bounds of the array.
+ * @param writeMode one of {@link #WRITE_BLOCKING}, {@link #WRITE_NON_BLOCKING}. It has no
+ * effect in static mode.
+ * <br>With {@link #WRITE_BLOCKING}, the write will block until all data has been written
+ * to the audio sink.
+ * <br>With {@link #WRITE_NON_BLOCKING}, the write will return immediately after
+ * queuing as much audio data for playback as possible without blocking.
+ * @return zero or the positive number of shorts that were written, or one of the following
+ * error codes. The number of shorts will be a multiple of the channel count not to
+ * exceed sizeInShorts.
+ * <ul>
+ * <li>{@link #ERROR_INVALID_OPERATION} if the track isn't properly initialized</li>
+ * <li>{@link #ERROR_BAD_VALUE} if the parameters don't resolve to valid data and indexes</li>
+ * <li>{@link #ERROR_DEAD_OBJECT} if the AudioTrack is not valid anymore and
+ * needs to be recreated. The dead object error code is not returned if some data was
+ * successfully transferred. In this case, the error is returned at the next write()</li>
+ * <li>{@link #ERROR} in case of other error</li>
+ * </ul>
+ */
+ public int write(@NonNull short[] audioData, int offsetInShorts, int sizeInShorts,
+ @WriteMode int writeMode) {
+
+ if (mState == STATE_UNINITIALIZED || mAudioFormat == AudioFormat.ENCODING_PCM_FLOAT) {
+ return ERROR_INVALID_OPERATION;
+ }
+
+ if ((writeMode != WRITE_BLOCKING) && (writeMode != WRITE_NON_BLOCKING)) {
+ Log.e(TAG, "AudioTrack.write() called with invalid blocking mode");
+ return ERROR_BAD_VALUE;
+ }
+
+ if ( (audioData == null) || (offsetInShorts < 0 ) || (sizeInShorts < 0)
+ || (offsetInShorts + sizeInShorts < 0) // detect integer overflow
+ || (offsetInShorts + sizeInShorts > audioData.length)) {
+ return ERROR_BAD_VALUE;
+ }
+
+ int ret = native_write_short(audioData, offsetInShorts, sizeInShorts, mAudioFormat,
+ writeMode == WRITE_BLOCKING);
+
+ if ((mDataLoadMode == MODE_STATIC)
+ && (mState == STATE_NO_STATIC_DATA)
+ && (ret > 0)) {
+ // benign race with respect to other APIs that read mState
+ mState = STATE_INITIALIZED;
+ }
+
+ return ret;
+ }
+
+ /**
+ * Writes the audio data to the audio sink for playback (streaming mode),
+ * or copies audio data for later playback (static buffer mode).
+ * The format specified in the AudioTrack constructor should be
+ * {@link AudioFormat#ENCODING_PCM_FLOAT} to correspond to the data in the array.
+ * <p>
+ * In streaming mode, the blocking behavior depends on the write mode. If the write mode is
+ * {@link #WRITE_BLOCKING}, the write will normally block until all the data has been enqueued
+ * for playback, and will return a full transfer count. However, if the write mode is
+ * {@link #WRITE_NON_BLOCKING}, or the track is stopped or paused on entry, or another thread
+ * interrupts the write by calling stop or pause, or an I/O error
+ * occurs during the write, then the write may return a short transfer count.
+ * <p>
+ * In static buffer mode, copies the data to the buffer starting at offset 0,
+ * and the write mode is ignored.
+ * Note that the actual playback of this data might occur after this function returns.
+ *
+ * @param audioData the array that holds the data to write.
+ * The implementation does not clip for sample values within the nominal range
+ * [-1.0f, 1.0f], provided that all gains in the audio pipeline are
+ * less than or equal to unity (1.0f), and in the absence of post-processing effects
+ * that could add energy, such as reverb. For the convenience of applications
+ * that compute samples using filters with non-unity gain,
+ * sample values +3 dB beyond the nominal range are permitted.
+ * However such values may eventually be limited or clipped, depending on various gains
+ * and later processing in the audio path. Therefore applications are encouraged
+ * to provide samples values within the nominal range.
+ * @param offsetInFloats the offset, expressed as a number of floats,
+ * in audioData where the data to write starts.
+ * Must not be negative, or cause the data access to go out of bounds of the array.
+ * @param sizeInFloats the number of floats to write in audioData after the offset.
+ * Must not be negative, or cause the data access to go out of bounds of the array.
+ * @param writeMode one of {@link #WRITE_BLOCKING}, {@link #WRITE_NON_BLOCKING}. It has no
+ * effect in static mode.
+ * <br>With {@link #WRITE_BLOCKING}, the write will block until all data has been written
+ * to the audio sink.
+ * <br>With {@link #WRITE_NON_BLOCKING}, the write will return immediately after
+ * queuing as much audio data for playback as possible without blocking.
+ * @return zero or the positive number of floats that were written, or one of the following
+ * error codes. The number of floats will be a multiple of the channel count not to
+ * exceed sizeInFloats.
+ * <ul>
+ * <li>{@link #ERROR_INVALID_OPERATION} if the track isn't properly initialized</li>
+ * <li>{@link #ERROR_BAD_VALUE} if the parameters don't resolve to valid data and indexes</li>
+ * <li>{@link #ERROR_DEAD_OBJECT} if the AudioTrack is not valid anymore and
+ * needs to be recreated. The dead object error code is not returned if some data was
+ * successfully transferred. In this case, the error is returned at the next write()</li>
+ * <li>{@link #ERROR} in case of other error</li>
+ * </ul>
+ */
+ public int write(@NonNull float[] audioData, int offsetInFloats, int sizeInFloats,
+ @WriteMode int writeMode) {
+
+ if (mState == STATE_UNINITIALIZED) {
+ Log.e(TAG, "AudioTrack.write() called in invalid state STATE_UNINITIALIZED");
+ return ERROR_INVALID_OPERATION;
+ }
+
+ if (mAudioFormat != AudioFormat.ENCODING_PCM_FLOAT) {
+ Log.e(TAG, "AudioTrack.write(float[] ...) requires format ENCODING_PCM_FLOAT");
+ return ERROR_INVALID_OPERATION;
+ }
+
+ if ((writeMode != WRITE_BLOCKING) && (writeMode != WRITE_NON_BLOCKING)) {
+ Log.e(TAG, "AudioTrack.write() called with invalid blocking mode");
+ return ERROR_BAD_VALUE;
+ }
+
+ if ( (audioData == null) || (offsetInFloats < 0 ) || (sizeInFloats < 0)
+ || (offsetInFloats + sizeInFloats < 0) // detect integer overflow
+ || (offsetInFloats + sizeInFloats > audioData.length)) {
+ Log.e(TAG, "AudioTrack.write() called with invalid array, offset, or size");
+ return ERROR_BAD_VALUE;
+ }
+
+ int ret = native_write_float(audioData, offsetInFloats, sizeInFloats, mAudioFormat,
+ writeMode == WRITE_BLOCKING);
+
+ if ((mDataLoadMode == MODE_STATIC)
+ && (mState == STATE_NO_STATIC_DATA)
+ && (ret > 0)) {
+ // benign race with respect to other APIs that read mState
+ mState = STATE_INITIALIZED;
+ }
+
+ return ret;
+ }
+
+
+ /**
+ * Writes the audio data to the audio sink for playback (streaming mode),
+ * or copies audio data for later playback (static buffer mode).
+ * The audioData in ByteBuffer should match the format specified in the AudioTrack constructor.
+ * <p>
+ * In streaming mode, the blocking behavior depends on the write mode. If the write mode is
+ * {@link #WRITE_BLOCKING}, the write will normally block until all the data has been enqueued
+ * for playback, and will return a full transfer count. However, if the write mode is
+ * {@link #WRITE_NON_BLOCKING}, or the track is stopped or paused on entry, or another thread
+ * interrupts the write by calling stop or pause, or an I/O error
+ * occurs during the write, then the write may return a short transfer count.
+ * <p>
+ * In static buffer mode, copies the data to the buffer starting at offset 0,
+ * and the write mode is ignored.
+ * Note that the actual playback of this data might occur after this function returns.
+ *
+ * @param audioData the buffer that holds the data to write, starting at the position reported
+ * by <code>audioData.position()</code>.
+ * <BR>Note that upon return, the buffer position (<code>audioData.position()</code>) will
+ * have been advanced to reflect the amount of data that was successfully written to
+ * the AudioTrack.
+ * @param sizeInBytes number of bytes to write. It is recommended but not enforced
+ * that the number of bytes requested be a multiple of the frame size (sample size in
+ * bytes multiplied by the channel count).
+ * <BR>Note this may differ from <code>audioData.remaining()</code>, but cannot exceed it.
+ * @param writeMode one of {@link #WRITE_BLOCKING}, {@link #WRITE_NON_BLOCKING}. It has no
+ * effect in static mode.
+ * <BR>With {@link #WRITE_BLOCKING}, the write will block until all data has been written
+ * to the audio sink.
+ * <BR>With {@link #WRITE_NON_BLOCKING}, the write will return immediately after
+ * queuing as much audio data for playback as possible without blocking.
+ * @return zero or the positive number of bytes that were written, or one of the following
+ * error codes.
+ * <ul>
+ * <li>{@link #ERROR_INVALID_OPERATION} if the track isn't properly initialized</li>
+ * <li>{@link #ERROR_BAD_VALUE} if the parameters don't resolve to valid data and indexes</li>
+ * <li>{@link #ERROR_DEAD_OBJECT} if the AudioTrack is not valid anymore and
+ * needs to be recreated. The dead object error code is not returned if some data was
+ * successfully transferred. In this case, the error is returned at the next write()</li>
+ * <li>{@link #ERROR} in case of other error</li>
+ * </ul>
+ */
+ public int write(@NonNull ByteBuffer audioData, int sizeInBytes,
+ @WriteMode int writeMode) {
+
+ if (mState == STATE_UNINITIALIZED) {
+ Log.e(TAG, "AudioTrack.write() called in invalid state STATE_UNINITIALIZED");
+ return ERROR_INVALID_OPERATION;
+ }
+
+ if ((writeMode != WRITE_BLOCKING) && (writeMode != WRITE_NON_BLOCKING)) {
+ Log.e(TAG, "AudioTrack.write() called with invalid blocking mode");
+ return ERROR_BAD_VALUE;
+ }
+
+ if ( (audioData == null) || (sizeInBytes < 0) || (sizeInBytes > audioData.remaining())) {
+ Log.e(TAG, "AudioTrack.write() called with invalid size (" + sizeInBytes + ") value");
+ return ERROR_BAD_VALUE;
+ }
+
+ int ret = 0;
+ if (audioData.isDirect()) {
+ ret = native_write_native_bytes(audioData,
+ audioData.position(), sizeInBytes, mAudioFormat,
+ writeMode == WRITE_BLOCKING);
+ } else {
+ ret = native_write_byte(NioUtils.unsafeArray(audioData),
+ NioUtils.unsafeArrayOffset(audioData) + audioData.position(),
+ sizeInBytes, mAudioFormat,
+ writeMode == WRITE_BLOCKING);
+ }
+
+ if ((mDataLoadMode == MODE_STATIC)
+ && (mState == STATE_NO_STATIC_DATA)
+ && (ret > 0)) {
+ // benign race with respect to other APIs that read mState
+ mState = STATE_INITIALIZED;
+ }
+
+ if (ret > 0) {
+ audioData.position(audioData.position() + ret);
+ }
+
+ return ret;
+ }
+
    /**
     * Writes the audio data to the audio sink for playback in streaming mode on a HW_AV_SYNC track.
     * The blocking behavior will depend on the write mode.
     * @param audioData the buffer that holds the data to write, starting at the position reported
     *     by <code>audioData.position()</code>.
     *     <BR>Note that upon return, the buffer position (<code>audioData.position()</code>) will
     *     have been advanced to reflect the amount of data that was successfully written to
     *     the AudioTrack.
     * @param sizeInBytes number of bytes to write. It is recommended but not enforced
     *     that the number of bytes requested be a multiple of the frame size (sample size in
     *     bytes multiplied by the channel count).
     *     <BR>Note this may differ from <code>audioData.remaining()</code>, but cannot exceed it.
     * @param writeMode one of {@link #WRITE_BLOCKING}, {@link #WRITE_NON_BLOCKING}.
     *     <BR>With {@link #WRITE_BLOCKING}, the write will block until all data has been written
     *     to the audio sink.
     *     <BR>With {@link #WRITE_NON_BLOCKING}, the write will return immediately after
     *     queuing as much audio data for playback as possible without blocking.
     * @param timestamp The timestamp of the first decodable audio frame in the provided audioData.
     * @return zero or the positive number of bytes that were written, or one of the following
     *    error codes.
     * <ul>
     * <li>{@link #ERROR_INVALID_OPERATION} if the track isn't properly initialized</li>
     * <li>{@link #ERROR_BAD_VALUE} if the parameters don't resolve to valid data and indexes</li>
     * <li>{@link #ERROR_DEAD_OBJECT} if the AudioTrack is not valid anymore and
     *    needs to be recreated. The dead object error code is not returned if some data was
     *    successfully transferred. In this case, the error is returned at the next write()</li>
     * <li>{@link #ERROR} in case of other error</li>
     * </ul>
     */
    public int write(@NonNull ByteBuffer audioData, int sizeInBytes,
            @WriteMode int writeMode, long timestamp) {

        if (mState == STATE_UNINITIALIZED) {
            Log.e(TAG, "AudioTrack.write() called in invalid state STATE_UNINITIALIZED");
            return ERROR_INVALID_OPERATION;
        }

        if ((writeMode != WRITE_BLOCKING) && (writeMode != WRITE_NON_BLOCKING)) {
            Log.e(TAG, "AudioTrack.write() called with invalid blocking mode");
            return ERROR_BAD_VALUE;
        }

        // Timestamped writes only make sense while streaming.
        if (mDataLoadMode != MODE_STREAM) {
            Log.e(TAG, "AudioTrack.write() with timestamp called for non-streaming mode track");
            return ERROR_INVALID_OPERATION;
        }

        // On a non-HW_AV_SYNC track the timestamp is silently dropped and the call
        // degrades to the plain ByteBuffer write().
        if ((mAttributes.getFlags() & AudioAttributes.FLAG_HW_AV_SYNC) == 0) {
            Log.d(TAG, "AudioTrack.write() called on a regular AudioTrack. Ignoring pts...");
            return write(audioData, sizeInBytes, writeMode);
        }

        if ((audioData == null) || (sizeInBytes < 0) || (sizeInBytes > audioData.remaining())) {
            Log.e(TAG, "AudioTrack.write() called with invalid size (" + sizeInBytes + ") value");
            return ERROR_BAD_VALUE;
        }

        // create timestamp header if none exists
        // NOTE(review): 0x55550002 appears to be the version-2 AV sync header marker and
        // mOffset the header length in bytes -- confirm against the native implementation.
        if (mAvSyncHeader == null) {
            mAvSyncHeader = ByteBuffer.allocate(mOffset);
            mAvSyncHeader.order(ByteOrder.BIG_ENDIAN);
            mAvSyncHeader.putInt(0x55550002);
        }

        // Starting a new access unit: record its payload size, timestamp, and header
        // length, then rewind the header so it is (re)written ahead of the payload.
        if (mAvSyncBytesRemaining == 0) {
            mAvSyncHeader.putInt(4, sizeInBytes);
            mAvSyncHeader.putLong(8, timestamp);
            mAvSyncHeader.putInt(16, mOffset);
            mAvSyncHeader.position(0);
            mAvSyncBytesRemaining = sizeInBytes;
        }

        // write timestamp header if not completely written already
        int ret = 0;
        if (mAvSyncHeader.remaining() != 0) {
            ret = write(mAvSyncHeader, mAvSyncHeader.remaining(), writeMode);
            if (ret < 0) {
                // Header write failed: reset the AV sync state so the next call
                // starts a fresh access unit.
                Log.e(TAG, "AudioTrack.write() could not write timestamp header!");
                mAvSyncHeader = null;
                mAvSyncBytesRemaining = 0;
                return ret;
            }
            if (mAvSyncHeader.remaining() > 0) {
                // Partial header write (possible in non-blocking mode); the payload
                // must wait for a later call, so report zero bytes written.
                Log.v(TAG, "AudioTrack.write() partial timestamp header written.");
                return 0;
            }
        }

        // write audio data
        // Never write more payload than the current access unit has left.
        int sizeToWrite = Math.min(mAvSyncBytesRemaining, sizeInBytes);
        ret = write(audioData, sizeToWrite, writeMode);
        if (ret < 0) {
            Log.e(TAG, "AudioTrack.write() could not write audio data!");
            mAvSyncHeader = null;
            mAvSyncBytesRemaining = 0;
            return ret;
        }

        mAvSyncBytesRemaining -= ret;

        return ret;
    }
+
+
+ /**
+ * Sets the playback head position within the static buffer to zero,
+ * that is it rewinds to start of static buffer.
+ * The track must be stopped or paused, and
+ * the track's creation mode must be {@link #MODE_STATIC}.
+ * <p>
+ * As of {@link android.os.Build.VERSION_CODES#M}, also resets the value returned by
+ * {@link #getPlaybackHeadPosition()} to zero.
+ * For earlier API levels, the reset behavior is unspecified.
+ * <p>
+ * Use {@link #setPlaybackHeadPosition(int)} with a zero position
+ * if the reset of <code>getPlaybackHeadPosition()</code> is not needed.
+ * @return error code or success, see {@link #SUCCESS}, {@link #ERROR_BAD_VALUE},
+ * {@link #ERROR_INVALID_OPERATION}
+ */
+ public int reloadStaticData() {
+ if (mDataLoadMode == MODE_STREAM || mState != STATE_INITIALIZED) {
+ return ERROR_INVALID_OPERATION;
+ }
+ return native_reload_static();
+ }
+
+ //--------------------------------------------------------------------------
+ // Audio effects management
+ //--------------------
+
+ /**
+ * Attaches an auxiliary effect to the audio track. A typical auxiliary
+ * effect is a reverberation effect which can be applied on any sound source
+ * that directs a certain amount of its energy to this effect. This amount
+ * is defined by setAuxEffectSendLevel().
+ * {@see #setAuxEffectSendLevel(float)}.
+ * <p>After creating an auxiliary effect (e.g.
+ * {@link android.media.audiofx.EnvironmentalReverb}), retrieve its ID with
+ * {@link android.media.audiofx.AudioEffect#getId()} and use it when calling
+ * this method to attach the audio track to the effect.
+ * <p>To detach the effect from the audio track, call this method with a
+ * null effect id.
+ *
+ * @param effectId system wide unique id of the effect to attach
+ * @return error code or success, see {@link #SUCCESS},
+ * {@link #ERROR_INVALID_OPERATION}, {@link #ERROR_BAD_VALUE}
+ */
+ public int attachAuxEffect(int effectId) {
+ if (mState == STATE_UNINITIALIZED) {
+ return ERROR_INVALID_OPERATION;
+ }
+ return native_attachAuxEffect(effectId);
+ }
+
+ /**
+ * Sets the send level of the audio track to the attached auxiliary effect
+ * {@link #attachAuxEffect(int)}. Effect levels
+ * are clamped to the closed interval [0.0, max] where
+ * max is the value of {@link #getMaxVolume}.
+ * A value of 0.0 results in no effect, and a value of 1.0 is full send.
+ * <p>By default the send level is 0.0f, so even if an effect is attached to the player
+ * this method must be called for the effect to be applied.
+ * <p>Note that the passed level value is a linear scalar. UI controls should be scaled
+ * logarithmically: the gain applied by audio framework ranges from -72dB to at least 0dB,
+ * so an appropriate conversion from linear UI input x to level is:
+ * x == 0 -&gt; level = 0
+ * 0 &lt; x &lt;= R -&gt; level = 10^(72*(x-R)/20/R)
+ *
+ * @param level linear send level
+ * @return error code or success, see {@link #SUCCESS},
+ * {@link #ERROR_INVALID_OPERATION}, {@link #ERROR}
+ */
+ public int setAuxEffectSendLevel(float level) {
+ if (mState == STATE_UNINITIALIZED) {
+ return ERROR_INVALID_OPERATION;
+ }
+ return baseSetAuxEffectSendLevel(level);
+ }
+
+ @Override
+ int playerSetAuxEffectSendLevel(boolean muting, float level) {
+ level = clampGainOrLevel(muting ? 0.0f : level);
+ int err = native_setAuxEffectSendLevel(level);
+ return err == 0 ? SUCCESS : ERROR;
+ }
+
+ //--------------------------------------------------------------------------
+ // Explicit Routing
+ //--------------------
    // Device explicitly requested via setPreferredDevice(); null means default routing.
    // Reads and writes are guarded by synchronized (this) in
    // setPreferredDevice() / getPreferredDevice().
    private AudioDeviceInfo mPreferredDevice = null;
+
+ /**
+ * Specifies an audio device (via an {@link AudioDeviceInfo} object) to route
+ * the output from this AudioTrack.
+ * @param deviceInfo The {@link AudioDeviceInfo} specifying the audio sink.
+ * If deviceInfo is null, default routing is restored.
+ * @return true if succesful, false if the specified {@link AudioDeviceInfo} is non-null and
+ * does not correspond to a valid audio output device.
+ */
+ @Override
+ public boolean setPreferredDevice(AudioDeviceInfo deviceInfo) {
+ // Do some validation....
+ if (deviceInfo != null && !deviceInfo.isSink()) {
+ return false;
+ }
+ int preferredDeviceId = deviceInfo != null ? deviceInfo.getId() : 0;
+ boolean status = native_setOutputDevice(preferredDeviceId);
+ if (status == true) {
+ synchronized (this) {
+ mPreferredDevice = deviceInfo;
+ }
+ }
+ return status;
+ }
+
+ /**
+ * Returns the selected output specified by {@link #setPreferredDevice}. Note that this
+ * is not guaranteed to correspond to the actual device being used for playback.
+ */
+ @Override
+ public AudioDeviceInfo getPreferredDevice() {
+ synchronized (this) {
+ return mPreferredDevice;
+ }
+ }
+
+ /**
+ * Returns an {@link AudioDeviceInfo} identifying the current routing of this AudioTrack.
+ * Note: The query is only valid if the AudioTrack is currently playing. If it is not,
+ * <code>getRoutedDevice()</code> will return null.
+ */
+ @Override
+ public AudioDeviceInfo getRoutedDevice() {
+ int deviceId = native_getRoutedDeviceId();
+ if (deviceId == 0) {
+ return null;
+ }
+ AudioDeviceInfo[] devices =
+ AudioManager.getDevicesStatic(AudioManager.GET_DEVICES_OUTPUTS);
+ for (int i = 0; i < devices.length; i++) {
+ if (devices[i].getId() == deviceId) {
+ return devices[i];
+ }
+ }
+ return null;
+ }
+
+ /*
+ * Call BEFORE adding a routing callback handler.
+ */
+ private void testEnableNativeRoutingCallbacksLocked() {
+ if (mRoutingChangeListeners.size() == 0) {
+ native_enableDeviceCallback();
+ }
+ }
+
+ /*
+ * Call AFTER removing a routing callback handler.
+ */
+ private void testDisableNativeRoutingCallbacksLocked() {
+ if (mRoutingChangeListeners.size() == 0) {
+ native_disableDeviceCallback();
+ }
+ }
+
+ //--------------------------------------------------------------------------
+ // (Re)Routing Info
+ //--------------------
    /**
     * The list of {@link AudioRouting.OnRoutingChangedListener} interfaces added (with
     * {@link #addOnRoutingChangedListener(android.media.AudioRouting.OnRoutingChangedListener, Handler)})
     * by an app to receive (re)routing notifications.
     * Maps each listener to the delegate that posts events to its chosen Handler.
     */
    @GuardedBy("mRoutingChangeListeners")
    private ArrayMap<AudioRouting.OnRoutingChangedListener,
            NativeRoutingEventHandlerDelegate> mRoutingChangeListeners = new ArrayMap<>();
+
+ /**
+ * Adds an {@link AudioRouting.OnRoutingChangedListener} to receive notifications of routing
+ * changes on this AudioTrack.
+ * @param listener The {@link AudioRouting.OnRoutingChangedListener} interface to receive
+ * notifications of rerouting events.
+ * @param handler Specifies the {@link Handler} object for the thread on which to execute
+ * the callback. If <code>null</code>, the {@link Handler} associated with the main
+ * {@link Looper} will be used.
+ */
+ @Override
+ public void addOnRoutingChangedListener(AudioRouting.OnRoutingChangedListener listener,
+ Handler handler) {
+ synchronized (mRoutingChangeListeners) {
+ if (listener != null && !mRoutingChangeListeners.containsKey(listener)) {
+ testEnableNativeRoutingCallbacksLocked();
+ mRoutingChangeListeners.put(
+ listener, new NativeRoutingEventHandlerDelegate(this, listener,
+ handler != null ? handler : new Handler(mInitializationLooper)));
+ }
+ }
+ }
+
+ /**
+ * Removes an {@link AudioRouting.OnRoutingChangedListener} which has been previously added
+ * to receive rerouting notifications.
+ * @param listener The previously added {@link AudioRouting.OnRoutingChangedListener} interface
+ * to remove.
+ */
+ @Override
+ public void removeOnRoutingChangedListener(AudioRouting.OnRoutingChangedListener listener) {
+ synchronized (mRoutingChangeListeners) {
+ if (mRoutingChangeListeners.containsKey(listener)) {
+ mRoutingChangeListeners.remove(listener);
+ }
+ testDisableNativeRoutingCallbacksLocked();
+ }
+ }
+
+ //--------------------------------------------------------------------------
+ // (Re)Routing Info
+ //--------------------
    /**
     * Defines the interface by which applications can receive notifications of
     * routing changes for the associated {@link AudioTrack}.
     *
     * @deprecated users should switch to the general purpose
     * {@link AudioRouting.OnRoutingChangedListener} class instead.
     */
    @Deprecated
    public interface OnRoutingChangedListener extends AudioRouting.OnRoutingChangedListener {
        /**
         * Called when the routing of an AudioTrack changes from either an
         * explicit or policy rerouting. Use {@link #getRoutedDevice()} to
         * retrieve the newly routed-to device.
         * @param audioTrack the AudioTrack whose routing changed
         */
        public void onRoutingChanged(AudioTrack audioTrack);

        // Default bridge: forwards the generic AudioRouting callback to the
        // AudioTrack-typed legacy callback when the router is an AudioTrack.
        @Override
        default public void onRoutingChanged(AudioRouting router) {
            if (router instanceof AudioTrack) {
                onRoutingChanged((AudioTrack) router);
            }
        }
    }
+
+ /**
+ * Adds an {@link OnRoutingChangedListener} to receive notifications of routing changes
+ * on this AudioTrack.
+ * @param listener The {@link OnRoutingChangedListener} interface to receive notifications
+ * of rerouting events.
+ * @param handler Specifies the {@link Handler} object for the thread on which to execute
+ * the callback. If <code>null</code>, the {@link Handler} associated with the main
+ * {@link Looper} will be used.
+ * @deprecated users should switch to the general purpose
+ * {@link AudioRouting.OnRoutingChangedListener} class instead.
+ */
+ @Deprecated
+ public void addOnRoutingChangedListener(OnRoutingChangedListener listener,
+ android.os.Handler handler) {
+ addOnRoutingChangedListener((AudioRouting.OnRoutingChangedListener) listener, handler);
+ }
+
+ /**
+ * Removes an {@link OnRoutingChangedListener} which has been previously added
+ * to receive rerouting notifications.
+ * @param listener The previously added {@link OnRoutingChangedListener} interface to remove.
+ * @deprecated users should switch to the general purpose
+ * {@link AudioRouting.OnRoutingChangedListener} class instead.
+ */
+ @Deprecated
+ public void removeOnRoutingChangedListener(OnRoutingChangedListener listener) {
+ removeOnRoutingChangedListener((AudioRouting.OnRoutingChangedListener) listener);
+ }
+
+ /**
+ * Sends device list change notification to all listeners.
+ */
+ private void broadcastRoutingChange() {
+ AudioManager.resetAudioPortGeneration();
+ synchronized (mRoutingChangeListeners) {
+ for (NativeRoutingEventHandlerDelegate delegate : mRoutingChangeListeners.values()) {
+ Handler handler = delegate.getHandler();
+ if (handler != null) {
+ handler.sendEmptyMessage(AudioSystem.NATIVE_EVENT_ROUTING_CHANGE);
+ }
+ }
+ }
+ }
+
+ //---------------------------------------------------------
+ // Interface definitions
+ //--------------------
    /**
     * Interface definition for a callback to be invoked when the playback head position of
     * an AudioTrack has reached a notification marker or has increased by a certain period.
     */
    public interface OnPlaybackPositionUpdateListener {
        /**
         * Called on the listener to notify it that the previously set marker has been reached
         * by the playback head.
         * @param track the AudioTrack whose marker was reached
         */
        void onMarkerReached(AudioTrack track);

        /**
         * Called on the listener to periodically notify it that the playback head has reached
         * a multiple of the notification period.
         * @param track the AudioTrack whose playback head advanced by a period
         */
        void onPeriodicNotification(AudioTrack track);
    }
+
+ //---------------------------------------------------------
+ // Inner classes
+ //--------------------
+ /**
+ * Helper class to handle the forwarding of native events to the appropriate listener
+ * (potentially) handled in a different thread
+ */
+ private class NativePositionEventHandlerDelegate {
+ private final Handler mHandler;
+
+ NativePositionEventHandlerDelegate(final AudioTrack track,
+ final OnPlaybackPositionUpdateListener listener,
+ Handler handler) {
+ // find the looper for our new event handler
+ Looper looper;
+ if (handler != null) {
+ looper = handler.getLooper();
+ } else {
+ // no given handler, use the looper the AudioTrack was created in
+ looper = mInitializationLooper;
+ }
+
+ // construct the event handler with this looper
+ if (looper != null) {
+ // implement the event handler delegate
+ mHandler = new Handler(looper) {
+ @Override
+ public void handleMessage(Message msg) {
+ if (track == null) {
+ return;
+ }
+ switch(msg.what) {
+ case NATIVE_EVENT_MARKER:
+ if (listener != null) {
+ listener.onMarkerReached(track);
+ }
+ break;
+ case NATIVE_EVENT_NEW_POS:
+ if (listener != null) {
+ listener.onPeriodicNotification(track);
+ }
+ break;
+ default:
+ loge("Unknown native event type: " + msg.what);
+ break;
+ }
+ }
+ };
+ } else {
+ mHandler = null;
+ }
+ }
+
+ Handler getHandler() {
+ return mHandler;
+ }
+ }
+
    /**
     * Helper class to handle the forwarding of native events to the appropriate listener
     * (potentially) handled in a different thread.
     * Forwards NATIVE_EVENT_ROUTING_CHANGE messages to an
     * {@link AudioRouting.OnRoutingChangedListener}.
     */
    private class NativeRoutingEventHandlerDelegate {
        // Handler bound to the listener's chosen looper; null if no looper was available.
        private final Handler mHandler;

        NativeRoutingEventHandlerDelegate(final AudioTrack track,
                final AudioRouting.OnRoutingChangedListener listener,
                Handler handler) {
            // find the looper for our new event handler
            Looper looper;
            if (handler != null) {
                looper = handler.getLooper();
            } else {
                // no given handler, use the looper the AudioTrack was created in
                looper = mInitializationLooper;
            }

            // construct the event handler with this looper
            if (looper != null) {
                // implement the event handler delegate
                mHandler = new Handler(looper) {
                    @Override
                    public void handleMessage(Message msg) {
                        // Defensive: ignore events if the delegate has no track.
                        if (track == null) {
                            return;
                        }
                        switch(msg.what) {
                        case AudioSystem.NATIVE_EVENT_ROUTING_CHANGE:
                            if (listener != null) {
                                listener.onRoutingChanged(track);
                            }
                            break;
                        default:
                            loge("Unknown native event type: " + msg.what);
                            break;
                        }
                    }
                };
            } else {
                // No usable looper: events from this delegate are dropped.
                mHandler = null;
            }
        }

        // Handler on which routing events are delivered; may be null.
        Handler getHandler() {
            return mHandler;
        }
    }
+
+ //---------------------------------------------------------
+ // Methods for IPlayer interface
+ //--------------------
    @Override
    void playerStart() {
        // IPlayer entry point: delegate to the public play() method.
        play();
    }

    @Override
    void playerPause() {
        // IPlayer entry point: delegate to the public pause() method.
        pause();
    }

    @Override
    void playerStop() {
        // IPlayer entry point: delegate to the public stop() method.
        stop();
    }
+
+ //---------------------------------------------------------
+ // Java methods called from the native side
+ //--------------------
+ @SuppressWarnings("unused")
+ private static void postEventFromNative(Object audiotrack_ref,
+ int what, int arg1, int arg2, Object obj) {
+ //logd("Event posted from the native side: event="+ what + " args="+ arg1+" "+arg2);
+ AudioTrack track = (AudioTrack)((WeakReference)audiotrack_ref).get();
+ if (track == null) {
+ return;
+ }
+
+ if (what == AudioSystem.NATIVE_EVENT_ROUTING_CHANGE) {
+ track.broadcastRoutingChange();
+ return;
+ }
+ NativePositionEventHandlerDelegate delegate = track.mEventHandlerDelegate;
+ if (delegate != null) {
+ Handler handler = delegate.getHandler();
+ if (handler != null) {
+ Message m = handler.obtainMessage(what, arg1, arg2, obj);
+ handler.sendMessage(m);
+ }
+ }
+ }
+
+
+ //---------------------------------------------------------
+ // Native methods called from the Java side
+ //--------------------
+
    // --- Lifecycle ---

    // post-condition: mStreamType is overwritten with a value
    // that reflects the audio attributes (e.g. an AudioAttributes object with a usage of
    // AudioAttributes.USAGE_MEDIA will map to AudioManager.STREAM_MUSIC
    private native final int native_setup(Object /*WeakReference<AudioTrack>*/ audiotrack_this,
            Object /*AudioAttributes*/ attributes,
            int[] sampleRate, int channelMask, int channelIndexMask, int audioFormat,
            int buffSizeInBytes, int mode, int[] sessionId, long nativeAudioTrack);

    private native final void native_finalize();

    /**
     * @hide
     */
    public native final void native_release();

    // --- Transport control ---

    private native final void native_start();

    private native final void native_stop();

    private native final void native_pause();

    private native final void native_flush();

    // --- Data transfer (one entry point per sample container type) ---

    private native final int native_write_byte(byte[] audioData,
            int offsetInBytes, int sizeInBytes, int format,
            boolean isBlocking);

    private native final int native_write_short(short[] audioData,
            int offsetInShorts, int sizeInShorts, int format,
            boolean isBlocking);

    private native final int native_write_float(float[] audioData,
            int offsetInFloats, int sizeInFloats, int format,
            boolean isBlocking);

    private native final int native_write_native_bytes(Object audioData,
            int positionInBytes, int sizeInBytes, int format, boolean blocking);

    private native final int native_reload_static();

    // --- Buffer, volume, rate, and position accessors ---

    private native final int native_get_buffer_size_frames();
    private native final int native_set_buffer_size_frames(int bufferSizeInFrames);
    private native final int native_get_buffer_capacity_frames();

    private native final void native_setVolume(float leftVolume, float rightVolume);

    private native final int native_set_playback_rate(int sampleRateInHz);
    private native final int native_get_playback_rate();

    private native final void native_set_playback_params(@NonNull PlaybackParams params);
    private native final @NonNull PlaybackParams native_get_playback_params();

    private native final int native_set_marker_pos(int marker);
    private native final int native_get_marker_pos();

    private native final int native_set_pos_update_period(int updatePeriod);
    private native final int native_get_pos_update_period();

    private native final int native_set_position(int position);
    private native final int native_get_position();

    private native final int native_get_latency();

    private native final int native_get_underrun_count();

    private native final int native_get_flags();

    // longArray must be a non-null array of length >= 2
    // [0] is assigned the frame position
    // [1] is assigned the time in CLOCK_MONOTONIC nanoseconds
    private native final int native_get_timestamp(long[] longArray);

    private native final int native_set_loop(int start, int end, int loopCount);

    static private native final int native_get_output_sample_rate(int streamType);
    static private native final int native_get_min_buff_size(
            int sampleRateInHz, int channelConfig, int audioFormat);

    // --- Effects and routing ---

    private native final int native_attachAuxEffect(int effectId);
    private native final int native_setAuxEffectSendLevel(float level);

    private native final boolean native_setOutputDevice(int deviceId);
    private native final int native_getRoutedDeviceId();
    private native final void native_enableDeviceCallback();
    private native final void native_disableDeviceCallback();
    static private native int native_get_FCC_8();

    private native int native_applyVolumeShaper(
            @NonNull VolumeShaper.Configuration configuration,
            @NonNull VolumeShaper.Operation operation);

    private native @Nullable VolumeShaper.State native_getVolumeShaperState(int id);
+
+ //---------------------------------------------------------
+ // Utility methods
+ //------------------
+
    private static void logd(String msg) {
        // Debug-level log helper using the class-wide TAG.
        Log.d(TAG, msg);
    }

    private static void loge(String msg) {
        // Error-level log helper using the class-wide TAG.
        Log.e(TAG, msg);
    }
+}
diff --git a/android/media/AudioTrackRoutingProxy.java b/android/media/AudioTrackRoutingProxy.java
new file mode 100644
index 00000000..9b97ae99
--- /dev/null
+++ b/android/media/AudioTrackRoutingProxy.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2008 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * An AudioTrack connected to a native (C/C++) which allows access only to routing methods.
+ */
+class AudioTrackRoutingProxy extends AudioTrack {
+ /**
+ * A constructor which explicitly connects a Native (C++) AudioTrack. For use by
+ * the AudioTrackRoutingProxy subclass.
+ * @param nativeTrackInJavaObj a C/C++ pointer to a native AudioTrack
+ * (associated with an OpenSL ES player).
+ */
+ public AudioTrackRoutingProxy(long nativeTrackInJavaObj) {
+ super(nativeTrackInJavaObj);
+ }
+}
diff --git a/android/media/BufferingParams.java b/android/media/BufferingParams.java
new file mode 100644
index 00000000..681271b1
--- /dev/null
+++ b/android/media/BufferingParams.java
@@ -0,0 +1,460 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.IntDef;
+import android.os.Parcel;
+import android.os.Parcelable;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
/**
 * Structure for source buffering management params.
 *
 * Used by {@link MediaPlayer#getDefaultBufferingParams()},
 * {@link MediaPlayer#getBufferingParams()} and
 * {@link MediaPlayer#setBufferingParams(BufferingParams)}
 * to control source buffering behavior.
 *
 * <p>There are two stages of source buffering in {@link MediaPlayer}: initial buffering
 * (when {@link MediaPlayer} is being prepared) and rebuffering (when {@link MediaPlayer}
 * is playing back source). {@link BufferingParams} includes mode and corresponding
 * watermarks for each stage of source buffering. The watermarks could be either time
 * based (in milliseconds), or size based (in kilobytes), or both, depending on the mode.
 *
 * <p>There are 4 buffering modes: {@link #BUFFERING_MODE_NONE},
 * {@link #BUFFERING_MODE_TIME_ONLY}, {@link #BUFFERING_MODE_SIZE_ONLY} and
 * {@link #BUFFERING_MODE_TIME_THEN_SIZE}.
 * {@link MediaPlayer} source component has default buffering modes which can be queried
 * by calling {@link MediaPlayer#getDefaultBufferingParams()}.
 * Users should always use those default modes or their downsized version when trying to
 * change buffering params. For example, {@link #BUFFERING_MODE_TIME_THEN_SIZE} can be
 * downsized to {@link #BUFFERING_MODE_NONE}, {@link #BUFFERING_MODE_TIME_ONLY} or
 * {@link #BUFFERING_MODE_SIZE_ONLY}. But {@link #BUFFERING_MODE_TIME_ONLY} can not be
 * downsized to {@link #BUFFERING_MODE_SIZE_ONLY}.
 * <ul>
 * <li><strong>initial buffering stage:</strong> has one watermark which is used when
 * {@link MediaPlayer} is being prepared. When cached data amount exceeds this watermark,
 * {@link MediaPlayer} is prepared.</li>
 * <li><strong>rebuffering stage:</strong> has two watermarks, low and high, which are
 * used when {@link MediaPlayer} is playing back content.
 * <ul>
 * <li> When cached data amount exceeds high watermark, {@link MediaPlayer} will pause
 * buffering. Buffering will resume when cache runs below some limit which could be low
 * watermark or some intermediate value decided by the source component.</li>
 * <li> When cached data amount runs below low watermark, {@link MediaPlayer} will pause
 * playback. Playback will resume when cached data amount exceeds high watermark
 * or reaches end of stream.</li>
 * </ul>
 * </ul>
 * <p>Users should use {@link Builder} to change {@link BufferingParams}.
 * @hide
 */
public final class BufferingParams implements Parcelable {
    /**
     * This mode indicates that source buffering is not supported.
     */
    public static final int BUFFERING_MODE_NONE = 0;
    /**
     * This mode indicates that only time based source buffering is supported. This means
     * the watermark(s) are time based.
     */
    public static final int BUFFERING_MODE_TIME_ONLY = 1;
    /**
     * This mode indicates that only size based source buffering is supported. This means
     * the watermark(s) are size based.
     */
    public static final int BUFFERING_MODE_SIZE_ONLY = 2;
    /**
     * This mode indicates that both time and size based source buffering are supported,
     * and time based calculation precedes size based. Size based calculation will be used
     * only when time information is not available from the source.
     */
    public static final int BUFFERING_MODE_TIME_THEN_SIZE = 3;

    /** @hide */
    @IntDef(
        value = {
                BUFFERING_MODE_NONE,
                BUFFERING_MODE_TIME_ONLY,
                BUFFERING_MODE_SIZE_ONLY,
                BUFFERING_MODE_TIME_THEN_SIZE,
        }
    )
    @Retention(RetentionPolicy.SOURCE)
    public @interface BufferingMode {}

    // Sentinel value meaning "watermark not set".
    private static final int BUFFERING_NO_WATERMARK = -1;

    // params
    private int mInitialBufferingMode = BUFFERING_MODE_NONE;
    private int mRebufferingMode = BUFFERING_MODE_NONE;

    private int mInitialWatermarkMs = BUFFERING_NO_WATERMARK;
    private int mInitialWatermarkKB = BUFFERING_NO_WATERMARK;

    private int mRebufferingWatermarkLowMs = BUFFERING_NO_WATERMARK;
    private int mRebufferingWatermarkHighMs = BUFFERING_NO_WATERMARK;
    private int mRebufferingWatermarkLowKB = BUFFERING_NO_WATERMARK;
    private int mRebufferingWatermarkHighKB = BUFFERING_NO_WATERMARK;

    // Instances are created via Builder#build() or the Parcelable CREATOR only.
    private BufferingParams() {
    }

    /**
     * Return the initial buffering mode used when {@link MediaPlayer} is being prepared.
     * @return one of the values that can be set in {@link Builder#setInitialBufferingMode(int)}
     */
    public int getInitialBufferingMode() {
        return mInitialBufferingMode;
    }

    /**
     * Return the rebuffering mode used when {@link MediaPlayer} is playing back source.
     * @return one of the values that can be set in {@link Builder#setRebufferingMode(int)}
     */
    public int getRebufferingMode() {
        return mRebufferingMode;
    }

    /**
     * Return the time based initial buffering watermark in milliseconds.
     * It is meaningful only when initial buffering mode obtained from
     * {@link #getInitialBufferingMode()} is time based.
     * @return time based initial buffering watermark in milliseconds
     */
    public int getInitialBufferingWatermarkMs() {
        return mInitialWatermarkMs;
    }

    /**
     * Return the size based initial buffering watermark in kilobytes.
     * It is meaningful only when initial buffering mode obtained from
     * {@link #getInitialBufferingMode()} is size based.
     * @return size based initial buffering watermark in kilobytes
     */
    public int getInitialBufferingWatermarkKB() {
        return mInitialWatermarkKB;
    }

    /**
     * Return the time based low watermark in milliseconds for rebuffering.
     * It is meaningful only when rebuffering mode obtained from
     * {@link #getRebufferingMode()} is time based.
     * @return time based low watermark for rebuffering in milliseconds
     */
    public int getRebufferingWatermarkLowMs() {
        return mRebufferingWatermarkLowMs;
    }

    /**
     * Return the time based high watermark in milliseconds for rebuffering.
     * It is meaningful only when rebuffering mode obtained from
     * {@link #getRebufferingMode()} is time based.
     * @return time based high watermark for rebuffering in milliseconds
     */
    public int getRebufferingWatermarkHighMs() {
        return mRebufferingWatermarkHighMs;
    }

    /**
     * Return the size based low watermark in kilobytes for rebuffering.
     * It is meaningful only when rebuffering mode obtained from
     * {@link #getRebufferingMode()} is size based.
     * @return size based low watermark for rebuffering in kilobytes
     */
    public int getRebufferingWatermarkLowKB() {
        return mRebufferingWatermarkLowKB;
    }

    /**
     * Return the size based high watermark in kilobytes for rebuffering.
     * It is meaningful only when rebuffering mode obtained from
     * {@link #getRebufferingMode()} is size based.
     * @return size based high watermark for rebuffering in kilobytes
     */
    public int getRebufferingWatermarkHighKB() {
        return mRebufferingWatermarkHighKB;
    }

    /**
     * Builder class for {@link BufferingParams} objects.
     * <p> Here is an example where <code>Builder</code> is used to define the
     * {@link BufferingParams} to be used by a {@link MediaPlayer} instance:
     *
     * <pre class="prettyprint">
     * BufferingParams myParams = mediaplayer.getDefaultBufferingParams();
     * myParams = new BufferingParams.Builder(myParams)
     *         .setInitialBufferingWatermarkMs(10000)
     *         .build();
     * mediaplayer.setBufferingParams(myParams);
     * </pre>
     */
    public static class Builder {
        private int mInitialBufferingMode = BUFFERING_MODE_NONE;
        private int mRebufferingMode = BUFFERING_MODE_NONE;

        private int mInitialWatermarkMs = BUFFERING_NO_WATERMARK;
        private int mInitialWatermarkKB = BUFFERING_NO_WATERMARK;

        private int mRebufferingWatermarkLowMs = BUFFERING_NO_WATERMARK;
        private int mRebufferingWatermarkHighMs = BUFFERING_NO_WATERMARK;
        private int mRebufferingWatermarkLowKB = BUFFERING_NO_WATERMARK;
        private int mRebufferingWatermarkHighKB = BUFFERING_NO_WATERMARK;

        /**
         * Constructs a new Builder with the defaults.
         * By default, both initial buffering mode and rebuffering mode are
         * {@link BufferingParams#BUFFERING_MODE_NONE}, and all watermarks are -1.
         */
        public Builder() {
        }

        /**
         * Constructs a new Builder from a given {@link BufferingParams} instance
         * @param bp the {@link BufferingParams} object whose data will be reused
         * in the new Builder.
         */
        public Builder(BufferingParams bp) {
            mInitialBufferingMode = bp.mInitialBufferingMode;
            mRebufferingMode = bp.mRebufferingMode;

            mInitialWatermarkMs = bp.mInitialWatermarkMs;
            mInitialWatermarkKB = bp.mInitialWatermarkKB;

            mRebufferingWatermarkLowMs = bp.mRebufferingWatermarkLowMs;
            mRebufferingWatermarkHighMs = bp.mRebufferingWatermarkHighMs;
            mRebufferingWatermarkLowKB = bp.mRebufferingWatermarkLowKB;
            mRebufferingWatermarkHighKB = bp.mRebufferingWatermarkHighKB;
        }

        /**
         * Combines all of the fields that have been set and return a new
         * {@link BufferingParams} object. <code>IllegalStateException</code> will be
         * thrown if there is conflict between fields.
         * @return a new {@link BufferingParams} object
         */
        public BufferingParams build() {
            // Low watermark above high watermark is a contradiction for the relevant mode.
            if (isTimeBasedMode(mRebufferingMode)
                    && mRebufferingWatermarkLowMs > mRebufferingWatermarkHighMs) {
                throw new IllegalStateException("Illegal watermark:"
                        + mRebufferingWatermarkLowMs + " : " + mRebufferingWatermarkHighMs);
            }
            if (isSizeBasedMode(mRebufferingMode)
                    && mRebufferingWatermarkLowKB > mRebufferingWatermarkHighKB) {
                throw new IllegalStateException("Illegal watermark:"
                        + mRebufferingWatermarkLowKB + " : " + mRebufferingWatermarkHighKB);
            }

            BufferingParams bp = new BufferingParams();
            bp.mInitialBufferingMode = mInitialBufferingMode;
            bp.mRebufferingMode = mRebufferingMode;

            bp.mInitialWatermarkMs = mInitialWatermarkMs;
            bp.mInitialWatermarkKB = mInitialWatermarkKB;

            bp.mRebufferingWatermarkLowMs = mRebufferingWatermarkLowMs;
            bp.mRebufferingWatermarkHighMs = mRebufferingWatermarkHighMs;
            bp.mRebufferingWatermarkLowKB = mRebufferingWatermarkLowKB;
            bp.mRebufferingWatermarkHighKB = mRebufferingWatermarkHighKB;
            return bp;
        }

        // True for modes that carry time (millisecond) watermarks.
        private boolean isTimeBasedMode(int mode) {
            return (mode == BUFFERING_MODE_TIME_ONLY || mode == BUFFERING_MODE_TIME_THEN_SIZE);
        }

        // True for modes that carry size (kilobyte) watermarks.
        private boolean isSizeBasedMode(int mode) {
            return (mode == BUFFERING_MODE_SIZE_ONLY || mode == BUFFERING_MODE_TIME_THEN_SIZE);
        }

        /**
         * Sets the initial buffering mode.
         * @param mode one of {@link BufferingParams#BUFFERING_MODE_NONE},
         *     {@link BufferingParams#BUFFERING_MODE_TIME_ONLY},
         *     {@link BufferingParams#BUFFERING_MODE_SIZE_ONLY},
         *     {@link BufferingParams#BUFFERING_MODE_TIME_THEN_SIZE},
         * @return the same Builder instance.
         */
        public Builder setInitialBufferingMode(@BufferingMode int mode) {
            switch (mode) {
                case BUFFERING_MODE_NONE:
                case BUFFERING_MODE_TIME_ONLY:
                case BUFFERING_MODE_SIZE_ONLY:
                case BUFFERING_MODE_TIME_THEN_SIZE:
                    mInitialBufferingMode = mode;
                    break;
                default:
                    throw new IllegalArgumentException("Illegal buffering mode " + mode);
            }
            return this;
        }

        /**
         * Sets the rebuffering mode.
         * @param mode one of {@link BufferingParams#BUFFERING_MODE_NONE},
         *     {@link BufferingParams#BUFFERING_MODE_TIME_ONLY},
         *     {@link BufferingParams#BUFFERING_MODE_SIZE_ONLY},
         *     {@link BufferingParams#BUFFERING_MODE_TIME_THEN_SIZE},
         * @return the same Builder instance.
         */
        public Builder setRebufferingMode(@BufferingMode int mode) {
            switch (mode) {
                case BUFFERING_MODE_NONE:
                case BUFFERING_MODE_TIME_ONLY:
                case BUFFERING_MODE_SIZE_ONLY:
                case BUFFERING_MODE_TIME_THEN_SIZE:
                    mRebufferingMode = mode;
                    break;
                default:
                    throw new IllegalArgumentException("Illegal buffering mode " + mode);
            }
            return this;
        }

        /**
         * Sets the time based watermark in milliseconds for initial buffering.
         * @param watermarkMs time based watermark in milliseconds
         * @return the same Builder instance.
         */
        public Builder setInitialBufferingWatermarkMs(int watermarkMs) {
            mInitialWatermarkMs = watermarkMs;
            return this;
        }

        /**
         * Sets the size based watermark in kilobytes for initial buffering.
         * @param watermarkKB size based watermark in kilobytes
         * @return the same Builder instance.
         */
        public Builder setInitialBufferingWatermarkKB(int watermarkKB) {
            mInitialWatermarkKB = watermarkKB;
            return this;
        }

        /**
         * Sets the time based low watermark in milliseconds for rebuffering.
         * @param watermarkMs time based low watermark in milliseconds
         * @return the same Builder instance.
         */
        public Builder setRebufferingWatermarkLowMs(int watermarkMs) {
            mRebufferingWatermarkLowMs = watermarkMs;
            return this;
        }

        /**
         * Sets the time based high watermark in milliseconds for rebuffering.
         * @param watermarkMs time based high watermark in milliseconds
         * @return the same Builder instance.
         */
        public Builder setRebufferingWatermarkHighMs(int watermarkMs) {
            mRebufferingWatermarkHighMs = watermarkMs;
            return this;
        }

        /**
         * Sets the size based low watermark in kilobytes for rebuffering.
         * @param watermarkKB size based low watermark in kilobytes
         * @return the same Builder instance.
         */
        public Builder setRebufferingWatermarkLowKB(int watermarkKB) {
            mRebufferingWatermarkLowKB = watermarkKB;
            return this;
        }

        /**
         * Sets the size based high watermark in kilobytes for rebuffering.
         * @param watermarkKB size based high watermark in kilobytes
         * @return the same Builder instance.
         */
        public Builder setRebufferingWatermarkHighKB(int watermarkKB) {
            mRebufferingWatermarkHighKB = watermarkKB;
            return this;
        }

        /**
         * Sets the time based low and high watermarks in milliseconds for rebuffering.
         * @param lowWatermarkMs time based low watermark in milliseconds
         * @param highWatermarkMs time based high watermark in milliseconds
         * @return the same Builder instance.
         */
        public Builder setRebufferingWatermarksMs(int lowWatermarkMs, int highWatermarkMs) {
            mRebufferingWatermarkLowMs = lowWatermarkMs;
            mRebufferingWatermarkHighMs = highWatermarkMs;
            return this;
        }

        /**
         * Sets the size based low and high watermarks in kilobytes for rebuffering.
         * @param lowWatermarkKB size based low watermark in kilobytes
         * @param highWatermarkKB size based high watermark in kilobytes
         * @return the same Builder instance.
         */
        public Builder setRebufferingWatermarksKB(int lowWatermarkKB, int highWatermarkKB) {
            mRebufferingWatermarkLowKB = lowWatermarkKB;
            mRebufferingWatermarkHighKB = highWatermarkKB;
            return this;
        }
    }

    // Read order here must match the write order in writeToParcel().
    private BufferingParams(Parcel in) {
        mInitialBufferingMode = in.readInt();
        mRebufferingMode = in.readInt();

        mInitialWatermarkMs = in.readInt();
        mInitialWatermarkKB = in.readInt();

        mRebufferingWatermarkLowMs = in.readInt();
        mRebufferingWatermarkHighMs = in.readInt();
        mRebufferingWatermarkLowKB = in.readInt();
        mRebufferingWatermarkHighKB = in.readInt();
    }

    public static final Parcelable.Creator<BufferingParams> CREATOR =
            new Parcelable.Creator<BufferingParams>() {
                @Override
                public BufferingParams createFromParcel(Parcel in) {
                    return new BufferingParams(in);
                }

                @Override
                public BufferingParams[] newArray(int size) {
                    return new BufferingParams[size];
                }
            };


    @Override
    public int describeContents() {
        return 0;
    }

    // Write order here must match the read order in BufferingParams(Parcel).
    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeInt(mInitialBufferingMode);
        dest.writeInt(mRebufferingMode);

        dest.writeInt(mInitialWatermarkMs);
        dest.writeInt(mInitialWatermarkKB);

        dest.writeInt(mRebufferingWatermarkLowMs);
        dest.writeInt(mRebufferingWatermarkHighMs);
        dest.writeInt(mRebufferingWatermarkLowKB);
        dest.writeInt(mRebufferingWatermarkHighKB);
    }
}
diff --git a/android/media/CamcorderProfile.java b/android/media/CamcorderProfile.java
new file mode 100644
index 00000000..d303a2e3
--- /dev/null
+++ b/android/media/CamcorderProfile.java
@@ -0,0 +1,503 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.hardware.Camera;
+import android.hardware.Camera.CameraInfo;
+
+/**
+ * Retrieves the
+ * predefined camcorder profile settings for camcorder applications.
+ * These settings are read-only.
+ *
+ * <p>The compressed output from a recording session with a given
+ * CamcorderProfile contains two tracks: one for audio and one for video.
+ *
+ * <p>Each profile specifies the following set of parameters:
+ * <ul>
+ * <li> The file output format
+ * <li> Video codec format
+ * <li> Video bit rate in bits per second
+ * <li> Video frame rate in frames per second
+ * <li> Video frame width and height,
+ * <li> Audio codec format
+ * <li> Audio bit rate in bits per second,
+ * <li> Audio sample rate
+ * <li> Number of audio channels for recording.
+ * </ul>
+ */
public class CamcorderProfile
{
    // Do not change these values/ordinals without updating their counterpart
    // in include/media/MediaProfiles.h!

    /**
     * Quality level corresponding to the lowest available resolution.
     */
    public static final int QUALITY_LOW = 0;

    /**
     * Quality level corresponding to the highest available resolution.
     */
    public static final int QUALITY_HIGH = 1;

    /**
     * Quality level corresponding to the qcif (176 x 144) resolution.
     */
    public static final int QUALITY_QCIF = 2;

    /**
     * Quality level corresponding to the cif (352 x 288) resolution.
     */
    public static final int QUALITY_CIF = 3;

    /**
     * Quality level corresponding to the 480p (720 x 480) resolution.
     * Note that the horizontal resolution for 480p can also be other
     * values, such as 640 or 704, instead of 720.
     */
    public static final int QUALITY_480P = 4;

    /**
     * Quality level corresponding to the 720p (1280 x 720) resolution.
     */
    public static final int QUALITY_720P = 5;

    /**
     * Quality level corresponding to the 1080p (1920 x 1080) resolution.
     * Note that the vertical resolution for 1080p can also be 1088,
     * instead of 1080 (used by some vendors to avoid cropping during
     * video playback).
     */
    public static final int QUALITY_1080P = 6;

    /**
     * Quality level corresponding to the QVGA (320x240) resolution.
     */
    public static final int QUALITY_QVGA = 7;

    /**
     * Quality level corresponding to the 2160p (3840x2160) resolution.
     */
    public static final int QUALITY_2160P = 8;

    // Start and end of quality list
    private static final int QUALITY_LIST_START = QUALITY_LOW;
    private static final int QUALITY_LIST_END = QUALITY_2160P;

    /**
     * Time lapse quality level corresponding to the lowest available resolution.
     */
    public static final int QUALITY_TIME_LAPSE_LOW = 1000;

    /**
     * Time lapse quality level corresponding to the highest available resolution.
     */
    public static final int QUALITY_TIME_LAPSE_HIGH = 1001;

    /**
     * Time lapse quality level corresponding to the qcif (176 x 144) resolution.
     */
    public static final int QUALITY_TIME_LAPSE_QCIF = 1002;

    /**
     * Time lapse quality level corresponding to the cif (352 x 288) resolution.
     */
    public static final int QUALITY_TIME_LAPSE_CIF = 1003;

    /**
     * Time lapse quality level corresponding to the 480p (720 x 480) resolution.
     */
    public static final int QUALITY_TIME_LAPSE_480P = 1004;

    /**
     * Time lapse quality level corresponding to the 720p (1280 x 720) resolution.
     */
    public static final int QUALITY_TIME_LAPSE_720P = 1005;

    /**
     * Time lapse quality level corresponding to the 1080p (1920 x 1088) resolution.
     */
    public static final int QUALITY_TIME_LAPSE_1080P = 1006;

    /**
     * Time lapse quality level corresponding to the QVGA (320 x 240) resolution.
     */
    public static final int QUALITY_TIME_LAPSE_QVGA = 1007;

    /**
     * Time lapse quality level corresponding to the 2160p (3840 x 2160) resolution.
     */
    public static final int QUALITY_TIME_LAPSE_2160P = 1008;

    // Start and end of timelapse quality list
    private static final int QUALITY_TIME_LAPSE_LIST_START = QUALITY_TIME_LAPSE_LOW;
    private static final int QUALITY_TIME_LAPSE_LIST_END = QUALITY_TIME_LAPSE_2160P;

    /**
     * High speed ( >= 100fps) quality level corresponding to the lowest available resolution.
     * <p>
     * For all the high speed profiles defined below ((from {@link #QUALITY_HIGH_SPEED_LOW} to
     * {@link #QUALITY_HIGH_SPEED_2160P}), they are similar as normal recording profiles, with just
     * higher output frame rate and bit rate. Therefore, setting these profiles with
     * {@link MediaRecorder#setProfile} without specifying any other encoding parameters will
     * produce high speed videos rather than slow motion videos that have different capture and
     * output (playback) frame rates. To record slow motion videos, the application must set video
     * output (playback) frame rate and bit rate appropriately via
     * {@link MediaRecorder#setVideoFrameRate} and {@link MediaRecorder#setVideoEncodingBitRate}
     * based on the slow motion factor. If the application intends to do the video recording with
     * {@link MediaCodec} encoder, it must set each individual field of {@link MediaFormat}
     * similarly according to this CamcorderProfile.
     * </p>
     *
     * @see #videoBitRate
     * @see #videoFrameRate
     * @see MediaRecorder
     * @see MediaCodec
     * @see MediaFormat
     */
    public static final int QUALITY_HIGH_SPEED_LOW = 2000;

    /**
     * High speed ( >= 100fps) quality level corresponding to the highest available resolution.
     */
    public static final int QUALITY_HIGH_SPEED_HIGH = 2001;

    /**
     * High speed ( >= 100fps) quality level corresponding to the 480p (720 x 480) resolution.
     *
     * Note that the horizontal resolution for 480p can also be other
     * values, such as 640 or 704, instead of 720.
     */
    public static final int QUALITY_HIGH_SPEED_480P = 2002;

    /**
     * High speed ( >= 100fps) quality level corresponding to the 720p (1280 x 720) resolution.
     */
    public static final int QUALITY_HIGH_SPEED_720P = 2003;

    /**
     * High speed ( >= 100fps) quality level corresponding to the 1080p (1920 x 1080 or 1920x1088)
     * resolution.
     */
    public static final int QUALITY_HIGH_SPEED_1080P = 2004;

    /**
     * High speed ( >= 100fps) quality level corresponding to the 2160p (3840 x 2160)
     * resolution.
     */
    public static final int QUALITY_HIGH_SPEED_2160P = 2005;

    // Start and end of high speed quality list
    private static final int QUALITY_HIGH_SPEED_LIST_START = QUALITY_HIGH_SPEED_LOW;
    private static final int QUALITY_HIGH_SPEED_LIST_END = QUALITY_HIGH_SPEED_2160P;

    /**
     * Default recording duration in seconds before the session is terminated.
     * This is useful for applications like MMS, which have a limited file size requirement.
     */
    public int duration;

    /**
     * The quality level of the camcorder profile
     */
    public int quality;

    /**
     * The file output format of the camcorder profile
     * @see android.media.MediaRecorder.OutputFormat
     */
    public int fileFormat;

    /**
     * The video encoder being used for the video track
     * @see android.media.MediaRecorder.VideoEncoder
     */
    public int videoCodec;

    /**
     * The target video output bit rate in bits per second
     * <p>
     * This is the target recorded video output bit rate if the application configures the video
     * recording via {@link MediaRecorder#setProfile} without specifying any other
     * {@link MediaRecorder} encoding parameters. For example, for high speed quality profiles (from
     * {@link #QUALITY_HIGH_SPEED_LOW} to {@link #QUALITY_HIGH_SPEED_2160P}), this is the bit rate
     * where the video is recorded with. If the application intends to record slow motion videos
     * with the high speed quality profiles, it must set a different video bit rate that is
     * corresponding to the desired recording output bit rate (i.e., the encoded video bit rate
     * during normal playback) via {@link MediaRecorder#setVideoEncodingBitRate}. For example, if
     * {@link #QUALITY_HIGH_SPEED_720P} advertises 240fps {@link #videoFrameRate} and 64Mbps
     * {@link #videoBitRate} in the high speed CamcorderProfile, and the application intends to
     * record 1/8 factor slow motion recording videos, the application must set 30fps via
     * {@link MediaRecorder#setVideoFrameRate} and 8Mbps ( {@link #videoBitRate} * slow motion
     * factor) via {@link MediaRecorder#setVideoEncodingBitRate}. Failing to do so will result in
     * videos with unexpected frame rate and bit rate, or {@link MediaRecorder} error if the output
     * bit rate exceeds the encoder limit. If the application intends to do the video recording with
     * {@link MediaCodec} encoder, it must set each individual field of {@link MediaFormat}
     * similarly according to this CamcorderProfile.
     * </p>
     *
     * @see #videoFrameRate
     * @see MediaRecorder
     * @see MediaCodec
     * @see MediaFormat
     */
    public int videoBitRate;

    /**
     * The target video frame rate in frames per second.
     * <p>
     * This is the target recorded video output frame rate per second if the application configures
     * the video recording via {@link MediaRecorder#setProfile} without specifying any other
     * {@link MediaRecorder} encoding parameters. For example, for high speed quality profiles (from
     * {@link #QUALITY_HIGH_SPEED_LOW} to {@link #QUALITY_HIGH_SPEED_2160P}), this is the frame rate
     * where the video is recorded and played back with. If the application intends to create slow
     * motion use case with the high speed quality profiles, it must set a different video frame
     * rate that is corresponding to the desired output (playback) frame rate via
     * {@link MediaRecorder#setVideoFrameRate}. For example, if {@link #QUALITY_HIGH_SPEED_720P}
     * advertises 240fps {@link #videoFrameRate} in the CamcorderProfile, and the application
     * intends to create 1/8 factor slow motion recording videos, the application must set 30fps via
     * {@link MediaRecorder#setVideoFrameRate}. Failing to do so will result in high speed videos
     * with normal speed playback frame rate (240fps for above example). If the application intends
     * to do the video recording with {@link MediaCodec} encoder, it must set each individual field
     * of {@link MediaFormat} similarly according to this CamcorderProfile.
     * </p>
     *
     * @see #videoBitRate
     * @see MediaRecorder
     * @see MediaCodec
     * @see MediaFormat
     */
    public int videoFrameRate;

    /**
     * The target video frame width in pixels
     */
    public int videoFrameWidth;

    /**
     * The target video frame height in pixels
     */
    public int videoFrameHeight;

    /**
     * The audio encoder being used for the audio track.
     * @see android.media.MediaRecorder.AudioEncoder
     */
    public int audioCodec;

    /**
     * The target audio output bit rate in bits per second
     */
    public int audioBitRate;

    /**
     * The audio sampling rate used for the audio track
     */
    public int audioSampleRate;

    /**
     * The number of audio channels used for the audio track
     */
    public int audioChannels;

    /**
     * Returns the camcorder profile for the first back-facing camera on the
     * device at the given quality level. If the device has no back-facing
     * camera, this returns null.
     * @param quality the target quality level for the camcorder profile
     * @see #get(int, int)
     */
    public static CamcorderProfile get(int quality) {
        int numberOfCameras = Camera.getNumberOfCameras();
        CameraInfo cameraInfo = new CameraInfo();
        for (int i = 0; i < numberOfCameras; i++) {
            Camera.getCameraInfo(i, cameraInfo);
            // Delegate to the first back-facing camera found.
            if (cameraInfo.facing == CameraInfo.CAMERA_FACING_BACK) {
                return get(i, quality);
            }
        }
        return null;
    }

    /**
     * Returns the camcorder profile for the given camera at the given
     * quality level.
     *
     * Quality levels QUALITY_LOW, QUALITY_HIGH are guaranteed to be supported, while
     * other levels may or may not be supported. The supported levels can be checked using
     * {@link #hasProfile(int, int)}.
     * QUALITY_LOW refers to the lowest quality available, while QUALITY_HIGH refers to
     * the highest quality available.
     * QUALITY_LOW/QUALITY_HIGH have to match one of qcif, cif, 480p, 720p, 1080p or 2160p.
     * E.g. if the device supports 480p, 720p, 1080p and 2160p, then low is 480p and high is
     * 2160p.
     *
     * The same is true for time lapse quality levels, i.e. QUALITY_TIME_LAPSE_LOW,
     * QUALITY_TIME_LAPSE_HIGH are guaranteed to be supported and have to match one of
     * qcif, cif, 480p, 720p, 1080p, or 2160p.
     *
     * For high speed quality levels, they may or may not be supported. If a subset of the levels
     * are supported, QUALITY_HIGH_SPEED_LOW and QUALITY_HIGH_SPEED_HIGH are guaranteed to be
     * supported and have to match one of 480p, 720p, or 1080p.
     *
     * A camcorder recording session with higher quality level usually has higher output
     * bit rate, better video and/or audio recording quality, larger video frame
     * resolution and higher audio sampling rate, etc, than those with lower quality
     * level.
     *
     * @param cameraId the id for the camera
     * @param quality the target quality level for the camcorder profile.
     * @throws IllegalArgumentException if quality is outside all three defined ranges
     * @see #QUALITY_LOW
     * @see #QUALITY_HIGH
     * @see #QUALITY_QCIF
     * @see #QUALITY_CIF
     * @see #QUALITY_480P
     * @see #QUALITY_720P
     * @see #QUALITY_1080P
     * @see #QUALITY_2160P
     * @see #QUALITY_TIME_LAPSE_LOW
     * @see #QUALITY_TIME_LAPSE_HIGH
     * @see #QUALITY_TIME_LAPSE_QCIF
     * @see #QUALITY_TIME_LAPSE_CIF
     * @see #QUALITY_TIME_LAPSE_480P
     * @see #QUALITY_TIME_LAPSE_720P
     * @see #QUALITY_TIME_LAPSE_1080P
     * @see #QUALITY_TIME_LAPSE_2160P
     * @see #QUALITY_HIGH_SPEED_LOW
     * @see #QUALITY_HIGH_SPEED_HIGH
     * @see #QUALITY_HIGH_SPEED_480P
     * @see #QUALITY_HIGH_SPEED_720P
     * @see #QUALITY_HIGH_SPEED_1080P
     * @see #QUALITY_HIGH_SPEED_2160P
     */
    public static CamcorderProfile get(int cameraId, int quality) {
        // Accept only values inside one of the three defined quality ranges
        // (normal, time lapse, high speed).
        if (!((quality >= QUALITY_LIST_START &&
               quality <= QUALITY_LIST_END) ||
              (quality >= QUALITY_TIME_LAPSE_LIST_START &&
               quality <= QUALITY_TIME_LAPSE_LIST_END) ||
               (quality >= QUALITY_HIGH_SPEED_LIST_START &&
               quality <= QUALITY_HIGH_SPEED_LIST_END))) {
            String errMessage = "Unsupported quality level: " + quality;
            throw new IllegalArgumentException(errMessage);
        }
        return native_get_camcorder_profile(cameraId, quality);
    }

    /**
     * Returns true if camcorder profile exists for the first back-facing
     * camera at the given quality level.
     *
     * <p>
     * When using the Camera 2 API in {@code LEGACY} mode (i.e. when
     * {@link android.hardware.camera2.CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL} is set
     * to
     * {@link android.hardware.camera2.CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY}),
     * {@link #hasProfile} may return {@code true} for unsupported resolutions. To ensure that
     * a given resolution is supported in LEGACY mode, the configuration given in
     * {@link android.hardware.camera2.CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP}
     * must contain the resolution in the supported output sizes. The recommended way to check
     * this is with
     * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes(Class)} with the
     * class of the desired recording endpoint, and check that the desired resolution is contained
     * in the list returned.
     * </p>
     * @see android.hardware.camera2.CameraManager
     * @see android.hardware.camera2.CameraCharacteristics
     *
     * @param quality the target quality level for the camcorder profile
     */
    public static boolean hasProfile(int quality) {
        int numberOfCameras = Camera.getNumberOfCameras();
        CameraInfo cameraInfo = new CameraInfo();
        for (int i = 0; i < numberOfCameras; i++) {
            Camera.getCameraInfo(i, cameraInfo);
            // Delegate to the first back-facing camera found.
            if (cameraInfo.facing == CameraInfo.CAMERA_FACING_BACK) {
                return hasProfile(i, quality);
            }
        }
        return false;
    }

    /**
     * Returns true if camcorder profile exists for the given camera at
     * the given quality level.
     *
     * <p>
     * When using the Camera 2 API in LEGACY mode (i.e. when
     * {@link android.hardware.camera2.CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL} is set
     * to
     * {@link android.hardware.camera2.CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY}),
     * {@link #hasProfile} may return {@code true} for unsupported resolutions. To ensure that
     * a given resolution is supported in LEGACY mode, the configuration given in
     * {@link android.hardware.camera2.CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP}
     * must contain the resolution in the supported output sizes. The recommended way to check
     * this is with
     * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes(Class)} with the
     * class of the desired recording endpoint, and check that the desired resolution is contained
     * in the list returned.
     * </p>
     * @see android.hardware.camera2.CameraManager
     * @see android.hardware.camera2.CameraCharacteristics
     *
     * @param cameraId the id for the camera
     * @param quality the target quality level for the camcorder profile
     */
    public static boolean hasProfile(int cameraId, int quality) {
        return native_has_camcorder_profile(cameraId, quality);
    }

    static {
        System.loadLibrary("media_jni");
        native_init();
    }

    // Private constructor called by JNI. The parameter order must match the
    // native code in android_media_MediaProfiles.cpp; do not reorder.
    private CamcorderProfile(int duration,
                             int quality,
                             int fileFormat,
                             int videoCodec,
                             int videoBitRate,
                             int videoFrameRate,
                             int videoWidth,
                             int videoHeight,
                             int audioCodec,
                             int audioBitRate,
                             int audioSampleRate,
                             int audioChannels) {

        this.duration = duration;
        this.quality = quality;
        this.fileFormat = fileFormat;
        this.videoCodec = videoCodec;
        this.videoBitRate = videoBitRate;
        this.videoFrameRate = videoFrameRate;
        this.videoFrameWidth = videoWidth;
        this.videoFrameHeight = videoHeight;
        this.audioCodec = audioCodec;
        this.audioBitRate = audioBitRate;
        this.audioSampleRate = audioSampleRate;
        this.audioChannels = audioChannels;
    }

    // Methods implemented by JNI
    private static native final void native_init();
    private static native final CamcorderProfile native_get_camcorder_profile(
            int cameraId, int quality);
    private static native final boolean native_has_camcorder_profile(
            int cameraId, int quality);
}
diff --git a/android/media/CameraProfile.java b/android/media/CameraProfile.java
new file mode 100644
index 00000000..905e2d2d
--- /dev/null
+++ b/android/media/CameraProfile.java
@@ -0,0 +1,114 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.hardware.Camera;
+import android.hardware.Camera.CameraInfo;
+
+import java.util.Arrays;
+import java.util.HashMap;
+
+/**
+ * The CameraProfile class is used to retrieve the pre-defined still image
+ * capture (jpeg) quality levels (0-100) used for low, medium, and high
+ * quality settings in the Camera application.
+ *
+ */
+public class CameraProfile
+{
+ /**
+ * Define three quality levels for JPEG image encoding.
+ */
+ /*
+ * Don't change the values for these constants unless getImageEncodingQualityLevels()
+ * method is also changed accordingly.
+ */
+ public static final int QUALITY_LOW = 0;
+ public static final int QUALITY_MEDIUM = 1;
+ public static final int QUALITY_HIGH = 2;
+
+ /*
+ * Cache the Jpeg encoding quality parameters
+ */
+ private static final HashMap<Integer, int[]> sCache = new HashMap<Integer, int[]>();
+
+ /**
+ * Returns a pre-defined still image capture (jpeg) quality level
+ * used for the given quality level in the Camera application for
+ * the first back-facing camera on the device. If the device has no
+ * back-facing camera, this returns 0.
+ *
+ * @param quality The target quality level
+ */
+ public static int getJpegEncodingQualityParameter(int quality) {
+ int numberOfCameras = Camera.getNumberOfCameras();
+ CameraInfo cameraInfo = new CameraInfo();
+ for (int i = 0; i < numberOfCameras; i++) {
+ Camera.getCameraInfo(i, cameraInfo);
+ if (cameraInfo.facing == CameraInfo.CAMERA_FACING_BACK) {
+ return getJpegEncodingQualityParameter(i, quality);
+ }
+ }
+ return 0;
+ }
+
+ /**
+ * Returns a pre-defined still image capture (jpeg) quality level
+ * used for the given quality level in the Camera application for
+ * the specified camera.
+ *
+ * @param cameraId The id of the camera
+ * @param quality The target quality level
+ */
+ public static int getJpegEncodingQualityParameter(int cameraId, int quality) {
+ if (quality < QUALITY_LOW || quality > QUALITY_HIGH) {
+ throw new IllegalArgumentException("Unsupported quality level: " + quality);
+ }
+ synchronized (sCache) {
+ int[] levels = sCache.get(cameraId);
+ if (levels == null) {
+ levels = getImageEncodingQualityLevels(cameraId);
+ sCache.put(cameraId, levels);
+ }
+ return levels[quality];
+ }
+ }
+
+ static {
+ System.loadLibrary("media_jni");
+ native_init();
+ }
+
+ private static int[] getImageEncodingQualityLevels(int cameraId) {
+ int nLevels = native_get_num_image_encoding_quality_levels(cameraId);
+ if (nLevels != QUALITY_HIGH + 1) {
+ throw new RuntimeException("Unexpected Jpeg encoding quality levels " + nLevels);
+ }
+
+ int[] levels = new int[nLevels];
+ for (int i = 0; i < nLevels; ++i) {
+ levels[i] = native_get_image_encoding_quality_level(cameraId, i);
+ }
+ Arrays.sort(levels); // Lower quality level ALWAYS comes before higher one
+ return levels;
+ }
+
+ // Methods implemented by JNI
+ private static native final void native_init();
+ private static native final int native_get_num_image_encoding_quality_levels(int cameraId);
+ private static native final int native_get_image_encoding_quality_level(int cameraId, int index);
+}
diff --git a/android/media/Cea708CaptionRenderer.java b/android/media/Cea708CaptionRenderer.java
new file mode 100644
index 00000000..88912fef
--- /dev/null
+++ b/android/media/Cea708CaptionRenderer.java
@@ -0,0 +1,2151 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.content.Context;
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.Paint;
+import android.graphics.Rect;
+import android.graphics.Typeface;
+import android.os.Handler;
+import android.os.Message;
+import android.text.SpannableStringBuilder;
+import android.text.Spanned;
+import android.text.style.CharacterStyle;
+import android.text.style.RelativeSizeSpan;
+import android.text.style.StyleSpan;
+import android.text.style.SubscriptSpan;
+import android.text.style.SuperscriptSpan;
+import android.text.style.UnderlineSpan;
+import android.util.AttributeSet;
+import android.text.Layout.Alignment;
+import android.util.Log;
+import android.text.TextUtils;
+import android.view.Gravity;
+import android.view.View;
+import android.view.ViewGroup;
+import android.view.accessibility.CaptioningManager;
+import android.view.accessibility.CaptioningManager.CaptionStyle;
+import android.widget.RelativeLayout;
+import android.widget.TextView;
+
+import java.io.UnsupportedEncodingException;
+import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Vector;
+
+import com.android.internal.widget.SubtitleView;
+
+/** @hide */
+public class Cea708CaptionRenderer extends SubtitleController.Renderer {
+ private final Context mContext;
+ private Cea708CCWidget mCCWidget;
+
+ public Cea708CaptionRenderer(Context context) {
+ mContext = context;
+ }
+
+ @Override
+ public boolean supports(MediaFormat format) {
+ if (format.containsKey(MediaFormat.KEY_MIME)) {
+ String mimeType = format.getString(MediaFormat.KEY_MIME);
+ return MediaPlayer.MEDIA_MIMETYPE_TEXT_CEA_708.equals(mimeType);
+ }
+ return false;
+ }
+
+ @Override
+ public SubtitleTrack createTrack(MediaFormat format) {
+ String mimeType = format.getString(MediaFormat.KEY_MIME);
+ if (MediaPlayer.MEDIA_MIMETYPE_TEXT_CEA_708.equals(mimeType)) {
+ if (mCCWidget == null) {
+ mCCWidget = new Cea708CCWidget(mContext);
+ }
+ return new Cea708CaptionTrack(mCCWidget, format);
+ }
+ throw new RuntimeException("No matching format: " + format.toString());
+ }
+}
+
+/** @hide */
+class Cea708CaptionTrack extends SubtitleTrack {
+ private final Cea708CCParser mCCParser;
+ private final Cea708CCWidget mRenderingWidget;
+
+ Cea708CaptionTrack(Cea708CCWidget renderingWidget, MediaFormat format) {
+ super(format);
+
+ mRenderingWidget = renderingWidget;
+ mCCParser = new Cea708CCParser(mRenderingWidget);
+ }
+
+ @Override
+ public void onData(byte[] data, boolean eos, long runID) {
+ mCCParser.parse(data);
+ }
+
+ @Override
+ public RenderingWidget getRenderingWidget() {
+ return mRenderingWidget;
+ }
+
+ @Override
+ public void updateView(Vector<Cue> activeCues) {
+ // Overriding with NO-OP, CC rendering by-passes this
+ }
+}
+
+/**
+ * @hide
+ *
+ * A class for parsing CEA-708, which is the standard for closed captioning for ATSC DTV.
+ *
+ * <p>ATSC DTV closed caption data are carried on picture user data of video streams.
+ * This class starts to parse from picture user data payload, so extraction process of user_data
+ * from video streams is up to outside of this code.
+ *
+ * <p>There are 4 steps to decode user_data to provide closed caption services. Step 1 and 2 are
+ * done in NuPlayer and libstagefright.
+ *
+ * <h3>Step 1. user_data -&gt; CcPacket</h3>
+ *
+ * <p>First, user_data consists of cc_data packets, which are 3-byte segments. Here, CcPacket is a
+ * collection of cc_data packets in a frame along with same presentation timestamp. Because cc_data
+ * packets must be reassembled in the frame display order, CcPackets are reordered.
+ *
+ * <h3>Step 2. CcPacket -&gt; DTVCC packet</h3>
+ *
+ * <p>Each cc_data packet has a one byte for declaring a type of itself and data validity, and the
+ * subsequent two bytes for input data of a DTVCC packet. There are 4 types for cc_data packet.
+ * We're interested in DTVCC_PACKET_START(type 3) and DTVCC_PACKET_DATA(type 2). Each DTVCC packet
+ * begins with DTVCC_PACKET_START(type 3) and the following cc_data packets which has
+ * DTVCC_PACKET_DATA(type 2) are appended into the DTVCC packet being assembled.
+ *
+ * <h3>Step 3. DTVCC packet -&gt; Service Blocks</h3>
+ *
+ * <p>A DTVCC packet consists of multiple service blocks. Each service block represents a caption
+ * track and has a service number, which ranges from 1 to 63, that denotes caption track identity.
+ * In here, we listen at most one chosen caption track by service number. Otherwise, just skip the
+ * other service blocks.
+ *
+ * <h3>Step 4. Interpreting Service Block Data ({@link #parseServiceBlockData}, {@code parseXX},
+ * and {@link #parseExt1} methods)</h3>
+ *
+ * <p>Service block data is the actual caption stream. It looks similar to telnet. It uses most parts of
+ * ASCII table and consists of specially defined commands and some ASCII control codes which work
+ * in a behavior slightly different from their original purpose. ASCII control codes and caption
+ * commands are explicit instructions that control the state of a closed caption service and the
+ * other ASCII and text codes are implicit instructions that send their characters to buffer.
+ *
+ * <p>There are 4 main code groups and 4 extended code groups. Both the range of code groups are the
+ * same as the range of a byte.
+ *
+ * <p>4 main code groups: C0, C1, G0, G1
+ * <br>4 extended code groups: C2, C3, G2, G3
+ *
+ * <p>Each code group has its own handle method. For example, {@link #parseC0} handles C0 code group
+ * and so on. And {@link #parseServiceBlockData} method maps a stream on the main code groups while
+ * {@link #parseExt1} method maps on the extended code groups.
+ *
+ * <p>The main code groups:
+ * <ul>
+ * <li>C0 - contains modified ASCII control codes. It is not intended by CEA-708 but Korea TTA
+ * standard for ATSC CC uses P16 character heavily, which is unclear entity in CEA-708 doc,
+ * even for the alphanumeric characters instead of ASCII characters.</li>
+ * <li>C1 - contains the caption commands. There are 3 categories of a caption command.</li>
+ * <ul>
+ * <li>Window commands: The window commands control a caption window, which is an addressable
+ * area within the Safe title area. (CWX, CLW, DSW, HDW, TGW, DLW, SWA, DFX)</li>
+ * <li>Pen commands: The pen commands control text style and location. (SPA, SPC, SPL)</li>
+ * <li>Job commands: The job commands make a delay and recover from the delay. (DLY, DLC, RST)</li>
+ * </ul>
+ * <li>G0 - same as printable ASCII character set except music note character.</li>
+ * <li>G1 - same as ISO 8859-1 Latin 1 character set.</li>
+ * </ul>
+ * <p>Most of the extended code groups are being skipped.
+ *
+ */
class Cea708CCParser {
    private static final String TAG = "Cea708CCParser";
    private static final boolean DEBUG = false;

    // U+266B (beamed eighth notes), substituted for the G0 0x7f music-note code.
    private static final String MUSIC_NOTE_CHAR = new String(
            "\u266B".getBytes(StandardCharsets.UTF_8), StandardCharsets.UTF_8);

    // Accumulates printable characters until the next event (or end of parse)
    // flushes them as a CAPTION_EMIT_TYPE_BUFFER event.
    private final StringBuffer mBuffer = new StringBuffer();
    // The code byte currently being interpreted; shared state read by the
    // parseXX methods after parseServiceBlockData/parseExt1 set it.
    private int mCommand = 0;

    // Assign a dummy listener in order to avoid null checks.
    private DisplayListener mListener = new DisplayListener() {
        @Override
        public void emitEvent(CaptionEvent event) {
            // do nothing
        }
    };
+
    /**
     * {@link Cea708CCParser} emits caption events of three different types.
     * {@link DisplayListener#emitEvent} is invoked with the parameter
     * {@link CaptionEvent} to pass all the results to an observer of the decoding process.
     *
     * <p>{@link CaptionEvent#type} determines the type of the result and
     * {@link CaptionEvent#obj} contains the output value of a caption event.
     * The observer must do the casting to the corresponding type.
     *
     * <ul><li>{@code CAPTION_EMIT_TYPE_BUFFER}: Passes a caption text buffer to an observer.
     * {@code obj} must be of {@link String}.</li>
     *
     * <li>{@code CAPTION_EMIT_TYPE_CONTROL}: Passes a caption character control code to an
     * observer. {@code obj} must be of {@link Character}.</li>
     *
     * <li>{@code CAPTION_EMIT_TYPE_COMMAND_XXX}: Passes a caption command to an observer.
     * {@code obj} depends on the command (may be {@code null}).</li></ul>
     */
    public static final int CAPTION_EMIT_TYPE_BUFFER = 1;
    public static final int CAPTION_EMIT_TYPE_CONTROL = 2;
    public static final int CAPTION_EMIT_TYPE_COMMAND_CWX = 3;
    public static final int CAPTION_EMIT_TYPE_COMMAND_CLW = 4;
    public static final int CAPTION_EMIT_TYPE_COMMAND_DSW = 5;
    public static final int CAPTION_EMIT_TYPE_COMMAND_HDW = 6;
    public static final int CAPTION_EMIT_TYPE_COMMAND_TGW = 7;
    public static final int CAPTION_EMIT_TYPE_COMMAND_DLW = 8;
    public static final int CAPTION_EMIT_TYPE_COMMAND_DLY = 9;
    public static final int CAPTION_EMIT_TYPE_COMMAND_DLC = 10;
    public static final int CAPTION_EMIT_TYPE_COMMAND_RST = 11;
    public static final int CAPTION_EMIT_TYPE_COMMAND_SPA = 12;
    public static final int CAPTION_EMIT_TYPE_COMMAND_SPC = 13;
    public static final int CAPTION_EMIT_TYPE_COMMAND_SPL = 14;
    public static final int CAPTION_EMIT_TYPE_COMMAND_SWA = 15;
    public static final int CAPTION_EMIT_TYPE_COMMAND_DFX = 16;
+
+ Cea708CCParser(DisplayListener listener) {
+ if (listener != null) {
+ mListener = listener;
+ }
+ }
+
    /** Receives every CaptionEvent produced while parsing service block data. */
    interface DisplayListener {
        void emitEvent(CaptionEvent event);
    }
+
    // Flushes buffered text first so the listener sees events in stream order,
    // then forwards the new event.
    private void emitCaptionEvent(CaptionEvent captionEvent) {
        // Emit any buffered text before the new event is delivered.
        emitCaptionBuffer();
        mListener.emitEvent(captionEvent);
    }
+
+ private void emitCaptionBuffer() {
+ if (mBuffer.length() > 0) {
+ mListener.emitEvent(new CaptionEvent(CAPTION_EMIT_TYPE_BUFFER, mBuffer.toString()));
+ mBuffer.setLength(0);
+ }
+ }
+
+ // Step 3. DTVCC packet -> Service Blocks (parseDtvCcPacket method)
+ public void parse(byte[] data) {
+ // From this point, starts to read DTVCC coding layer.
+ // First, identify code groups, which is defined in CEA-708B Section 7.1.
+ int pos = 0;
+ while (pos < data.length) {
+ pos = parseServiceBlockData(data, pos);
+ }
+
+ // Emit the buffer after reading codes.
+ emitCaptionBuffer();
+ }
+
    // Step 4. Main code groups
    /**
     * Dispatches the code at {@code data[pos]} to its main code group handler
     * (C0/C1/G0/G1) or to the extended-group escape EXT1, and returns the
     * position of the next unread byte. Note the EXT1 check must precede the
     * C0 range check: EXT1 (0x10) lies inside the C0 range 0x00-0x1f.
     * A byte matching no range is silently consumed.
     */
    private int parseServiceBlockData(byte[] data, int pos) {
        // For the details of the ranges of DTVCC code groups, see CEA-708B Table 6.
        mCommand = data[pos] & 0xff;
        ++pos;
        if (mCommand == Const.CODE_C0_EXT1) {
            if (DEBUG) {
                Log.d(TAG, String.format("parseServiceBlockData EXT1 %x", mCommand));
            }
            pos = parseExt1(data, pos);
        } else if (mCommand >= Const.CODE_C0_RANGE_START
                && mCommand <= Const.CODE_C0_RANGE_END) {
            if (DEBUG) {
                Log.d(TAG, String.format("parseServiceBlockData C0 %x", mCommand));
            }
            pos = parseC0(data, pos);
        } else if (mCommand >= Const.CODE_C1_RANGE_START
                && mCommand <= Const.CODE_C1_RANGE_END) {
            if (DEBUG) {
                Log.d(TAG, String.format("parseServiceBlockData C1 %x", mCommand));
            }
            pos = parseC1(data, pos);
        } else if (mCommand >= Const.CODE_G0_RANGE_START
                && mCommand <= Const.CODE_G0_RANGE_END) {
            if (DEBUG) {
                Log.d(TAG, String.format("parseServiceBlockData G0 %x", mCommand));
            }
            pos = parseG0(data, pos);
        } else if (mCommand >= Const.CODE_G1_RANGE_START
                && mCommand <= Const.CODE_G1_RANGE_END) {
            if (DEBUG) {
                Log.d(TAG, String.format("parseServiceBlockData G1 %x", mCommand));
            }
            pos = parseG1(data, pos);
        }
        return pos;
    }
+
    /**
     * Handles a C0 (modified ASCII control) code already stored in mCommand,
     * consuming any operand bytes it implies, and returns the next position.
     * Text produced here (P16 characters, CR newlines) goes to mBuffer; other
     * recognized controls are forwarded as CAPTION_EMIT_TYPE_CONTROL events.
     */
    private int parseC0(byte[] data, int pos) {
        // For the details of C0 code group, see CEA-708B Section 7.4.1.
        // CL Group: C0 Subset of ASCII Control codes
        if (mCommand >= Const.CODE_C0_SKIP2_RANGE_START
                && mCommand <= Const.CODE_C0_SKIP2_RANGE_END) {
            if (mCommand == Const.CODE_C0_P16) {
                // P16 escapes next two bytes for the large character maps.(no standard rule)
                // For Korea broadcasting, express whole letters by using this.
                try {
                    if (data[pos] == 0) {
                        // High byte 0: the low byte is a plain single character.
                        mBuffer.append((char) data[pos + 1]);
                    } else {
                        String value = new String(Arrays.copyOfRange(data, pos, pos + 2), "EUC-KR");
                        mBuffer.append(value);
                    }
                } catch (UnsupportedEncodingException e) {
                    Log.e(TAG, "P16 Code - Could not find supported encoding", e);
                }
            }
            pos += 2;
        } else if (mCommand >= Const.CODE_C0_SKIP1_RANGE_START
                && mCommand <= Const.CODE_C0_SKIP1_RANGE_END) {
            ++pos;
        } else {
            // NUL, BS, FF, CR interpreted as they are in ASCII control codes.
            // HCR moves the pen location to the beginning of the current line and deletes contents.
            // FF clears the screen and moves the pen location to (0,0).
            // ETX is the NULL command which is used to flush text to the current window when no
            // other command is pending.
            switch (mCommand) {
                case Const.CODE_C0_NUL:
                    break;
                case Const.CODE_C0_ETX:
                    emitCaptionEvent(new CaptionEvent(CAPTION_EMIT_TYPE_CONTROL, (char) mCommand));
                    break;
                case Const.CODE_C0_BS:
                    emitCaptionEvent(new CaptionEvent(CAPTION_EMIT_TYPE_CONTROL, (char) mCommand));
                    break;
                case Const.CODE_C0_FF:
                    emitCaptionEvent(new CaptionEvent(CAPTION_EMIT_TYPE_CONTROL, (char) mCommand));
                    break;
                case Const.CODE_C0_CR:
                    // CR is translated straight into a newline in the text buffer.
                    mBuffer.append('\n');
                    break;
                case Const.CODE_C0_HCR:
                    emitCaptionEvent(new CaptionEvent(CAPTION_EMIT_TYPE_CONTROL, (char) mCommand));
                    break;
                default:
                    break;
            }
        }
        return pos;
    }
+
    /**
     * Handles a C1 caption command already stored in mCommand, decoding its
     * fixed-length operand bytes from {@code data} starting at {@code pos},
     * emitting the corresponding CAPTION_EMIT_TYPE_COMMAND_* event, and
     * returning the position just past the consumed operands. Unrecognized
     * C1 codes are ignored (no operands consumed).
     */
    private int parseC1(byte[] data, int pos) {
        // For the details of C1 code group, see CEA-708B Section 8.10.
        // CR Group: C1 Caption Control Codes
        switch (mCommand) {
            case Const.CODE_C1_CW0:
            case Const.CODE_C1_CW1:
            case Const.CODE_C1_CW2:
            case Const.CODE_C1_CW3:
            case Const.CODE_C1_CW4:
            case Const.CODE_C1_CW5:
            case Const.CODE_C1_CW6:
            case Const.CODE_C1_CW7: {
                // SetCurrentWindow0-7
                int windowId = mCommand - Const.CODE_C1_CW0;
                emitCaptionEvent(new CaptionEvent(CAPTION_EMIT_TYPE_COMMAND_CWX, windowId));
                if (DEBUG) {
                    Log.d(TAG, String.format("CaptionCommand CWX windowId: %d", windowId));
                }
                break;
            }

            case Const.CODE_C1_CLW: {
                // ClearWindows
                int windowBitmap = data[pos] & 0xff;
                ++pos;
                emitCaptionEvent(new CaptionEvent(CAPTION_EMIT_TYPE_COMMAND_CLW, windowBitmap));
                if (DEBUG) {
                    Log.d(TAG, String.format("CaptionCommand CLW windowBitmap: %d", windowBitmap));
                }
                break;
            }

            case Const.CODE_C1_DSW: {
                // DisplayWindows
                int windowBitmap = data[pos] & 0xff;
                ++pos;
                emitCaptionEvent(new CaptionEvent(CAPTION_EMIT_TYPE_COMMAND_DSW, windowBitmap));
                if (DEBUG) {
                    Log.d(TAG, String.format("CaptionCommand DSW windowBitmap: %d", windowBitmap));
                }
                break;
            }

            case Const.CODE_C1_HDW: {
                // HideWindows
                int windowBitmap = data[pos] & 0xff;
                ++pos;
                emitCaptionEvent(new CaptionEvent(CAPTION_EMIT_TYPE_COMMAND_HDW, windowBitmap));
                if (DEBUG) {
                    Log.d(TAG, String.format("CaptionCommand HDW windowBitmap: %d", windowBitmap));
                }
                break;
            }

            case Const.CODE_C1_TGW: {
                // ToggleWindows
                int windowBitmap = data[pos] & 0xff;
                ++pos;
                emitCaptionEvent(new CaptionEvent(CAPTION_EMIT_TYPE_COMMAND_TGW, windowBitmap));
                if (DEBUG) {
                    Log.d(TAG, String.format("CaptionCommand TGW windowBitmap: %d", windowBitmap));
                }
                break;
            }

            case Const.CODE_C1_DLW: {
                // DeleteWindows
                int windowBitmap = data[pos] & 0xff;
                ++pos;
                emitCaptionEvent(new CaptionEvent(CAPTION_EMIT_TYPE_COMMAND_DLW, windowBitmap));
                if (DEBUG) {
                    Log.d(TAG, String.format("CaptionCommand DLW windowBitmap: %d", windowBitmap));
                }
                break;
            }

            case Const.CODE_C1_DLY: {
                // Delay
                int tenthsOfSeconds = data[pos] & 0xff;
                ++pos;
                emitCaptionEvent(new CaptionEvent(CAPTION_EMIT_TYPE_COMMAND_DLY, tenthsOfSeconds));
                if (DEBUG) {
                    Log.d(TAG, String.format("CaptionCommand DLY %d tenths of seconds",
                            tenthsOfSeconds));
                }
                break;
            }
            case Const.CODE_C1_DLC: {
                // DelayCancel
                emitCaptionEvent(new CaptionEvent(CAPTION_EMIT_TYPE_COMMAND_DLC, null));
                if (DEBUG) {
                    Log.d(TAG, "CaptionCommand DLC");
                }
                break;
            }

            case Const.CODE_C1_RST: {
                // Reset
                emitCaptionEvent(new CaptionEvent(CAPTION_EMIT_TYPE_COMMAND_RST, null));
                if (DEBUG) {
                    Log.d(TAG, "CaptionCommand RST");
                }
                break;
            }

            case Const.CODE_C1_SPA: {
                // SetPenAttributes: two operand bytes of packed bit fields.
                int textTag = (data[pos] & 0xf0) >> 4;
                int penSize = data[pos] & 0x03;
                int penOffset = (data[pos] & 0x0c) >> 2;
                boolean italic = (data[pos + 1] & 0x80) != 0;
                boolean underline = (data[pos + 1] & 0x40) != 0;
                int edgeType = (data[pos + 1] & 0x38) >> 3;
                int fontTag = data[pos + 1] & 0x7;
                pos += 2;
                emitCaptionEvent(new CaptionEvent(CAPTION_EMIT_TYPE_COMMAND_SPA,
                        new CaptionPenAttr(penSize, penOffset, textTag, fontTag, edgeType,
                                underline, italic)));
                if (DEBUG) {
                    Log.d(TAG, String.format(
                            "CaptionCommand SPA penSize: %d, penOffset: %d, textTag: %d, "
                                    + "fontTag: %d, edgeType: %d, underline: %s, italic: %s",
                            penSize, penOffset, textTag, fontTag, edgeType, underline, italic));
                }
                break;
            }

            case Const.CODE_C1_SPC: {
                // SetPenColor: three operand bytes (foreground, background, edge).
                int opacity = (data[pos] & 0xc0) >> 6;
                int red = (data[pos] & 0x30) >> 4;
                int green = (data[pos] & 0x0c) >> 2;
                int blue = data[pos] & 0x03;
                CaptionColor foregroundColor = new CaptionColor(opacity, red, green, blue);
                ++pos;
                opacity = (data[pos] & 0xc0) >> 6;
                red = (data[pos] & 0x30) >> 4;
                green = (data[pos] & 0x0c) >> 2;
                blue = data[pos] & 0x03;
                CaptionColor backgroundColor = new CaptionColor(opacity, red, green, blue);
                ++pos;
                // The edge color's opacity bits are ignored; solid is forced.
                red = (data[pos] & 0x30) >> 4;
                green = (data[pos] & 0x0c) >> 2;
                blue = data[pos] & 0x03;
                CaptionColor edgeColor = new CaptionColor(
                        CaptionColor.OPACITY_SOLID, red, green, blue);
                ++pos;
                emitCaptionEvent(new CaptionEvent(CAPTION_EMIT_TYPE_COMMAND_SPC,
                        new CaptionPenColor(foregroundColor, backgroundColor, edgeColor)));
                if (DEBUG) {
                    Log.d(TAG, String.format(
                            "CaptionCommand SPC foregroundColor %s backgroundColor %s edgeColor %s",
                            foregroundColor, backgroundColor, edgeColor));
                }
                break;
            }

            case Const.CODE_C1_SPL: {
                // SetPenLocation
                // column is normally 0-31 for 4:3 formats, and 0-41 for 16:9 formats
                int row = data[pos] & 0x0f;
                int column = data[pos + 1] & 0x3f;
                pos += 2;
                emitCaptionEvent(new CaptionEvent(CAPTION_EMIT_TYPE_COMMAND_SPL,
                        new CaptionPenLocation(row, column)));
                if (DEBUG) {
                    Log.d(TAG, String.format("CaptionCommand SPL row: %d, column: %d",
                            row, column));
                }
                break;
            }

            case Const.CODE_C1_SWA: {
                // SetWindowAttributes: four operand bytes of packed bit fields.
                int opacity = (data[pos] & 0xc0) >> 6;
                int red = (data[pos] & 0x30) >> 4;
                int green = (data[pos] & 0x0c) >> 2;
                int blue = data[pos] & 0x03;
                CaptionColor fillColor = new CaptionColor(opacity, red, green, blue);
                // borderType's high bit is carried in the third operand byte.
                int borderType = (data[pos + 1] & 0xc0) >> 6 | (data[pos + 2] & 0x80) >> 5;
                red = (data[pos + 1] & 0x30) >> 4;
                green = (data[pos + 1] & 0x0c) >> 2;
                blue = data[pos + 1] & 0x03;
                CaptionColor borderColor = new CaptionColor(
                        CaptionColor.OPACITY_SOLID, red, green, blue);
                boolean wordWrap = (data[pos + 2] & 0x40) != 0;
                int printDirection = (data[pos + 2] & 0x30) >> 4;
                int scrollDirection = (data[pos + 2] & 0x0c) >> 2;
                int justify = (data[pos + 2] & 0x03);
                int effectSpeed = (data[pos + 3] & 0xf0) >> 4;
                int effectDirection = (data[pos + 3] & 0x0c) >> 2;
                int displayEffect = data[pos + 3] & 0x3;
                pos += 4;
                emitCaptionEvent(new CaptionEvent(CAPTION_EMIT_TYPE_COMMAND_SWA,
                        new CaptionWindowAttr(fillColor, borderColor, borderType, wordWrap,
                                printDirection, scrollDirection, justify,
                                effectDirection, effectSpeed, displayEffect)));
                if (DEBUG) {
                    Log.d(TAG, String.format(
                            "CaptionCommand SWA fillColor: %s, borderColor: %s, borderType: %d"
                                    + "wordWrap: %s, printDirection: %d, scrollDirection: %d, "
                                    + "justify: %s, effectDirection: %d, effectSpeed: %d, "
                                    + "displayEffect: %d",
                            fillColor, borderColor, borderType, wordWrap, printDirection,
                            scrollDirection, justify, effectDirection, effectSpeed, displayEffect));
                }
                break;
            }

            case Const.CODE_C1_DF0:
            case Const.CODE_C1_DF1:
            case Const.CODE_C1_DF2:
            case Const.CODE_C1_DF3:
            case Const.CODE_C1_DF4:
            case Const.CODE_C1_DF5:
            case Const.CODE_C1_DF6:
            case Const.CODE_C1_DF7: {
                // DefineWindow0-7: six operand bytes of packed bit fields.
                int windowId = mCommand - Const.CODE_C1_DF0;
                boolean visible = (data[pos] & 0x20) != 0;
                boolean rowLock = (data[pos] & 0x10) != 0;
                boolean columnLock = (data[pos] & 0x08) != 0;
                int priority = data[pos] & 0x07;
                boolean relativePositioning = (data[pos + 1] & 0x80) != 0;
                int anchorVertical = data[pos + 1] & 0x7f;
                int anchorHorizontal = data[pos + 2] & 0xff;
                int anchorId = (data[pos + 3] & 0xf0) >> 4;
                int rowCount = data[pos + 3] & 0x0f;
                int columnCount = data[pos + 4] & 0x3f;
                int windowStyle = (data[pos + 5] & 0x38) >> 3;
                int penStyle = data[pos + 5] & 0x07;
                pos += 6;
                emitCaptionEvent(new CaptionEvent(CAPTION_EMIT_TYPE_COMMAND_DFX,
                        new CaptionWindow(windowId, visible, rowLock, columnLock, priority,
                                relativePositioning, anchorVertical, anchorHorizontal, anchorId,
                                rowCount, columnCount, penStyle, windowStyle)));
                if (DEBUG) {
                    Log.d(TAG, String.format(
                            "CaptionCommand DFx windowId: %d, priority: %d, columnLock: %s, "
                                    + "rowLock: %s, visible: %s, anchorVertical: %d, "
                                    + "relativePositioning: %s, anchorHorizontal: %d, "
                                    + "rowCount: %d, anchorId: %d, columnCount: %d, penStyle: %d, "
                                    + "windowStyle: %d",
                            windowId, priority, columnLock, rowLock, visible, anchorVertical,
                            relativePositioning, anchorHorizontal, rowCount, anchorId, columnCount,
                            penStyle, windowStyle));
                }
                break;
            }

            default:
                break;
        }
        return pos;
    }
+
+ private int parseG0(byte[] data, int pos) {
+ // For the details of G0 code group, see CEA-708B Section 7.4.3.
+ // GL Group: G0 Modified version of ANSI X3.4 Printable Character Set (ASCII)
+ if (mCommand == Const.CODE_G0_MUSICNOTE) {
+ // Music note.
+ mBuffer.append(MUSIC_NOTE_CHAR);
+ } else {
+ // Put ASCII code into buffer.
+ mBuffer.append((char) mCommand);
+ }
+ return pos;
+ }
+
    /** Handles a G1 (Latin-1) code by appending it to the text buffer as-is. */
    private int parseG1(byte[] data, int pos) {
        // For the details of the G1 code group, see CEA-708B Section 7.4.4.
        // GR Group: G1 ISO 8859-1 Latin 1 Characters
        // Put ASCII Extended character set into buffer.
        mBuffer.append((char) mCommand);
        return pos;
    }
+
+ // Step 4. Extended code groups
+ private int parseExt1(byte[] data, int pos) {
+ // For the details of EXT1 code group, see CEA-708B Section 7.2.
+ mCommand = data[pos] & 0xff;
+ ++pos;
+ if (mCommand >= Const.CODE_C2_RANGE_START
+ && mCommand <= Const.CODE_C2_RANGE_END) {
+ pos = parseC2(data, pos);
+ } else if (mCommand >= Const.CODE_C3_RANGE_START
+ && mCommand <= Const.CODE_C3_RANGE_END) {
+ pos = parseC3(data, pos);
+ } else if (mCommand >= Const.CODE_G2_RANGE_START
+ && mCommand <= Const.CODE_G2_RANGE_END) {
+ pos = parseG2(data, pos);
+ } else if (mCommand >= Const.CODE_G3_RANGE_START
+ && mCommand <= Const.CODE_G3_RANGE_END) {
+ pos = parseG3(data ,pos);
+ }
+ return pos;
+ }
+
+ private int parseC2(byte[] data, int pos) {
+ // For the details of C2 code group, see CEA-708B Section 7.4.7.
+ // Extended Miscellaneous Control Codes
+ // C2 Table : No commands as of CEA-708B. A decoder must skip.
+ if (mCommand >= Const.CODE_C2_SKIP0_RANGE_START
+ && mCommand <= Const.CODE_C2_SKIP0_RANGE_END) {
+ // Do nothing.
+ } else if (mCommand >= Const.CODE_C2_SKIP1_RANGE_START
+ && mCommand <= Const.CODE_C2_SKIP1_RANGE_END) {
+ ++pos;
+ } else if (mCommand >= Const.CODE_C2_SKIP2_RANGE_START
+ && mCommand <= Const.CODE_C2_SKIP2_RANGE_END) {
+ pos += 2;
+ } else if (mCommand >= Const.CODE_C2_SKIP3_RANGE_START
+ && mCommand <= Const.CODE_C2_SKIP3_RANGE_END) {
+ pos += 3;
+ }
+ return pos;
+ }
+
+ private int parseC3(byte[] data, int pos) {
+ // For the details of C3 code group, see CEA-708B Section 7.4.8.
+ // Extended Control Code Set 2
+ // C3 Table : No commands as of CEA-708B. A decoder must skip.
+ if (mCommand >= Const.CODE_C3_SKIP4_RANGE_START
+ && mCommand <= Const.CODE_C3_SKIP4_RANGE_END) {
+ pos += 4;
+ } else if (mCommand >= Const.CODE_C3_SKIP5_RANGE_START
+ && mCommand <= Const.CODE_C3_SKIP5_RANGE_END) {
+ pos += 5;
+ }
+ return pos;
+ }
+
+ private int parseG2(byte[] data, int pos) {
+ // For the details of C3 code group, see CEA-708B Section 7.4.5.
+ // Extended Control Code Set 1(G2 Table)
+ switch (mCommand) {
+ case Const.CODE_G2_TSP:
+ // TODO : TSP is the Transparent space
+ break;
+ case Const.CODE_G2_NBTSP:
+ // TODO : NBTSP is Non-Breaking Transparent Space.
+ break;
+ case Const.CODE_G2_BLK:
+ // TODO : BLK indicates a solid block which fills the entire character block
+ // TODO : with a solid foreground color.
+ break;
+ default:
+ break;
+ }
+ return pos;
+ }
+
    /** Handles a G3 code (future characters and icons); currently a no-op. */
    private int parseG3(byte[] data, int pos) {
        // For the details of the G3 code group, see CEA-708B Section 7.4.6.
        // Future characters and icons(G3 Table)
        if (mCommand == Const.CODE_G3_CC) {
            // TODO : [CC] icon with square corners
        }

        // Do nothing
        return pos;
    }
+
    /**
     * @hide
     *
     * Collection of CEA-708 structures.
     */
    private static class Const {

        // Non-instantiable constant holder.
        private Const() {
        }

        // For the details of the ranges of DTVCC code groups, see CEA-708B Table 6.
        public static final int CODE_C0_RANGE_START = 0x00;
        public static final int CODE_C0_RANGE_END = 0x1f;
        public static final int CODE_C1_RANGE_START = 0x80;
        public static final int CODE_C1_RANGE_END = 0x9f;
        public static final int CODE_G0_RANGE_START = 0x20;
        public static final int CODE_G0_RANGE_END = 0x7f;
        public static final int CODE_G1_RANGE_START = 0xa0;
        public static final int CODE_G1_RANGE_END = 0xff;
        // Extended-group ranges (reached only via the EXT1 escape).
        public static final int CODE_C2_RANGE_START = 0x00;
        public static final int CODE_C2_RANGE_END = 0x1f;
        public static final int CODE_C3_RANGE_START = 0x80;
        public static final int CODE_C3_RANGE_END = 0x9f;
        public static final int CODE_G2_RANGE_START = 0x20;
        public static final int CODE_G2_RANGE_END = 0x7f;
        public static final int CODE_G3_RANGE_START = 0xa0;
        public static final int CODE_G3_RANGE_END = 0xff;

        // The following ranges are defined in CEA-708B Section 7.4.1.
        public static final int CODE_C0_SKIP2_RANGE_START = 0x18;
        public static final int CODE_C0_SKIP2_RANGE_END = 0x1f;
        public static final int CODE_C0_SKIP1_RANGE_START = 0x10;
        public static final int CODE_C0_SKIP1_RANGE_END = 0x17;

        // The following ranges are defined in CEA-708B Section 7.4.7.
        public static final int CODE_C2_SKIP0_RANGE_START = 0x00;
        public static final int CODE_C2_SKIP0_RANGE_END = 0x07;
        public static final int CODE_C2_SKIP1_RANGE_START = 0x08;
        public static final int CODE_C2_SKIP1_RANGE_END = 0x0f;
        public static final int CODE_C2_SKIP2_RANGE_START = 0x10;
        public static final int CODE_C2_SKIP2_RANGE_END = 0x17;
        public static final int CODE_C2_SKIP3_RANGE_START = 0x18;
        public static final int CODE_C2_SKIP3_RANGE_END = 0x1f;

        // The following ranges are defined in CEA-708B Section 7.4.8.
        public static final int CODE_C3_SKIP4_RANGE_START = 0x80;
        public static final int CODE_C3_SKIP4_RANGE_END = 0x87;
        public static final int CODE_C3_SKIP5_RANGE_START = 0x88;
        public static final int CODE_C3_SKIP5_RANGE_END = 0x8f;

        // The following values are the special characters of CEA-708 spec.
        public static final int CODE_C0_NUL = 0x00;
        public static final int CODE_C0_ETX = 0x03;
        public static final int CODE_C0_BS = 0x08;
        public static final int CODE_C0_FF = 0x0c;
        public static final int CODE_C0_CR = 0x0d;
        public static final int CODE_C0_HCR = 0x0e;
        public static final int CODE_C0_EXT1 = 0x10;
        public static final int CODE_C0_P16 = 0x18;
        public static final int CODE_G0_MUSICNOTE = 0x7f;
        public static final int CODE_G2_TSP = 0x20;
        public static final int CODE_G2_NBTSP = 0x21;
        public static final int CODE_G2_BLK = 0x30;
        public static final int CODE_G3_CC = 0xa0;

        // The following values are the command bits of CEA-708 spec.
        public static final int CODE_C1_CW0 = 0x80;
        public static final int CODE_C1_CW1 = 0x81;
        public static final int CODE_C1_CW2 = 0x82;
        public static final int CODE_C1_CW3 = 0x83;
        public static final int CODE_C1_CW4 = 0x84;
        public static final int CODE_C1_CW5 = 0x85;
        public static final int CODE_C1_CW6 = 0x86;
        public static final int CODE_C1_CW7 = 0x87;
        public static final int CODE_C1_CLW = 0x88;
        public static final int CODE_C1_DSW = 0x89;
        public static final int CODE_C1_HDW = 0x8a;
        public static final int CODE_C1_TGW = 0x8b;
        public static final int CODE_C1_DLW = 0x8c;
        public static final int CODE_C1_DLY = 0x8d;
        public static final int CODE_C1_DLC = 0x8e;
        public static final int CODE_C1_RST = 0x8f;
        public static final int CODE_C1_SPA = 0x90;
        public static final int CODE_C1_SPC = 0x91;
        public static final int CODE_C1_SPL = 0x92;
        // NOTE(review): 0x93-0x96 carry no constants here -- presumably
        // reserved codes in CEA-708B; confirm against the spec table.
        public static final int CODE_C1_SWA = 0x97;
        public static final int CODE_C1_DF0 = 0x98;
        public static final int CODE_C1_DF1 = 0x99;
        public static final int CODE_C1_DF2 = 0x9a;
        public static final int CODE_C1_DF3 = 0x9b;
        public static final int CODE_C1_DF4 = 0x9c;
        public static final int CODE_C1_DF5 = 0x9d;
        public static final int CODE_C1_DF6 = 0x9e;
        public static final int CODE_C1_DF7 = 0x9f;
    }
+
+ /**
+ * @hide
+ *
+ * CEA-708B-specific color.
+ */
+ public static class CaptionColor {
+ public static final int OPACITY_SOLID = 0;
+ public static final int OPACITY_FLASH = 1;
+ public static final int OPACITY_TRANSLUCENT = 2;
+ public static final int OPACITY_TRANSPARENT = 3;
+
+ private static final int[] COLOR_MAP = new int[] { 0x00, 0x0f, 0xf0, 0xff };
+ private static final int[] OPACITY_MAP = new int[] { 0xff, 0xfe, 0x80, 0x00 };
+
+ public final int opacity;
+ public final int red;
+ public final int green;
+ public final int blue;
+
+ public CaptionColor(int opacity, int red, int green, int blue) {
+ this.opacity = opacity;
+ this.red = red;
+ this.green = green;
+ this.blue = blue;
+ }
+
+ public int getArgbValue() {
+ return Color.argb(
+ OPACITY_MAP[opacity], COLOR_MAP[red], COLOR_MAP[green], COLOR_MAP[blue]);
+ }
+ }
+
    /**
     * @hide
     *
     * Caption event generated by {@link Cea708CCParser}.
     */
    public static class CaptionEvent {
        // Event kind; one of the parser's CAPTION_EMIT_TYPE_* constants.
        public final int type;
        // Event payload; its runtime class depends on {@link #type} (e.g. String for
        // buffer events, Character for control events, Integer for window-bitmap
        // commands -- see CCHandler.processCaptionEvent's casts).
        public final Object obj;

        public CaptionEvent(int type, Object obj) {
            this.type = type;
            this.obj = obj;
        }
    }
+
    /**
     * @hide
     *
     * Pen style information carried by a CEA-708B SetPenAttributes (SPA) command.
     */
    public static class CaptionPenAttr {
        // Pen sizes
        public static final int PEN_SIZE_SMALL = 0;
        public static final int PEN_SIZE_STANDARD = 1;
        public static final int PEN_SIZE_LARGE = 2;

        // Offsets
        public static final int OFFSET_SUBSCRIPT = 0;
        public static final int OFFSET_NORMAL = 1;
        public static final int OFFSET_SUPERSCRIPT = 2;

        // One of the PEN_SIZE_* constants above.
        public final int penSize;
        // One of the OFFSET_* constants above (subscript/normal/superscript).
        public final int penOffset;
        // Raw field from the SPA command; presumably the CEA-708 text tag -- not
        // interpreted by the renderer in this file.
        public final int textTag;
        // Raw font tag field; not interpreted by the renderer in this file.
        public final int fontTag;
        // Raw edge type field; not interpreted by the renderer in this file.
        public final int edgeType;
        public final boolean underline;
        public final boolean italic;

        public CaptionPenAttr(int penSize, int penOffset, int textTag, int fontTag, int edgeType,
                boolean underline, boolean italic) {
            this.penSize = penSize;
            this.penOffset = penOffset;
            this.textTag = textTag;
            this.fontTag = fontTag;
            this.edgeType = edgeType;
            this.underline = underline;
            this.italic = italic;
        }
    }
+
    /**
     * @hide
     *
     * {@link CaptionColor} objects that indicate the foreground, background, and edge color of a
     * pen.
     */
    public static class CaptionPenColor {
        // Color of the caption text itself.
        public final CaptionColor foregroundColor;
        // Color behind the caption text.
        public final CaptionColor backgroundColor;
        // Color of the text edge/outline.
        public final CaptionColor edgeColor;

        public CaptionPenColor(CaptionColor foregroundColor, CaptionColor backgroundColor,
                CaptionColor edgeColor) {
            this.foregroundColor = foregroundColor;
            this.backgroundColor = backgroundColor;
            this.edgeColor = edgeColor;
        }
    }
+
    /**
     * @hide
     *
     * Location information of a pen (SetPenLocation command payload).
     */
    public static class CaptionPenLocation {
        // Target row of the pen within the caption window.
        public final int row;
        // Target column of the pen within the caption window.
        public final int column;

        public CaptionPenLocation(int row, int column) {
            this.row = row;
            this.column = column;
        }
    }
+
    /**
     * @hide
     *
     * Attributes of a caption window, which is defined in CEA-708B
     * (SetWindowAttributes command payload).
     */
    public static class CaptionWindowAttr {
        // Window fill (background) color.
        public final CaptionColor fillColor;
        // Window border color.
        public final CaptionColor borderColor;
        // Raw border type code from the command; not interpreted by the renderer here.
        public final int borderType;
        public final boolean wordWrap;
        // Raw direction/justification/effect codes from the command; presumably the
        // CEA-708B enumerations -- none are interpreted by the renderer in this file.
        public final int printDirection;
        public final int scrollDirection;
        public final int justify;
        public final int effectDirection;
        public final int effectSpeed;
        public final int displayEffect;

        public CaptionWindowAttr(CaptionColor fillColor, CaptionColor borderColor, int borderType,
                boolean wordWrap, int printDirection, int scrollDirection, int justify,
                int effectDirection,
                int effectSpeed, int displayEffect) {
            this.fillColor = fillColor;
            this.borderColor = borderColor;
            this.borderType = borderType;
            this.wordWrap = wordWrap;
            this.printDirection = printDirection;
            this.scrollDirection = scrollDirection;
            this.justify = justify;
            this.effectDirection = effectDirection;
            this.effectSpeed = effectSpeed;
            this.displayEffect = displayEffect;
        }
    }
+
    /**
     * @hide
     *
     * Construction information of the caption window of CEA-708B
     * (DefineWindow command payload).
     */
    public static class CaptionWindow {
        // Window id; CCHandler uses it as an index into its window array (0-7).
        public final int id;
        public final boolean visible;
        public final boolean rowLock;
        public final boolean columnLock;
        public final int priority;
        // If true, anchorVertical/anchorHorizontal are percentages of the screen;
        // otherwise they are absolute cell positions (see CCWindowLayout.initWindow).
        public final boolean relativePositioning;
        public final int anchorVertical;
        public final int anchorHorizontal;
        // Anchor point id 0-8 encoding the window gravity (see the table in
        // CCWindowLayout.initWindow's javadoc).
        public final int anchorId;
        // Window dimensions in caption cells.
        public final int rowCount;
        public final int columnCount;
        // Predefined pen/window style ids; presumably the CEA-708B preset tables --
        // not interpreted by the renderer in this file.
        public final int penStyle;
        public final int windowStyle;

        public CaptionWindow(int id, boolean visible,
                boolean rowLock, boolean columnLock, int priority, boolean relativePositioning,
                int anchorVertical, int anchorHorizontal, int anchorId,
                int rowCount, int columnCount, int penStyle, int windowStyle) {
            this.id = id;
            this.visible = visible;
            this.rowLock = rowLock;
            this.columnLock = columnLock;
            this.priority = priority;
            this.relativePositioning = relativePositioning;
            this.anchorVertical = anchorVertical;
            this.anchorHorizontal = anchorHorizontal;
            this.anchorId = anchorId;
            this.rowCount = rowCount;
            this.columnCount = columnCount;
            this.penStyle = penStyle;
            this.windowStyle = windowStyle;
        }
    }
+}
+
/**
 * Widget capable of rendering CEA-708 closed captions.
 *
 * @hide
 */
class Cea708CCWidget extends ClosedCaptionWidget implements Cea708CCParser.DisplayListener {
    // Translates parsed caption events into operations on the widget's CCLayout.
    private final CCHandler mCCHandler;

    public Cea708CCWidget(Context context) {
        this(context, null);
    }

    public Cea708CCWidget(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public Cea708CCWidget(Context context, AttributeSet attrs, int defStyleAttr) {
        this(context, attrs, defStyleAttr, 0);
    }

    /** Full constructor; the shorter constructors delegate here with defaults. */
    public Cea708CCWidget(Context context, AttributeSet attrs, int defStyleAttr,
            int defStyleRes) {
        super(context, attrs, defStyleAttr, defStyleRes);

        // mClosedCaptionLayout is created by the superclass constructor via
        // createCaptionLayout(), which this widget overrides to return a CCLayout.
        mCCHandler = new CCHandler((CCLayout) mClosedCaptionLayout);
    }
+
    /** Creates the {@link CCLayout} the base widget uses to host caption windows. */
    @Override
    public ClosedCaptionLayout createCaptionLayout(Context context) {
        return new CCLayout(context);
    }
+
+ @Override
+ public void emitEvent(Cea708CCParser.CaptionEvent event) {
+ mCCHandler.processCaptionEvent(event);
+
+ setSize(getWidth(), getHeight());
+
+ if (mListener != null) {
+ mListener.onChanged(this);
+ }
+ }
+
    @Override
    public void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        // Explicitly draw the caption layout on top of the superclass's output.
        ((ViewGroup) mClosedCaptionLayout).draw(canvas);
    }
+
+ /**
+ * @hide
+ *
+ * A layout that scales its children using the given percentage value.
+ */
+ static class ScaledLayout extends ViewGroup {
+ private static final String TAG = "ScaledLayout";
+ private static final boolean DEBUG = false;
+ private static final Comparator<Rect> mRectTopLeftSorter = new Comparator<Rect>() {
+ @Override
+ public int compare(Rect lhs, Rect rhs) {
+ if (lhs.top != rhs.top) {
+ return lhs.top - rhs.top;
+ } else {
+ return lhs.left - rhs.left;
+ }
+ }
+ };
+
+ private Rect[] mRectArray;
+
+ public ScaledLayout(Context context) {
+ super(context);
+ }
+
+ /**
+ * @hide
+ *
+ * ScaledLayoutParams stores the four scale factors.
+ * <br>
+ * Vertical coordinate system: (scaleStartRow * 100) % ~ (scaleEndRow * 100) %
+ * Horizontal coordinate system: (scaleStartCol * 100) % ~ (scaleEndCol * 100) %
+ * <br>
+ * In XML, for example,
+ * <pre>
+ * {@code
+ * <View
+ * app:layout_scaleStartRow="0.1"
+ * app:layout_scaleEndRow="0.5"
+ * app:layout_scaleStartCol="0.4"
+ * app:layout_scaleEndCol="1" />
+ * }
+ * </pre>
+ */
+ static class ScaledLayoutParams extends ViewGroup.LayoutParams {
+ public static final float SCALE_UNSPECIFIED = -1;
+ public float scaleStartRow;
+ public float scaleEndRow;
+ public float scaleStartCol;
+ public float scaleEndCol;
+
+ public ScaledLayoutParams(float scaleStartRow, float scaleEndRow,
+ float scaleStartCol, float scaleEndCol) {
+ super(MATCH_PARENT, MATCH_PARENT);
+ this.scaleStartRow = scaleStartRow;
+ this.scaleEndRow = scaleEndRow;
+ this.scaleStartCol = scaleStartCol;
+ this.scaleEndCol = scaleEndCol;
+ }
+
+ public ScaledLayoutParams(Context context, AttributeSet attrs) {
+ super(MATCH_PARENT, MATCH_PARENT);
+ }
+ }
+
+ @Override
+ public LayoutParams generateLayoutParams(AttributeSet attrs) {
+ return new ScaledLayoutParams(getContext(), attrs);
+ }
+
+ @Override
+ protected boolean checkLayoutParams(LayoutParams p) {
+ return (p instanceof ScaledLayoutParams);
+ }
+
+ @Override
+ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
+ int widthSpecSize = MeasureSpec.getSize(widthMeasureSpec);
+ int heightSpecSize = MeasureSpec.getSize(heightMeasureSpec);
+ int width = widthSpecSize - getPaddingLeft() - getPaddingRight();
+ int height = heightSpecSize - getPaddingTop() - getPaddingBottom();
+ if (DEBUG) {
+ Log.d(TAG, String.format("onMeasure width: %d, height: %d", width, height));
+ }
+ int count = getChildCount();
+ mRectArray = new Rect[count];
+ for (int i = 0; i < count; ++i) {
+ View child = getChildAt(i);
+ ViewGroup.LayoutParams params = child.getLayoutParams();
+ float scaleStartRow, scaleEndRow, scaleStartCol, scaleEndCol;
+ if (!(params instanceof ScaledLayoutParams)) {
+ throw new RuntimeException(
+ "A child of ScaledLayout cannot have the UNSPECIFIED scale factors");
+ }
+ scaleStartRow = ((ScaledLayoutParams) params).scaleStartRow;
+ scaleEndRow = ((ScaledLayoutParams) params).scaleEndRow;
+ scaleStartCol = ((ScaledLayoutParams) params).scaleStartCol;
+ scaleEndCol = ((ScaledLayoutParams) params).scaleEndCol;
+ if (scaleStartRow < 0 || scaleStartRow > 1) {
+ throw new RuntimeException("A child of ScaledLayout should have a range of "
+ + "scaleStartRow between 0 and 1");
+ }
+ if (scaleEndRow < scaleStartRow || scaleStartRow > 1) {
+ throw new RuntimeException("A child of ScaledLayout should have a range of "
+ + "scaleEndRow between scaleStartRow and 1");
+ }
+ if (scaleEndCol < 0 || scaleEndCol > 1) {
+ throw new RuntimeException("A child of ScaledLayout should have a range of "
+ + "scaleStartCol between 0 and 1");
+ }
+ if (scaleEndCol < scaleStartCol || scaleEndCol > 1) {
+ throw new RuntimeException("A child of ScaledLayout should have a range of "
+ + "scaleEndCol between scaleStartCol and 1");
+ }
+ if (DEBUG) {
+ Log.d(TAG, String.format("onMeasure child scaleStartRow: %f scaleEndRow: %f "
+ + "scaleStartCol: %f scaleEndCol: %f",
+ scaleStartRow, scaleEndRow, scaleStartCol, scaleEndCol));
+ }
+ mRectArray[i] = new Rect((int) (scaleStartCol * width), (int) (scaleStartRow
+ * height), (int) (scaleEndCol * width), (int) (scaleEndRow * height));
+ int childWidthSpec = MeasureSpec.makeMeasureSpec(
+ (int) (width * (scaleEndCol - scaleStartCol)), MeasureSpec.EXACTLY);
+ int childHeightSpec = MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED);
+ child.measure(childWidthSpec, childHeightSpec);
+
+ // If the height of the measured child view is bigger than the height of the
+ // calculated region by the given ScaleLayoutParams, the height of the region should
+ // be increased to fit the size of the child view.
+ if (child.getMeasuredHeight() > mRectArray[i].height()) {
+ int overflowedHeight = child.getMeasuredHeight() - mRectArray[i].height();
+ overflowedHeight = (overflowedHeight + 1) / 2;
+ mRectArray[i].bottom += overflowedHeight;
+ mRectArray[i].top -= overflowedHeight;
+ if (mRectArray[i].top < 0) {
+ mRectArray[i].bottom -= mRectArray[i].top;
+ mRectArray[i].top = 0;
+ }
+ if (mRectArray[i].bottom > height) {
+ mRectArray[i].top -= mRectArray[i].bottom - height;
+ mRectArray[i].bottom = height;
+ }
+ }
+ childHeightSpec = MeasureSpec.makeMeasureSpec(
+ (int) (height * (scaleEndRow - scaleStartRow)), MeasureSpec.EXACTLY);
+ child.measure(childWidthSpec, childHeightSpec);
+ }
+
+ // Avoid overlapping rectangles.
+ // Step 1. Sort rectangles by position (top-left).
+ int visibleRectCount = 0;
+ int[] visibleRectGroup = new int[count];
+ Rect[] visibleRectArray = new Rect[count];
+ for (int i = 0; i < count; ++i) {
+ if (getChildAt(i).getVisibility() == View.VISIBLE) {
+ visibleRectGroup[visibleRectCount] = visibleRectCount;
+ visibleRectArray[visibleRectCount] = mRectArray[i];
+ ++visibleRectCount;
+ }
+ }
+ Arrays.sort(visibleRectArray, 0, visibleRectCount, mRectTopLeftSorter);
+
+ // Step 2. Move down if there are overlapping rectangles.
+ for (int i = 0; i < visibleRectCount - 1; ++i) {
+ for (int j = i + 1; j < visibleRectCount; ++j) {
+ if (Rect.intersects(visibleRectArray[i], visibleRectArray[j])) {
+ visibleRectGroup[j] = visibleRectGroup[i];
+ visibleRectArray[j].set(visibleRectArray[j].left,
+ visibleRectArray[i].bottom,
+ visibleRectArray[j].right,
+ visibleRectArray[i].bottom + visibleRectArray[j].height());
+ }
+ }
+ }
+
+ // Step 3. Move up if there is any overflowed rectangle.
+ for (int i = visibleRectCount - 1; i >= 0; --i) {
+ if (visibleRectArray[i].bottom > height) {
+ int overflowedHeight = visibleRectArray[i].bottom - height;
+ for (int j = 0; j <= i; ++j) {
+ if (visibleRectGroup[i] == visibleRectGroup[j]) {
+ visibleRectArray[j].set(visibleRectArray[j].left,
+ visibleRectArray[j].top - overflowedHeight,
+ visibleRectArray[j].right,
+ visibleRectArray[j].bottom - overflowedHeight);
+ }
+ }
+ }
+ }
+ setMeasuredDimension(widthSpecSize, heightSpecSize);
+ }
+
+ @Override
+ protected void onLayout(boolean changed, int l, int t, int r, int b) {
+ int paddingLeft = getPaddingLeft();
+ int paddingTop = getPaddingTop();
+ int count = getChildCount();
+ for (int i = 0; i < count; ++i) {
+ View child = getChildAt(i);
+ if (child.getVisibility() != GONE) {
+ int childLeft = paddingLeft + mRectArray[i].left;
+ int childTop = paddingTop + mRectArray[i].top;
+ int childBottom = paddingLeft + mRectArray[i].bottom;
+ int childRight = paddingTop + mRectArray[i].right;
+ if (DEBUG) {
+ Log.d(TAG, String.format(
+ "child layout bottom: %d left: %d right: %d top: %d",
+ childBottom, childLeft, childRight, childTop));
+ }
+ child.layout(childLeft, childTop, childRight, childBottom);
+ }
+ }
+ }
+
+ @Override
+ public void dispatchDraw(Canvas canvas) {
+ int paddingLeft = getPaddingLeft();
+ int paddingTop = getPaddingTop();
+ int count = getChildCount();
+ for (int i = 0; i < count; ++i) {
+ View child = getChildAt(i);
+ if (child.getVisibility() != GONE) {
+ if (i >= mRectArray.length) {
+ break;
+ }
+ int childLeft = paddingLeft + mRectArray[i].left;
+ int childTop = paddingTop + mRectArray[i].top;
+ final int saveCount = canvas.save();
+ canvas.translate(childLeft, childTop);
+ child.draw(canvas);
+ canvas.restoreToCount(saveCount);
+ }
+ }
+ }
+ }
+
+ /**
+ * @hide
+ *
+ * Layout containing the safe title area that helps the closed captions look more prominent.
+ *
+ * <p>This is required by CEA-708B.
+ */
+ static class CCLayout extends ScaledLayout implements ClosedCaptionLayout {
+ private static final float SAFE_TITLE_AREA_SCALE_START_X = 0.1f;
+ private static final float SAFE_TITLE_AREA_SCALE_END_X = 0.9f;
+ private static final float SAFE_TITLE_AREA_SCALE_START_Y = 0.1f;
+ private static final float SAFE_TITLE_AREA_SCALE_END_Y = 0.9f;
+
+ private final ScaledLayout mSafeTitleAreaLayout;
+
+ public CCLayout(Context context) {
+ super(context);
+
+ mSafeTitleAreaLayout = new ScaledLayout(context);
+ addView(mSafeTitleAreaLayout, new ScaledLayout.ScaledLayoutParams(
+ SAFE_TITLE_AREA_SCALE_START_X, SAFE_TITLE_AREA_SCALE_END_X,
+ SAFE_TITLE_AREA_SCALE_START_Y, SAFE_TITLE_AREA_SCALE_END_Y));
+ }
+
+ public void addOrUpdateViewToSafeTitleArea(CCWindowLayout captionWindowLayout,
+ ScaledLayoutParams scaledLayoutParams) {
+ int index = mSafeTitleAreaLayout.indexOfChild(captionWindowLayout);
+ if (index < 0) {
+ mSafeTitleAreaLayout.addView(captionWindowLayout, scaledLayoutParams);
+ return;
+ }
+ mSafeTitleAreaLayout.updateViewLayout(captionWindowLayout, scaledLayoutParams);
+ }
+
+ public void removeViewFromSafeTitleArea(CCWindowLayout captionWindowLayout) {
+ mSafeTitleAreaLayout.removeView(captionWindowLayout);
+ }
+
+ public void setCaptionStyle(CaptionStyle style) {
+ final int count = mSafeTitleAreaLayout.getChildCount();
+ for (int i = 0; i < count; ++i) {
+ final CCWindowLayout windowLayout =
+ (CCWindowLayout) mSafeTitleAreaLayout.getChildAt(i);
+ windowLayout.setCaptionStyle(style);
+ }
+ }
+
+ public void setFontScale(float fontScale) {
+ final int count = mSafeTitleAreaLayout.getChildCount();
+ for (int i = 0; i < count; ++i) {
+ final CCWindowLayout windowLayout =
+ (CCWindowLayout) mSafeTitleAreaLayout.getChildAt(i);
+ windowLayout.setFontScale(fontScale);
+ }
+ }
+ }
+
+ /**
+ * @hide
+ *
+ * Renders the selected CC track.
+ */
+ static class CCHandler implements Handler.Callback {
+ // TODO: Remaining works
+ // CaptionTrackRenderer does not support the full spec of CEA-708. The remaining works are
+ // described in the follows.
+ // C0 Table: Backspace, FF, and HCR are not supported. The rule for P16 is not standardized
+ // but it is handled as EUC-KR charset for Korea broadcasting.
+ // C1 Table: All the styles of windows and pens except underline, italic, pen size, and pen
+ // offset specified in CEA-708 are ignored and this follows system wide CC
+ // preferences for look and feel. SetPenLocation is not implemented.
+ // G2 Table: TSP, NBTSP and BLK are not supported.
+ // Text/commands: Word wrapping, fonts, row and column locking are not supported.
+
+ private static final String TAG = "CCHandler";
+ private static final boolean DEBUG = false;
+
+ private static final int TENTHS_OF_SECOND_IN_MILLIS = 100;
+
+ // According to CEA-708B, there can exist up to 8 caption windows.
+ private static final int CAPTION_WINDOWS_MAX = 8;
+ private static final int CAPTION_ALL_WINDOWS_BITMAP = 255;
+
+ private static final int MSG_DELAY_CANCEL = 1;
+ private static final int MSG_CAPTION_CLEAR = 2;
+
+ private static final long CAPTION_CLEAR_INTERVAL_MS = 60000;
+
+ private final CCLayout mCCLayout;
+ private boolean mIsDelayed = false;
+ private CCWindowLayout mCurrentWindowLayout;
+ private final CCWindowLayout[] mCaptionWindowLayouts =
+ new CCWindowLayout[CAPTION_WINDOWS_MAX];
+ private final ArrayList<Cea708CCParser.CaptionEvent> mPendingCaptionEvents
+ = new ArrayList<>();
+ private final Handler mHandler;
+
+ public CCHandler(CCLayout ccLayout) {
+ mCCLayout = ccLayout;
+ mHandler = new Handler(this);
+ }
+
+ @Override
+ public boolean handleMessage(Message msg) {
+ switch (msg.what) {
+ case MSG_DELAY_CANCEL:
+ delayCancel();
+ return true;
+ case MSG_CAPTION_CLEAR:
+ clearWindows(CAPTION_ALL_WINDOWS_BITMAP);
+ return true;
+ }
+ return false;
+ }
+
+ public void processCaptionEvent(Cea708CCParser.CaptionEvent event) {
+ if (mIsDelayed) {
+ mPendingCaptionEvents.add(event);
+ return;
+ }
+ switch (event.type) {
+ case Cea708CCParser.CAPTION_EMIT_TYPE_BUFFER:
+ sendBufferToCurrentWindow((String) event.obj);
+ break;
+ case Cea708CCParser.CAPTION_EMIT_TYPE_CONTROL:
+ sendControlToCurrentWindow((char) event.obj);
+ break;
+ case Cea708CCParser.CAPTION_EMIT_TYPE_COMMAND_CWX:
+ setCurrentWindowLayout((int) event.obj);
+ break;
+ case Cea708CCParser.CAPTION_EMIT_TYPE_COMMAND_CLW:
+ clearWindows((int) event.obj);
+ break;
+ case Cea708CCParser.CAPTION_EMIT_TYPE_COMMAND_DSW:
+ displayWindows((int) event.obj);
+ break;
+ case Cea708CCParser.CAPTION_EMIT_TYPE_COMMAND_HDW:
+ hideWindows((int) event.obj);
+ break;
+ case Cea708CCParser.CAPTION_EMIT_TYPE_COMMAND_TGW:
+ toggleWindows((int) event.obj);
+ break;
+ case Cea708CCParser.CAPTION_EMIT_TYPE_COMMAND_DLW:
+ deleteWindows((int) event.obj);
+ break;
+ case Cea708CCParser.CAPTION_EMIT_TYPE_COMMAND_DLY:
+ delay((int) event.obj);
+ break;
+ case Cea708CCParser.CAPTION_EMIT_TYPE_COMMAND_DLC:
+ delayCancel();
+ break;
+ case Cea708CCParser.CAPTION_EMIT_TYPE_COMMAND_RST:
+ reset();
+ break;
+ case Cea708CCParser.CAPTION_EMIT_TYPE_COMMAND_SPA:
+ setPenAttr((Cea708CCParser.CaptionPenAttr) event.obj);
+ break;
+ case Cea708CCParser.CAPTION_EMIT_TYPE_COMMAND_SPC:
+ setPenColor((Cea708CCParser.CaptionPenColor) event.obj);
+ break;
+ case Cea708CCParser.CAPTION_EMIT_TYPE_COMMAND_SPL:
+ setPenLocation((Cea708CCParser.CaptionPenLocation) event.obj);
+ break;
+ case Cea708CCParser.CAPTION_EMIT_TYPE_COMMAND_SWA:
+ setWindowAttr((Cea708CCParser.CaptionWindowAttr) event.obj);
+ break;
+ case Cea708CCParser.CAPTION_EMIT_TYPE_COMMAND_DFX:
+ defineWindow((Cea708CCParser.CaptionWindow) event.obj);
+ break;
+ }
+ }
+
+ // The window related caption commands
+ private void setCurrentWindowLayout(int windowId) {
+ if (windowId < 0 || windowId >= mCaptionWindowLayouts.length) {
+ return;
+ }
+ CCWindowLayout windowLayout = mCaptionWindowLayouts[windowId];
+ if (windowLayout == null) {
+ return;
+ }
+ if (DEBUG) {
+ Log.d(TAG, "setCurrentWindowLayout to " + windowId);
+ }
+ mCurrentWindowLayout = windowLayout;
+ }
+
+ // Each bit of windowBitmap indicates a window.
+ // If a bit is set, the window id is the same as the number of the trailing zeros of the
+ // bit.
+ private ArrayList<CCWindowLayout> getWindowsFromBitmap(int windowBitmap) {
+ ArrayList<CCWindowLayout> windows = new ArrayList<>();
+ for (int i = 0; i < CAPTION_WINDOWS_MAX; ++i) {
+ if ((windowBitmap & (1 << i)) != 0) {
+ CCWindowLayout windowLayout = mCaptionWindowLayouts[i];
+ if (windowLayout != null) {
+ windows.add(windowLayout);
+ }
+ }
+ }
+ return windows;
+ }
+
+ private void clearWindows(int windowBitmap) {
+ if (windowBitmap == 0) {
+ return;
+ }
+ for (CCWindowLayout windowLayout : getWindowsFromBitmap(windowBitmap)) {
+ windowLayout.clear();
+ }
+ }
+
+ private void displayWindows(int windowBitmap) {
+ if (windowBitmap == 0) {
+ return;
+ }
+ for (CCWindowLayout windowLayout : getWindowsFromBitmap(windowBitmap)) {
+ windowLayout.show();
+ }
+ }
+
+ private void hideWindows(int windowBitmap) {
+ if (windowBitmap == 0) {
+ return;
+ }
+ for (CCWindowLayout windowLayout : getWindowsFromBitmap(windowBitmap)) {
+ windowLayout.hide();
+ }
+ }
+
+ private void toggleWindows(int windowBitmap) {
+ if (windowBitmap == 0) {
+ return;
+ }
+ for (CCWindowLayout windowLayout : getWindowsFromBitmap(windowBitmap)) {
+ if (windowLayout.isShown()) {
+ windowLayout.hide();
+ } else {
+ windowLayout.show();
+ }
+ }
+ }
+
+ private void deleteWindows(int windowBitmap) {
+ if (windowBitmap == 0) {
+ return;
+ }
+ for (CCWindowLayout windowLayout : getWindowsFromBitmap(windowBitmap)) {
+ windowLayout.removeFromCaptionView();
+ mCaptionWindowLayouts[windowLayout.getCaptionWindowId()] = null;
+ }
+ }
+
+ public void reset() {
+ mCurrentWindowLayout = null;
+ mIsDelayed = false;
+ mPendingCaptionEvents.clear();
+ for (int i = 0; i < CAPTION_WINDOWS_MAX; ++i) {
+ if (mCaptionWindowLayouts[i] != null) {
+ mCaptionWindowLayouts[i].removeFromCaptionView();
+ }
+ mCaptionWindowLayouts[i] = null;
+ }
+ mCCLayout.setVisibility(View.INVISIBLE);
+ mHandler.removeMessages(MSG_CAPTION_CLEAR);
+ }
+
+ private void setWindowAttr(Cea708CCParser.CaptionWindowAttr windowAttr) {
+ if (mCurrentWindowLayout != null) {
+ mCurrentWindowLayout.setWindowAttr(windowAttr);
+ }
+ }
+
+ private void defineWindow(Cea708CCParser.CaptionWindow window) {
+ if (window == null) {
+ return;
+ }
+ int windowId = window.id;
+ if (windowId < 0 || windowId >= mCaptionWindowLayouts.length) {
+ return;
+ }
+ CCWindowLayout windowLayout = mCaptionWindowLayouts[windowId];
+ if (windowLayout == null) {
+ windowLayout = new CCWindowLayout(mCCLayout.getContext());
+ }
+ windowLayout.initWindow(mCCLayout, window);
+ mCurrentWindowLayout = mCaptionWindowLayouts[windowId] = windowLayout;
+ }
+
+ // The job related caption commands
+ private void delay(int tenthsOfSeconds) {
+ if (tenthsOfSeconds < 0 || tenthsOfSeconds > 255) {
+ return;
+ }
+ mIsDelayed = true;
+ mHandler.sendMessageDelayed(mHandler.obtainMessage(MSG_DELAY_CANCEL),
+ tenthsOfSeconds * TENTHS_OF_SECOND_IN_MILLIS);
+ }
+
+ private void delayCancel() {
+ mIsDelayed = false;
+ processPendingBuffer();
+ }
+
+ private void processPendingBuffer() {
+ for (Cea708CCParser.CaptionEvent event : mPendingCaptionEvents) {
+ processCaptionEvent(event);
+ }
+ mPendingCaptionEvents.clear();
+ }
+
+ // The implicit write caption commands
+ private void sendControlToCurrentWindow(char control) {
+ if (mCurrentWindowLayout != null) {
+ mCurrentWindowLayout.sendControl(control);
+ }
+ }
+
+ private void sendBufferToCurrentWindow(String buffer) {
+ if (mCurrentWindowLayout != null) {
+ mCurrentWindowLayout.sendBuffer(buffer);
+ mHandler.removeMessages(MSG_CAPTION_CLEAR);
+ mHandler.sendMessageDelayed(mHandler.obtainMessage(MSG_CAPTION_CLEAR),
+ CAPTION_CLEAR_INTERVAL_MS);
+ }
+ }
+
+ // The pen related caption commands
+ private void setPenAttr(Cea708CCParser.CaptionPenAttr attr) {
+ if (mCurrentWindowLayout != null) {
+ mCurrentWindowLayout.setPenAttr(attr);
+ }
+ }
+
+ private void setPenColor(Cea708CCParser.CaptionPenColor color) {
+ if (mCurrentWindowLayout != null) {
+ mCurrentWindowLayout.setPenColor(color);
+ }
+ }
+
+ private void setPenLocation(Cea708CCParser.CaptionPenLocation location) {
+ if (mCurrentWindowLayout != null) {
+ mCurrentWindowLayout.setPenLocation(location.row, location.column);
+ }
+ }
+ }
+
    /**
     * @hide
     *
     * Layout which renders a caption window of CEA-708B. It contains a {@link TextView} that takes
     * care of displaying the actual CC text.
     */
    static class CCWindowLayout extends RelativeLayout implements View.OnLayoutChangeListener {
        private static final String TAG = "CCWindowLayout";

        // Relative text-size multipliers for the small/large pen sizes.
        private static final float PROPORTION_PEN_SIZE_SMALL = .75f;
        private static final float PROPORTION_PEN_SIZE_LARGE = 1.25f;

        // The following values indicates the maximum cell number of a window.
        private static final int ANCHOR_RELATIVE_POSITIONING_MAX = 99;
        private static final int ANCHOR_VERTICAL_MAX = 74;
        private static final int ANCHOR_HORIZONTAL_16_9_MAX = 209;
        private static final int MAX_COLUMN_COUNT_16_9 = 42;

        // The following values indicates a gravity of a window.
        private static final int ANCHOR_MODE_DIVIDER = 3;
        private static final int ANCHOR_HORIZONTAL_MODE_LEFT = 0;
        private static final int ANCHOR_HORIZONTAL_MODE_CENTER = 1;
        private static final int ANCHOR_HORIZONTAL_MODE_RIGHT = 2;
        private static final int ANCHOR_VERTICAL_MODE_TOP = 0;
        private static final int ANCHOR_VERTICAL_MODE_CENTER = 1;
        private static final int ANCHOR_VERTICAL_MODE_BOTTOM = 2;

        // The CCLayout this window is (or will be) attached to.
        private CCLayout mCCLayout;

        // Child view that renders the actual caption text.
        private CCView mCCView;
        private CaptionStyle mCaptionStyle;
        private int mRowLimit = 0;
        // Accumulates styled caption text for this window.
        private final SpannableStringBuilder mBuilder = new SpannableStringBuilder();
        // Spans applied to subsequently appended text; rebuilt by setPenAttr().
        private final List<CharacterStyle> mCharacterStyles = new ArrayList<>();
        // CEA-708 window id this layout renders.
        private int mCaptionWindowId;
        // Current pen row; -1 until the pen location is first set.
        private int mRow = -1;
        private float mFontScale;
        private float mTextSize;
        private String mWidestChar;
        private int mLastCaptionLayoutWidth;
        private int mLastCaptionLayoutHeight;

        public CCWindowLayout(Context context) {
            this(context, null);
        }

        public CCWindowLayout(Context context, AttributeSet attrs) {
            this(context, attrs, 0);
        }

        public CCWindowLayout(Context context, AttributeSet attrs, int defStyleAttr) {
            this(context, attrs, defStyleAttr, 0);
        }

        /** Full constructor; the shorter constructors delegate here with defaults. */
        public CCWindowLayout(Context context, AttributeSet attrs, int defStyleAttr,
                int defStyleRes) {
            super(context, attrs, defStyleAttr, defStyleRes);

            // Add a subtitle view to the layout.
            mCCView = new CCView(context);
            LayoutParams params = new RelativeLayout.LayoutParams(
                    ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT);
            addView(mCCView, params);

            // Set the system wide CC preferences to the subtitle view.
            CaptioningManager captioningManager =
                    (CaptioningManager) context.getSystemService(Context.CAPTIONING_SERVICE);
            mFontScale = captioningManager.getFontScale();
            setCaptionStyle(captioningManager.getUserStyle());
            mCCView.setText("");
            updateWidestChar();
        }
+
        /** Applies the given caption style to this window's text view. */
        public void setCaptionStyle(CaptionStyle style) {
            mCaptionStyle = style;
            mCCView.setCaptionStyle(style);
        }

        /** Updates the font scale and recomputes the text size from it. */
        public void setFontScale(float fontScale) {
            mFontScale = fontScale;
            updateTextSize();
        }

        /** Returns the CEA-708 window id this layout renders. */
        public int getCaptionWindowId() {
            return mCaptionWindowId;
        }

        /** Sets the CEA-708 window id this layout renders. */
        public void setCaptionWindowId(int captionWindowId) {
            mCaptionWindowId = captionWindowId;
        }
+
        /** Clears the window's text and hides it. */
        public void clear() {
            clearText();
            hide();
        }

        /** Makes the window visible and triggers a re-layout. */
        public void show() {
            setVisibility(View.VISIBLE);
            requestLayout();
        }

        /** Hides the window (INVISIBLE, keeping its layout slot) and re-lays out. */
        public void hide() {
            setVisibility(View.INVISIBLE);
            requestLayout();
        }
+
+ public void setPenAttr(Cea708CCParser.CaptionPenAttr penAttr) {
+ mCharacterStyles.clear();
+ if (penAttr.italic) {
+ mCharacterStyles.add(new StyleSpan(Typeface.ITALIC));
+ }
+ if (penAttr.underline) {
+ mCharacterStyles.add(new UnderlineSpan());
+ }
+ switch (penAttr.penSize) {
+ case Cea708CCParser.CaptionPenAttr.PEN_SIZE_SMALL:
+ mCharacterStyles.add(new RelativeSizeSpan(PROPORTION_PEN_SIZE_SMALL));
+ break;
+ case Cea708CCParser.CaptionPenAttr.PEN_SIZE_LARGE:
+ mCharacterStyles.add(new RelativeSizeSpan(PROPORTION_PEN_SIZE_LARGE));
+ break;
+ }
+ switch (penAttr.penOffset) {
+ case Cea708CCParser.CaptionPenAttr.OFFSET_SUBSCRIPT:
+ mCharacterStyles.add(new SubscriptSpan());
+ break;
+ case Cea708CCParser.CaptionPenAttr.OFFSET_SUPERSCRIPT:
+ mCharacterStyles.add(new SuperscriptSpan());
+ break;
+ }
+ }
+
        /** SPC handler; currently a no-op (system-wide CC colors are used instead). */
        public void setPenColor(Cea708CCParser.CaptionPenColor penColor) {
            // TODO: apply pen colors or skip this and use the style of system wide CC style as is.
        }

        /**
         * Moves the pen to the given row, emitting one newline per row advanced from
         * the current row. The column is currently ignored (see TODO).
         */
        public void setPenLocation(int row, int column) {
            // TODO: change the location of pen based on row and column both.
            if (mRow >= 0) {
                for (int r = mRow; r < row; ++r) {
                    appendText("\n");
                }
            }
            mRow = row;
        }

        /** SWA handler; currently a no-op (system-wide CC style is used instead). */
        public void setWindowAttr(Cea708CCParser.CaptionWindowAttr windowAttr) {
            // TODO: apply window attrs or skip this and use the style of system wide CC style as
            // is.
        }

        /** Appends the given text buffer to the window. */
        public void sendBuffer(String buffer) {
            appendText(buffer);
        }

        /** C0 control-character handler; currently a no-op. */
        public void sendControl(char control) {
            // TODO: there are a bunch of ASCII-style control codes.
        }
+
+ /**
+ * This method places the window on a given CaptionLayout along with the anchor of the
+ * window.
+ * <p>
         * According to CEA-708B, the anchor id indicates the gravity of the window as follows.
         * For example, an anchor id of 7 means that the window is aligned with the bottom of
         * its parent and centered horizontally within it.
+ * </p>
+ * <h4>Anchor id and the gravity of a window</h4>
+ * <table>
+ * <tr>
+ * <th>GRAVITY</th>
+ * <th>LEFT</th>
+ * <th>CENTER_HORIZONTAL</th>
+ * <th>RIGHT</th>
+ * </tr>
+ * <tr>
+ * <th>TOP</th>
+ * <td>0</td>
+ * <td>1</td>
+ * <td>2</td>
+ * </tr>
+ * <tr>
+ * <th>CENTER_VERTICAL</th>
+ * <td>3</td>
+ * <td>4</td>
+ * <td>5</td>
+ * </tr>
+ * <tr>
+ * <th>BOTTOM</th>
+ * <td>6</td>
+ * <td>7</td>
+ * <td>8</td>
+ * </tr>
+ * </table>
+ * <p>
+ * In order to handle the gravity of a window, there are two steps. First, set the size of
+ * the window. Since the window will be positioned at ScaledLayout, the size factors are
+ * determined in a ratio. Second, set the gravity of the window. CaptionWindowLayout is
+ * inherited from RelativeLayout. Hence, we could set the gravity of its child view,
+ * SubtitleView.
+ * </p>
+ * <p>
+ * The gravity of the window is also related to its size. When it should be pushed to a one
+ * of the end of the window, like LEFT, RIGHT, TOP or BOTTOM, the anchor point should be a
+ * boundary of the window. When it should be pushed in the horizontal/vertical center of its
+ * container, the horizontal/vertical center point of the window should be the same as the
+ * anchor point.
+ * </p>
+ *
+ * @param ccLayout a given CaptionLayout, which contains a safe title area.
+ * @param captionWindow a given CaptionWindow, which stores the construction info of the
+ * window.
+ */
+        public void initWindow(CCLayout ccLayout, Cea708CCParser.CaptionWindow captionWindow) {
+            // (Re)attach to the caption layout, keeping the layout-change listener current.
+            if (mCCLayout != ccLayout) {
+                if (mCCLayout != null) {
+                    mCCLayout.removeOnLayoutChangeListener(this);
+                }
+                mCCLayout = ccLayout;
+                mCCLayout.addOnLayoutChangeListener(this);
+                updateWidestChar();
+            }
+
+            // Both anchor vertical and horizontal indicates the position cell number of the window.
+            float scaleRow = (float) captionWindow.anchorVertical /
+                    (captionWindow.relativePositioning
+                            ? ANCHOR_RELATIVE_POSITIONING_MAX : ANCHOR_VERTICAL_MAX);
+
+            // Assumes it has a wide aspect ratio track.
+            float scaleCol = (float) captionWindow.anchorHorizontal /
+                    (captionWindow.relativePositioning ? ANCHOR_RELATIVE_POSITIONING_MAX
+                            : ANCHOR_HORIZONTAL_16_9_MAX);
+
+            // The range of scaleRow/Col need to be verified to be in [0, 1].
+            // Otherwise a RuntimeException will be raised in ScaledLayout.
+            if (scaleRow < 0 || scaleRow > 1) {
+                Log.i(TAG, "The vertical position of the anchor point should be at the range of 0 "
+                        + "and 1 but " + scaleRow);
+                scaleRow = Math.max(0, Math.min(scaleRow, 1));
+            }
+            if (scaleCol < 0 || scaleCol > 1) {
+                Log.i(TAG, "The horizontal position of the anchor point should be at the range of 0"
+                        + " and 1 but " + scaleCol);
+                scaleCol = Math.max(0, Math.min(scaleCol, 1));
+            }
+            int gravity = Gravity.CENTER;
+            // Decompose the anchor id (0-8) into a 3x3 grid: horizontal mode is the
+            // column (0-2), vertical mode the row (0-2). See the javadoc table above.
+            int horizontalMode = captionWindow.anchorId % ANCHOR_MODE_DIVIDER;
+            int verticalMode = captionWindow.anchorId / ANCHOR_MODE_DIVIDER;
+            float scaleStartRow = 0;
+            float scaleEndRow = 1;
+            float scaleStartCol = 0;
+            float scaleEndCol = 1;
+            switch (horizontalMode) {
+                case ANCHOR_HORIZONTAL_MODE_LEFT:
+                    gravity = Gravity.LEFT;
+                    mCCView.setAlignment(Alignment.ALIGN_NORMAL);
+                    scaleStartCol = scaleCol;
+                    break;
+                case ANCHOR_HORIZONTAL_MODE_CENTER:
+                    float gap = Math.min(1 - scaleCol, scaleCol);
+
+                    // Since all TV sets use left text alignment instead of center text alignment
+                    // for this case, we follow the industry convention if possible.
+                    int columnCount = captionWindow.columnCount + 1;
+                    columnCount = Math.min(getScreenColumnCount(), columnCount);
+                    StringBuilder widestTextBuilder = new StringBuilder();
+                    for (int i = 0; i < columnCount; ++i) {
+                        widestTextBuilder.append(mWidestChar);
+                    }
+                    Paint paint = new Paint();
+                    paint.setTypeface(mCaptionStyle.getTypeface());
+                    paint.setTextSize(mTextSize);
+                    float maxWindowWidth = paint.measureText(widestTextBuilder.toString());
+                    // 0.8f mirrors the safe-title-area width factor used in updateTextSize().
+                    float halfMaxWidthScale = mCCLayout.getWidth() > 0
+                            ? maxWindowWidth / 2.0f / (mCCLayout.getWidth() * 0.8f) : 0.0f;
+                    if (halfMaxWidthScale > 0f && halfMaxWidthScale < scaleCol) {
+                        // Calculate the expected max window size based on the column count of the
+                        // caption window multiplied by average alphabets char width, then align the
+                        // left side of the window with the left side of the expected max window.
+                        gravity = Gravity.LEFT;
+                        mCCView.setAlignment(Alignment.ALIGN_NORMAL);
+                        scaleStartCol = scaleCol - halfMaxWidthScale;
+                        scaleEndCol = 1.0f;
+                    } else {
+                        // The gap will be the minimum distance value of the distances from both
+                        // horizontal end points to the anchor point.
+                        // If scaleCol <= 0.5, the range of scaleCol is [0, the anchor point * 2].
+                        // If scaleCol > 0.5, the range of scaleCol is
+                        // [(1 - the anchor point) * 2, 1].
+                        // The anchor point is located at the horizontal center of the window in
+                        // both cases.
+                        gravity = Gravity.CENTER_HORIZONTAL;
+                        mCCView.setAlignment(Alignment.ALIGN_CENTER);
+                        scaleStartCol = scaleCol - gap;
+                        scaleEndCol = scaleCol + gap;
+                    }
+                    break;
+                case ANCHOR_HORIZONTAL_MODE_RIGHT:
+                    gravity = Gravity.RIGHT;
+                    mCCView.setAlignment(Alignment.ALIGN_RIGHT);
+                    scaleEndCol = scaleCol;
+                    break;
+            }
+            switch (verticalMode) {
+                case ANCHOR_VERTICAL_MODE_TOP:
+                    gravity |= Gravity.TOP;
+                    scaleStartRow = scaleRow;
+                    break;
+                case ANCHOR_VERTICAL_MODE_CENTER:
+                    gravity |= Gravity.CENTER_VERTICAL;
+
+                    // See the above comment.
+                    float gap = Math.min(1 - scaleRow, scaleRow);
+                    scaleStartRow = scaleRow - gap;
+                    scaleEndRow = scaleRow + gap;
+                    break;
+                case ANCHOR_VERTICAL_MODE_BOTTOM:
+                    gravity |= Gravity.BOTTOM;
+                    scaleEndRow = scaleRow;
+                    break;
+            }
+            // Place (or re-place) this window in the safe title area with the computed bounds.
+            mCCLayout.addOrUpdateViewToSafeTitleArea(this, new ScaledLayout
+                    .ScaledLayoutParams(scaleStartRow, scaleEndRow, scaleStartCol, scaleEndCol));
+            setCaptionWindowId(captionWindow.id);
+            setRowLimit(captionWindow.rowCount);
+            setGravity(gravity);
+            if (captionWindow.visible) {
+                show();
+            } else {
+                hide();
+            }
+        }
+
+ @Override
+ public void onLayoutChange(View v, int left, int top, int right, int bottom, int oldLeft,
+ int oldTop, int oldRight, int oldBottom) {
+ int width = right - left;
+ int height = bottom - top;
+ if (width != mLastCaptionLayoutWidth || height != mLastCaptionLayoutHeight) {
+ mLastCaptionLayoutWidth = width;
+ mLastCaptionLayoutHeight = height;
+ updateTextSize();
+ }
+ }
+
+ private void updateWidestChar() {
+ Paint paint = new Paint();
+ paint.setTypeface(mCaptionStyle.getTypeface());
+ Charset latin1 = Charset.forName("ISO-8859-1");
+ float widestCharWidth = 0f;
+ for (int i = 0; i < 256; ++i) {
+ String ch = new String(new byte[]{(byte) i}, latin1);
+ float charWidth = paint.measureText(ch);
+ if (widestCharWidth < charWidth) {
+ widestCharWidth = charWidth;
+ mWidestChar = ch;
+ }
+ }
+ updateTextSize();
+ }
+
+        /**
+         * Recomputes the caption text size so that a full-width line of the
+         * widest character fits within 80% of the caption layout width.
+         */
+        private void updateTextSize() {
+            if (mCCLayout == null) return;
+
+            // Calculate text size based on the max window size.
+            StringBuilder widestTextBuilder = new StringBuilder();
+            int screenColumnCount = getScreenColumnCount();
+            for (int i = 0; i < screenColumnCount; ++i) {
+                widestTextBuilder.append(mWidestChar);
+            }
+            String widestText = widestTextBuilder.toString();
+            Paint paint = new Paint();
+            paint.setTypeface(mCaptionStyle.getTypeface());
+            float startFontSize = 0f;
+            float endFontSize = 255f;
+            // Binary-search (with a 0.01f step) the largest font size whose
+            // widest possible line still fits in 80% of the layout width.
+            while (startFontSize < endFontSize) {
+                float testTextSize = (startFontSize + endFontSize) / 2f;
+                paint.setTextSize(testTextSize);
+                float width = paint.measureText(widestText);
+                if (mCCLayout.getWidth() * 0.8f > width) {
+                    startFontSize = testTextSize + 0.01f;
+                } else {
+                    endFontSize = testTextSize - 0.01f;
+                }
+            }
+            // Scale by the user's caption font-scale preference.
+            mTextSize = endFontSize * mFontScale;
+            mCCView.setTextSize(mTextSize);
+        }
+
+        /** Returns the column capacity of the screen, assuming a 16:9 track. */
+        private int getScreenColumnCount() {
+            // Assume it has a wide aspect ratio track.
+            return MAX_COLUMN_COUNT_16_9;
+        }
+
+        /** Detaches this window from its caption layout and unregisters the layout listener. */
+        public void removeFromCaptionView() {
+            if (mCCLayout == null) {
+                return;
+            }
+            mCCLayout.removeViewFromSafeTitleArea(this);
+            mCCLayout.removeOnLayoutChangeListener(this);
+            mCCLayout = null;
+        }
+
+        /** Replaces the window content with the given text. */
+        public void setText(String text) {
+            updateText(text, false);
+        }
+
+        /** Appends the given text to the window content. */
+        public void appendText(String text) {
+            updateText(text, true);
+        }
+
+        /** Clears both the internal buffer and the on-screen text. */
+        public void clearText() {
+            mBuilder.clear();
+            mCCView.setText("");
+        }
+
+        /**
+         * Replaces or appends caption text: applies the active pen styles to the
+         * appended span, truncates the buffer to the row limit, trims leading
+         * and trailing whitespace, then pushes the result to the CC view.
+         */
+        private void updateText(String text, boolean appended) {
+            if (!appended) {
+                mBuilder.clear();
+            }
+            if (text != null && text.length() > 0) {
+                int length = mBuilder.length();
+                mBuilder.append(text);
+                // Span only the newly appended region with the current pen styles.
+                for (CharacterStyle characterStyle : mCharacterStyles) {
+                    mBuilder.setSpan(characterStyle, length, mBuilder.length(),
+                            Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
+                }
+            }
+            String[] lines = TextUtils.split(mBuilder.toString(), "\n");
+
+            // Truncate text not to exceed the row limit.
+            // Plus one here since the range of the rows is [0, mRowLimit].
+            String truncatedText = TextUtils.join("\n", Arrays.copyOfRange(
+                    lines, Math.max(0, lines.length - (mRowLimit + 1)), lines.length));
+            mBuilder.delete(0, mBuilder.length() - truncatedText.length());
+
+            // Trim the buffer first then set text to CCView.
+            // Any char value <= ' ' (controls and space) counts as trimmable.
+            int start = 0, last = mBuilder.length() - 1;
+            int end = last;
+            while ((start <= end) && (mBuilder.charAt(start) <= ' ')) {
+                ++start;
+            }
+            while ((end >= start) && (mBuilder.charAt(end) <= ' ')) {
+                --end;
+            }
+            if (start == 0 && end == last) {
+                mCCView.setText(mBuilder);
+            } else {
+                // Copy-then-delete keeps the spans attached to the trimmed text.
+                SpannableStringBuilder trim = new SpannableStringBuilder();
+                trim.append(mBuilder);
+                if (end < last) {
+                    trim.delete(end + 1, last + 1);
+                }
+                if (start > 0) {
+                    trim.delete(0, start);
+                }
+                mCCView.setText(trim);
+            }
+        }
+
+        /**
+         * Sets the maximum number of rows this caption window may display.
+         *
+         * @param rowLimit the row limit; zero is allowed, negative is not
+         * @throws IllegalArgumentException if {@code rowLimit} is negative
+         */
+        public void setRowLimit(int rowLimit) {
+            if (rowLimit < 0) {
+                // The guard permits zero, so the message must say "non-negative";
+                // it previously (incorrectly) demanded a positive number.
+                throw new IllegalArgumentException("A rowLimit should be a non-negative number");
+            }
+            mRowLimit = rowLimit;
+        }
+ }
+
+ /** @hide */
+    static class CCView extends SubtitleView {
+        private static final CaptionStyle DEFAULT_CAPTION_STYLE = CaptionStyle.DEFAULT;
+
+        public CCView(Context context) {
+            this(context, null);
+        }
+
+        public CCView(Context context, AttributeSet attrs) {
+            this(context, attrs, 0);
+        }
+
+        public CCView(Context context, AttributeSet attrs, int defStyleAttr) {
+            this(context, attrs, defStyleAttr, 0);
+        }
+
+        public CCView(Context context, AttributeSet attrs, int defStyleAttr,
+                int defStyleRes) {
+            super(context, attrs, defStyleAttr, defStyleRes);
+        }
+
+        /**
+         * Applies the given caption style to this view, falling back to the
+         * default style for each attribute the given style does not define.
+         */
+        public void setCaptionStyle(CaptionStyle style) {
+            final CaptionStyle fallback = DEFAULT_CAPTION_STYLE;
+            setForegroundColor(style.hasForegroundColor()
+                    ? style.foregroundColor : fallback.foregroundColor);
+            setBackgroundColor(style.hasBackgroundColor()
+                    ? style.backgroundColor : fallback.backgroundColor);
+            setEdgeType(style.hasEdgeType() ? style.edgeType : fallback.edgeType);
+            setEdgeColor(style.hasEdgeColor() ? style.edgeColor : fallback.edgeColor);
+            setTypeface(style.getTypeface());
+        }
+    }
+}
diff --git a/android/media/ClosedCaptionRenderer.java b/android/media/ClosedCaptionRenderer.java
new file mode 100644
index 00000000..cc7722a0
--- /dev/null
+++ b/android/media/ClosedCaptionRenderer.java
@@ -0,0 +1,1510 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.content.Context;
+import android.content.res.Resources;
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.Paint;
+import android.graphics.Rect;
+import android.graphics.Typeface;
+import android.text.Spannable;
+import android.text.SpannableStringBuilder;
+import android.text.TextPaint;
+import android.text.style.CharacterStyle;
+import android.text.style.StyleSpan;
+import android.text.style.UnderlineSpan;
+import android.text.style.UpdateAppearance;
+import android.util.AttributeSet;
+import android.util.Log;
+import android.util.TypedValue;
+import android.view.Gravity;
+import android.view.View;
+import android.view.ViewGroup;
+import android.view.accessibility.CaptioningManager;
+import android.view.accessibility.CaptioningManager.CaptionStyle;
+import android.view.accessibility.CaptioningManager.CaptioningChangeListener;
+import android.widget.LinearLayout;
+import android.widget.TextView;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Vector;
+
+/** @hide */
+public class ClosedCaptionRenderer extends SubtitleController.Renderer {
+    private final Context mContext;
+    // Lazily created, shared by all CEA-608 tracks from this renderer.
+    private Cea608CCWidget mCCWidget;
+
+    public ClosedCaptionRenderer(Context context) {
+        mContext = context;
+    }
+
+    /** Returns whether the format declares a CEA-608 closed-caption mime type. */
+    @Override
+    public boolean supports(MediaFormat format) {
+        if (!format.containsKey(MediaFormat.KEY_MIME)) {
+            return false;
+        }
+        final String mimeType = format.getString(MediaFormat.KEY_MIME);
+        return MediaPlayer.MEDIA_MIMETYPE_TEXT_CEA_608.equals(mimeType);
+    }
+
+    /**
+     * Creates a CEA-608 caption track bound to the shared rendering widget.
+     *
+     * @throws RuntimeException if the format is not a CEA-608 track
+     */
+    @Override
+    public SubtitleTrack createTrack(MediaFormat format) {
+        final String mimeType = format.getString(MediaFormat.KEY_MIME);
+        if (!MediaPlayer.MEDIA_MIMETYPE_TEXT_CEA_608.equals(mimeType)) {
+            throw new RuntimeException("No matching format: " + format.toString());
+        }
+        if (mCCWidget == null) {
+            mCCWidget = new Cea608CCWidget(mContext);
+        }
+        return new Cea608CaptionTrack(mCCWidget, format);
+    }
+}
+
+/** @hide */
+class Cea608CaptionTrack extends SubtitleTrack {
+    private final Cea608CCParser mCCParser;
+    private final Cea608CCWidget mRenderingWidget;
+
+    Cea608CaptionTrack(Cea608CCWidget renderingWidget, MediaFormat format) {
+        super(format);
+        mRenderingWidget = renderingWidget;
+        // The widget doubles as the parser's display listener.
+        mCCParser = new Cea608CCParser(mRenderingWidget);
+    }
+
+    /** Feeds raw CEA-608 byte pairs straight into the parser. */
+    @Override
+    public void onData(byte[] data, boolean eos, long runID) {
+        mCCParser.parse(data);
+    }
+
+    @Override
+    public RenderingWidget getRenderingWidget() {
+        return mRenderingWidget;
+    }
+
+    @Override
+    public void updateView(Vector<Cue> activeCues) {
+        // Overriding with NO-OP, CC rendering by-passes this
+    }
+}
+
+/**
+ * Abstract widget class to render a closed caption track.
+ *
+ * @hide
+ */
+abstract class ClosedCaptionWidget extends ViewGroup implements SubtitleTrack.RenderingWidget {
+
+    /** @hide */
+    interface ClosedCaptionLayout {
+        void setCaptionStyle(CaptionStyle captionStyle);
+        void setFontScale(float scale);
+    }
+
+    private static final CaptionStyle DEFAULT_CAPTION_STYLE = CaptionStyle.DEFAULT;
+
+    /** Captioning manager, used to obtain and track caption properties. */
+    private final CaptioningManager mManager;
+
+    /** Current caption style. */
+    protected CaptionStyle mCaptionStyle;
+
+    /** Callback for rendering changes. */
+    protected OnChangedListener mListener;
+
+    /** Concrete layout of CC. */
+    protected ClosedCaptionLayout mClosedCaptionLayout;
+
+    /** Whether a caption style change listener is registered. */
+    private boolean mHasChangeListener;
+
+    public ClosedCaptionWidget(Context context) {
+        this(context, null);
+    }
+
+    public ClosedCaptionWidget(Context context, AttributeSet attrs) {
+        this(context, attrs, 0);
+    }
+
+    public ClosedCaptionWidget(Context context, AttributeSet attrs, int defStyle) {
+        this(context, attrs, defStyle, 0);
+    }
+
+    public ClosedCaptionWidget(Context context, AttributeSet attrs, int defStyleAttr,
+            int defStyleRes) {
+        super(context, attrs, defStyleAttr, defStyleRes);
+
+        // Cannot render text over video when layer type is hardware.
+        setLayerType(View.LAYER_TYPE_SOFTWARE, null);
+
+        // Seed the caption style/font scale from the user's system preferences.
+        mManager = (CaptioningManager) context.getSystemService(Context.CAPTIONING_SERVICE);
+        mCaptionStyle = DEFAULT_CAPTION_STYLE.applyStyle(mManager.getUserStyle());
+
+        mClosedCaptionLayout = createCaptionLayout(context);
+        mClosedCaptionLayout.setCaptionStyle(mCaptionStyle);
+        mClosedCaptionLayout.setFontScale(mManager.getFontScale());
+        addView((ViewGroup) mClosedCaptionLayout, LayoutParams.MATCH_PARENT,
+                LayoutParams.MATCH_PARENT);
+
+        requestLayout();
+    }
+
+    /** Creates the concrete caption layout (e.g. CEA-608 vs CEA-708) hosted by this widget. */
+    public abstract ClosedCaptionLayout createCaptionLayout(Context context);
+
+    @Override
+    public void setOnChangedListener(OnChangedListener listener) {
+        mListener = listener;
+    }
+
+    /** Forces this widget to the exact given size via a measure/layout pass. */
+    @Override
+    public void setSize(int width, int height) {
+        final int widthSpec = MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY);
+        final int heightSpec = MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY);
+
+        measure(widthSpec, heightSpec);
+        layout(0, 0, width, height);
+    }
+
+    /**
+     * Shows or hides the widget; also re-evaluates whether the captioning
+     * change listener should be registered (it is only needed while visible).
+     */
+    @Override
+    public void setVisible(boolean visible) {
+        if (visible) {
+            setVisibility(View.VISIBLE);
+        } else {
+            setVisibility(View.GONE);
+        }
+
+        manageChangeListener();
+    }
+
+    @Override
+    public void onAttachedToWindow() {
+        super.onAttachedToWindow();
+
+        manageChangeListener();
+    }
+
+    @Override
+    public void onDetachedFromWindow() {
+        super.onDetachedFromWindow();
+
+        manageChangeListener();
+    }
+
+    @Override
+    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
+        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
+        ((ViewGroup) mClosedCaptionLayout).measure(widthMeasureSpec, heightMeasureSpec);
+    }
+
+    @Override
+    protected void onLayout(boolean changed, int l, int t, int r, int b) {
+        ((ViewGroup) mClosedCaptionLayout).layout(l, t, r, b);
+    }
+
+    /**
+     * Forwards system-wide caption style and font-scale changes into the layout.
+     */
+    private final CaptioningChangeListener mCaptioningListener = new CaptioningChangeListener() {
+        @Override
+        public void onUserStyleChanged(CaptionStyle userStyle) {
+            mCaptionStyle = DEFAULT_CAPTION_STYLE.applyStyle(userStyle);
+            mClosedCaptionLayout.setCaptionStyle(mCaptionStyle);
+        }
+
+        @Override
+        public void onFontScaleChanged(float fontScale) {
+            mClosedCaptionLayout.setFontScale(fontScale);
+        }
+    };
+
+    /**
+     * Manages whether this renderer is listening for caption style changes:
+     * the listener is registered only while attached and visible.
+     */
+    private void manageChangeListener() {
+        final boolean needsListener = isAttachedToWindow() && getVisibility() == View.VISIBLE;
+        if (mHasChangeListener != needsListener) {
+            mHasChangeListener = needsListener;
+
+            if (needsListener) {
+                mManager.addCaptioningChangeListener(mCaptioningListener);
+            } else {
+                mManager.removeCaptioningChangeListener(mCaptioningListener);
+            }
+        }
+    }
+}
+
+/**
+ * @hide
+ *
+ * CCParser processes CEA-608 closed caption data.
+ *
+ * It calls back into OnDisplayChangedListener upon
+ * display change with styled text for rendering.
+ *
+ */
+class Cea608CCParser {
+ public static final int MAX_ROWS = 15;
+ public static final int MAX_COLS = 32;
+
+ private static final String TAG = "Cea608CCParser";
+ private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);
+
+ private static final int INVALID = -1;
+
+ // EIA-CEA-608: Table 70 - Control Codes
+ private static final int RCL = 0x20;
+ private static final int BS = 0x21;
+ private static final int AOF = 0x22;
+ private static final int AON = 0x23;
+ private static final int DER = 0x24;
+ private static final int RU2 = 0x25;
+ private static final int RU3 = 0x26;
+ private static final int RU4 = 0x27;
+ private static final int FON = 0x28;
+ private static final int RDC = 0x29;
+ private static final int TR = 0x2a;
+ private static final int RTD = 0x2b;
+ private static final int EDM = 0x2c;
+ private static final int CR = 0x2d;
+ private static final int ENM = 0x2e;
+ private static final int EOC = 0x2f;
+
+ // Transparent Space
+ private static final char TS = '\u00A0';
+
+ // Captioning Modes
+ private static final int MODE_UNKNOWN = 0;
+ private static final int MODE_PAINT_ON = 1;
+ private static final int MODE_ROLL_UP = 2;
+ private static final int MODE_POP_ON = 3;
+ private static final int MODE_TEXT = 4;
+
+ private final DisplayListener mListener;
+
+ private int mMode = MODE_PAINT_ON;
+ private int mRollUpSize = 4;
+ private int mPrevCtrlCode = INVALID;
+
+ private CCMemory mDisplay = new CCMemory();
+ private CCMemory mNonDisplay = new CCMemory();
+ private CCMemory mTextMem = new CCMemory();
+
+    /** @param listener callback that receives styled text whenever the display changes */
+    Cea608CCParser(DisplayListener listener) {
+        mListener = listener;
+    }
+
+    /**
+     * Parses a buffer of CEA-608 byte pairs, offering each pair to the
+     * control-code, tab-offset, PAC and mid-row handlers in that order; the
+     * first handler that consumes a pair wins, otherwise the pair is treated
+     * as displayable characters.
+     */
+    public void parse(byte[] data) {
+        for (CCData cc : CCData.fromByteArray(data)) {
+            if (DEBUG) {
+                Log.d(TAG, cc.toString());
+            }
+
+            final boolean consumed = handleCtrlCode(cc)
+                    || handleTabOffsets(cc)
+                    || handlePACCode(cc)
+                    || handleMidRowCode(cc);
+            if (!consumed) {
+                handleDisplayableChars(cc);
+            }
+        }
+    }
+
+    /** Receives the styled caption rows whenever the on-screen display changes. */
+    interface DisplayListener {
+        void onDisplayChanged(SpannableStringBuilder[] styledTexts);
+        CaptionStyle getCaptionStyle();
+    }
+
+    /**
+     * Returns the CC memory the current captioning mode operates on:
+     * non-display memory for pop-on, text memory for text mode, and display
+     * memory for paint-on/roll-up (and, with a warning, any unknown mode).
+     */
+    private CCMemory getMemory() {
+        // get the CC memory to operate on for current mode
+        switch (mMode) {
+            case MODE_POP_ON:
+                return mNonDisplay;
+            case MODE_TEXT:
+                // TODO(chz): support only caption mode for now,
+                // in text mode, dump everything to text mem.
+                return mTextMem;
+            case MODE_PAINT_ON:
+            case MODE_ROLL_UP:
+                return mDisplay;
+            default:
+                // Fixed log-message typo ("unrecoginized").
+                Log.w(TAG, "unrecognized mode: " + mMode);
+        }
+        return mDisplay;
+    }
+
+    /**
+     * Writes a displayable character pair into the current memory. Extended
+     * characters first erase the automatically-inserted preceding character.
+     *
+     * @return true if the data was displayable and was consumed
+     */
+    private boolean handleDisplayableChars(CCData ccData) {
+        if (!ccData.isDisplayableChar()) {
+            return false;
+        }
+
+        // Extended char includes 1 automatic backspace
+        if (ccData.isExtendedChar()) {
+            getMemory().bs();
+        }
+
+        getMemory().writeText(ccData.getDisplayText());
+
+        // Paint-on and roll-up captions are rendered as soon as they arrive.
+        if (mMode == MODE_PAINT_ON || mMode == MODE_ROLL_UP) {
+            updateDisplay();
+        }
+
+        return true;
+    }
+
+ private boolean handleMidRowCode(CCData ccData) {
+ StyleCode m = ccData.getMidRow();
+ if (m != null) {
+ getMemory().writeMidRowCode(m);
+ return true;
+ }
+ return false;
+ }
+
+ private boolean handlePACCode(CCData ccData) {
+ PAC pac = ccData.getPAC();
+
+ if (pac != null) {
+ if (mMode == MODE_ROLL_UP) {
+ getMemory().moveBaselineTo(pac.getRow(), mRollUpSize);
+ }
+ getMemory().writePAC(pac);
+ return true;
+ }
+
+ return false;
+ }
+
+ private boolean handleTabOffsets(CCData ccData) {
+ int tabs = ccData.getTabOffset();
+
+ if (tabs > 0) {
+ getMemory().tab(tabs);
+ return true;
+ }
+
+ return false;
+ }
+
+    /**
+     * Handles a CEA-608 control code (Table 70). A control code transmitted
+     * twice in a row (the mandated redundant transmission) is discarded once.
+     *
+     * @return true if the data was a control code and was consumed
+     */
+    private boolean handleCtrlCode(CCData ccData) {
+        int ctrlCode = ccData.getCtrlCode();
+
+        if (mPrevCtrlCode != INVALID && mPrevCtrlCode == ctrlCode) {
+            // discard double ctrl codes (but if there's a 3rd one, we still take that)
+            mPrevCtrlCode = INVALID;
+            return true;
+        }
+
+        switch(ctrlCode) {
+            case RCL:
+                // select pop-on style
+                mMode = MODE_POP_ON;
+                break;
+            case BS:
+                getMemory().bs();
+                break;
+            case DER:
+                getMemory().der();
+                break;
+            case RU2:
+            case RU3:
+            case RU4:
+                // RU2=0x25 .. RU4=0x27, so this yields a window of 2-4 rows.
+                mRollUpSize = (ctrlCode - 0x23);
+                // erase memory if currently in other style
+                if (mMode != MODE_ROLL_UP) {
+                    mDisplay.erase();
+                    mNonDisplay.erase();
+                }
+                // select roll-up style
+                mMode = MODE_ROLL_UP;
+                break;
+            case FON:
+                Log.i(TAG, "Flash On");
+                break;
+            case RDC:
+                // select paint-on style
+                mMode = MODE_PAINT_ON;
+                break;
+            case TR:
+                mMode = MODE_TEXT;
+                mTextMem.erase();
+                break;
+            case RTD:
+                mMode = MODE_TEXT;
+                break;
+            case EDM:
+                // erase display memory
+                mDisplay.erase();
+                updateDisplay();
+                break;
+            case CR:
+                // Carriage return: scroll in roll-up mode, new line otherwise.
+                if (mMode == MODE_ROLL_UP) {
+                    getMemory().rollUp(mRollUpSize);
+                } else {
+                    getMemory().cr();
+                }
+                if (mMode == MODE_ROLL_UP) {
+                    updateDisplay();
+                }
+                break;
+            case ENM:
+                // erase non-display memory
+                mNonDisplay.erase();
+                break;
+            case EOC:
+                // swap display/non-display memory
+                swapMemory();
+                // switch to pop-on style
+                mMode = MODE_POP_ON;
+                updateDisplay();
+                break;
+            case INVALID:
+            default:
+                mPrevCtrlCode = INVALID;
+                return false;
+        }
+
+        // Remember this code so its doubled transmission can be discarded.
+        mPrevCtrlCode = ctrlCode;
+
+        // handled
+        return true;
+    }
+
+ private void updateDisplay() {
+ if (mListener != null) {
+ CaptionStyle captionStyle = mListener.getCaptionStyle();
+ mListener.onDisplayChanged(mDisplay.getStyledText(captionStyle));
+ }
+ }
+
+ private void swapMemory() {
+ CCMemory temp = mDisplay;
+ mDisplay = mNonDisplay;
+ mNonDisplay = temp;
+ }
+
+    /**
+     * A mid-row style code: a pen color (or white italics) plus an optional
+     * underline flag, decoded from the second byte of a CEA-608 pair.
+     */
+    private static class StyleCode {
+        static final int COLOR_WHITE = 0;
+        static final int COLOR_GREEN = 1;
+        static final int COLOR_BLUE = 2;
+        static final int COLOR_CYAN = 3;
+        static final int COLOR_RED = 4;
+        static final int COLOR_YELLOW = 5;
+        static final int COLOR_MAGENTA = 6;
+        static final int COLOR_INVALID = 7;
+
+        static final int STYLE_ITALICS = 0x00000001;
+        static final int STYLE_UNDERLINE = 0x00000002;
+
+        // Names indexed by the COLOR_* constants, for toString() only.
+        static final String[] mColorMap = {
+            "WHITE", "GREEN", "BLUE", "CYAN", "RED", "YELLOW", "MAGENTA", "INVALID"
+        };
+
+        final int mStyle;
+        final int mColor;
+
+        static StyleCode fromByte(byte data2) {
+            int style = 0;
+            // Bits 1-3 carry the color; bit 0 carries the underline flag.
+            int color = (data2 >> 1) & 0x7;
+
+            if ((data2 & 0x1) != 0) {
+                style |= STYLE_UNDERLINE;
+            }
+
+            if (color == COLOR_INVALID) {
+                // WHITE ITALICS
+                color = COLOR_WHITE;
+                style |= STYLE_ITALICS;
+            }
+
+            return new StyleCode(style, color);
+        }
+
+        StyleCode(int style, int color) {
+            mStyle = style;
+            mColor = color;
+        }
+
+        boolean isItalics() {
+            return (mStyle & STYLE_ITALICS) != 0;
+        }
+
+        boolean isUnderline() {
+            return (mStyle & STYLE_UNDERLINE) != 0;
+        }
+
+        int getColor() {
+            return mColor;
+        }
+
+        @Override
+        public String toString() {
+            StringBuilder str = new StringBuilder();
+            str.append("{");
+            str.append(mColorMap[mColor]);
+            if ((mStyle & STYLE_ITALICS) != 0) {
+                str.append(", ITALICS");
+            }
+            if ((mStyle & STYLE_UNDERLINE) != 0) {
+                str.append(", UNDERLINE");
+            }
+            str.append("}");
+
+            return str.toString();
+        }
+    }
+
+    /**
+     * A Preamble Address Code: addresses a caption row (and, for indent PACs,
+     * a column) while also carrying a style, decoded from a CEA-608 byte pair.
+     */
+    private static class PAC extends StyleCode {
+        final int mRow;
+        // Column for indent PACs; -1 for style PACs (see isIndentPAC()).
+        final int mCol;
+
+        static PAC fromBytes(byte data1, byte data2) {
+            // Base rows per channel code; bit 5 of data2 selects the second row of each pair.
+            int[] rowTable = {11, 1, 3, 12, 14, 5, 7, 9};
+            int row = rowTable[data1 & 0x07] + ((data2 & 0x20) >> 5);
+            int style = 0;
+            if ((data2 & 1) != 0) {
+                style |= STYLE_UNDERLINE;
+            }
+            if ((data2 & 0x10) != 0) {
+                // indent code
+                int indent = (data2 >> 1) & 0x7;
+                return new PAC(row, indent * 4, style, COLOR_WHITE);
+            } else {
+                // style code
+                int color = (data2 >> 1) & 0x7;
+
+                if (color == COLOR_INVALID) {
+                    // WHITE ITALICS
+                    color = COLOR_WHITE;
+                    style |= STYLE_ITALICS;
+                }
+                return new PAC(row, -1, style, color);
+            }
+        }
+
+        PAC(int row, int col, int style, int color) {
+            super(style, color);
+            mRow = row;
+            mCol = col;
+        }
+
+        boolean isIndentPAC() {
+            return (mCol >= 0);
+        }
+
+        int getRow() {
+            return mRow;
+        }
+
+        int getCol() {
+            return mCol;
+        }
+
+        @Override
+        public String toString() {
+            return String.format("{%d, %d}, %s",
+                    mRow, mCol, super.toString());
+        }
+    }
+
+ /**
+ * Mutable version of BackgroundSpan to facilitate text rendering with edge styles.
+ *
+ * @hide
+ */
+    public static class MutableBackgroundColorSpan extends CharacterStyle
+            implements UpdateAppearance {
+        private int mColor;
+
+        public MutableBackgroundColorSpan(int color) {
+            mColor = color;
+        }
+
+        /** Updates the background color applied on the next draw pass. */
+        public void setBackgroundColor(int color) {
+            mColor = color;
+        }
+
+        public int getBackgroundColor() {
+            return mColor;
+        }
+
+        @Override
+        public void updateDrawState(TextPaint ds) {
+            // Mutable so the span can be restyled without re-spanning the text.
+            ds.bgColor = mColor;
+        }
+    }
+
+ /* CCLineBuilder keeps track of displayable chars, as well as
+ * MidRow styles and PACs, for a single line of CC memory.
+ *
+ * It generates styled text via getStyledText() method.
+ */
+    private static class CCLineBuilder {
+        // Displayable characters for this line; TS marks transparent cells.
+        private final StringBuilder mDisplayChars;
+        // Per-column mid-row style changes (null where none).
+        private final StyleCode[] mMidRowStyles;
+        // Per-column PAC styles (null where none).
+        private final StyleCode[] mPACStyles;
+
+        CCLineBuilder(String str) {
+            mDisplayChars = new StringBuilder(str);
+            mMidRowStyles = new StyleCode[mDisplayChars.length()];
+            mPACStyles = new StyleCode[mDisplayChars.length()];
+        }
+
+        void setCharAt(int index, char ch) {
+            mDisplayChars.setCharAt(index, ch);
+            // A plain character overrides any mid-row code previously stored here.
+            mMidRowStyles[index] = null;
+        }
+
+        void setMidRowAt(int index, StyleCode m) {
+            // A mid-row code occupies one cell, rendered as a space.
+            mDisplayChars.setCharAt(index, ' ');
+            mMidRowStyles[index] = m;
+        }
+
+        void setPACAt(int index, PAC pac) {
+            mPACStyles[index] = pac;
+        }
+
+        char charAt(int index) {
+            return mDisplayChars.charAt(index);
+        }
+
+        int length() {
+            return mDisplayChars.length();
+        }
+
+        /** Applies the italic/underline flags of {@code s} over [start, end). */
+        void applyStyleSpan(
+                SpannableStringBuilder styledText,
+                StyleCode s, int start, int end) {
+            if (s.isItalics()) {
+                styledText.setSpan(
+                        new StyleSpan(android.graphics.Typeface.ITALIC),
+                        start, end, Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
+            }
+            if (s.isUnderline()) {
+                styledText.setSpan(
+                        new UnderlineSpan(),
+                        start, end, Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
+            }
+        }
+
+        /**
+         * Builds the styled text for this line: walks the columns, applying
+         * mid-row/PAC styles and wrapping each run of non-transparent cells in
+         * a background span (widened by one cell on each side when possible).
+         */
+        SpannableStringBuilder getStyledText(CaptionStyle captionStyle) {
+            SpannableStringBuilder styledText = new SpannableStringBuilder(mDisplayChars);
+            // start: first column of the current visible run (-1 when none).
+            // styleStart: column where the current style became active.
+            int start = -1, next = 0;
+            int styleStart = -1;
+            StyleCode curStyle = null;
+            while (next < mDisplayChars.length()) {
+                StyleCode newStyle = null;
+                if (mMidRowStyles[next] != null) {
+                    // apply mid-row style change
+                    newStyle = mMidRowStyles[next];
+                } else if (mPACStyles[next] != null
+                        && (styleStart < 0 || start < 0)) {
+                    // apply PAC style change, only if:
+                    // 1. no style set, or
+                    // 2. style set, but prev char is none-displayable
+                    newStyle = mPACStyles[next];
+                }
+                if (newStyle != null) {
+                    curStyle = newStyle;
+                    if (styleStart >= 0 && start >= 0) {
+                        // Close out the previous style's span before switching.
+                        applyStyleSpan(styledText, newStyle, styleStart, next);
+                    }
+                    styleStart = next;
+                }
+
+                if (mDisplayChars.charAt(next) != TS) {
+                    if (start < 0) {
+                        start = next;
+                    }
+                } else if (start >= 0) {
+                    // Run ended: widen the background by one cell unless it already ends in a space.
+                    int expandedStart = mDisplayChars.charAt(start) == ' ' ? start : start - 1;
+                    int expandedEnd = mDisplayChars.charAt(next - 1) == ' ' ? next : next + 1;
+                    styledText.setSpan(
+                            new MutableBackgroundColorSpan(captionStyle.backgroundColor),
+                            expandedStart, expandedEnd,
+                            Spannable.SPAN_EXCLUSIVE_EXCLUSIVE);
+                    if (styleStart >= 0) {
+                        applyStyleSpan(styledText, curStyle, styleStart, expandedEnd);
+                    }
+                    start = -1;
+                }
+                next++;
+            }
+
+            return styledText;
+        }
+    }
+
+ /*
+ * CCMemory models a console-style display.
+ */
+ private static class CCMemory {
+ private final String mBlankLine;
+ private final CCLineBuilder[] mLines = new CCLineBuilder[MAX_ROWS + 2];
+ private int mRow;
+ private int mCol;
+
+ CCMemory() {
+ char[] blank = new char[MAX_COLS + 2];
+ Arrays.fill(blank, TS);
+ mBlankLine = new String(blank);
+ }
+
+ void erase() {
+ // erase all lines
+ for (int i = 0; i < mLines.length; i++) {
+ mLines[i] = null;
+ }
+ mRow = MAX_ROWS;
+ mCol = 1;
+ }
+
+ void der() {
+ if (mLines[mRow] != null) {
+ for (int i = 0; i < mCol; i++) {
+ if (mLines[mRow].charAt(i) != TS) {
+ for (int j = mCol; j < mLines[mRow].length(); j++) {
+ mLines[j].setCharAt(j, TS);
+ }
+ return;
+ }
+ }
+ mLines[mRow] = null;
+ }
+ }
+
+ void tab(int tabs) {
+ moveCursorByCol(tabs);
+ }
+
+ void bs() {
+ moveCursorByCol(-1);
+ if (mLines[mRow] != null) {
+ mLines[mRow].setCharAt(mCol, TS);
+ if (mCol == MAX_COLS - 1) {
+ // Spec recommendation:
+ // if cursor was at col 32, move cursor
+ // back to col 31 and erase both col 31&32
+ mLines[mRow].setCharAt(MAX_COLS, TS);
+ }
+ }
+ }
+
+ void cr() {
+ moveCursorTo(mRow + 1, 1);
+ }
+
+ void rollUp(int windowSize) {
+ int i;
+ for (i = 0; i <= mRow - windowSize; i++) {
+ mLines[i] = null;
+ }
+ int startRow = mRow - windowSize + 1;
+ if (startRow < 1) {
+ startRow = 1;
+ }
+ for (i = startRow; i < mRow; i++) {
+ mLines[i] = mLines[i + 1];
+ }
+ for (i = mRow; i < mLines.length; i++) {
+ // clear base row
+ mLines[i] = null;
+ }
+ // default to col 1, in case PAC is not sent
+ mCol = 1;
+ }
+
+ void writeText(String text) {
+ for (int i = 0; i < text.length(); i++) {
+ getLineBuffer(mRow).setCharAt(mCol, text.charAt(i));
+ moveCursorByCol(1);
+ }
+ }
+
+ void writeMidRowCode(StyleCode m) {
+ getLineBuffer(mRow).setMidRowAt(mCol, m);
+ moveCursorByCol(1);
+ }
+
+ void writePAC(PAC pac) {
+ if (pac.isIndentPAC()) {
+ moveCursorTo(pac.getRow(), pac.getCol());
+ } else {
+ moveCursorTo(pac.getRow(), 1);
+ }
+ getLineBuffer(mRow).setPACAt(mCol, pac);
+ }
+
+ // Render the screen to MAX_ROWS styled rows (row 1 first); an entry
+ // is null for a blank row.
+ SpannableStringBuilder[] getStyledText(CaptionStyle captionStyle) {
+ ArrayList<SpannableStringBuilder> rows = new ArrayList<>(MAX_ROWS);
+ for (int i = 1; i <= MAX_ROWS; i++) {
+ rows.add(mLines[i] != null ?
+ mLines[i].getStyledText(captionStyle) : null);
+ }
+ return rows.toArray(new SpannableStringBuilder[MAX_ROWS]);
+ }
+
+ // Clamp x into [min, max].
+ private static int clamp(int x, int min, int max) {
+ return x < min ? min : (x > max ? max : x);
+ }
+
+ // Move the cursor to an absolute (row, col), clamped to the screen.
+ private void moveCursorTo(int row, int col) {
+ mRow = clamp(row, 1, MAX_ROWS);
+ mCol = clamp(col, 1, MAX_COLS);
+ }
+
+ // Move the cursor to an absolute row, keeping the current column.
+ private void moveCursorToRow(int row) {
+ mRow = clamp(row, 1, MAX_ROWS);
+ }
+
+ // Move the cursor horizontally by a (possibly negative) column delta.
+ private void moveCursorByCol(int col) {
+ mCol = clamp(mCol + col, 1, MAX_COLS);
+ }
+
+ // Move the bottom (baseline) row of a roll-up window from mRow to
+ // baseRow, dragging up to windowSize rows of content with it and
+ // clearing every row that falls outside the new window.
+ private void moveBaselineTo(int baseRow, int windowSize) {
+ if (mRow == baseRow) {
+ return;
+ }
+ // shrink the window if it would extend past either end of the screen
+ int actualWindowSize = windowSize;
+ if (baseRow < actualWindowSize) {
+ actualWindowSize = baseRow;
+ }
+ if (mRow < actualWindowSize) {
+ actualWindowSize = mRow;
+ }
+
+ int i;
+ // copy order is chosen so overlapping source/dest ranges are safe
+ if (baseRow < mRow) {
+ // copy from bottom to top row
+ for (i = actualWindowSize - 1; i >= 0; i--) {
+ mLines[baseRow - i] = mLines[mRow - i];
+ }
+ } else {
+ // copy from top to bottom row
+ for (i = 0; i < actualWindowSize; i++) {
+ mLines[baseRow - i] = mLines[mRow - i];
+ }
+ }
+ // clear rest of the rows
+ for (i = 0; i <= baseRow - windowSize; i++) {
+ mLines[i] = null;
+ }
+ for (i = baseRow + 1; i < mLines.length; i++) {
+ mLines[i] = null;
+ }
+ }
+
+ // Lazily create the line builder for a row, seeded with a blank line.
+ private CCLineBuilder getLineBuffer(int row) {
+ if (mLines[row] == null) {
+ mLines[row] = new CCLineBuilder(mBlankLine);
+ }
+ return mLines[row];
+ }
+ }
+
+ /*
+ * CCData parses the raw CC byte pair into displayable chars,
+ * misc control codes, Mid-Row or Preamble Address Codes.
+ * Each instance wraps one (type, data1, data2) triple from the raw
+ * byte stream; the getters probe what that pair encodes.
+ */
+ private static class CCData {
+ private final byte mType; // only used for logging in toString()
+ private final byte mData1;
+ private final byte mData2;
+
+ // Mnemonics for misc control codes 0x20..0x2f (see getCtrlCode()).
+ private static final String[] mCtrlCodeMap = {
+ "RCL", "BS" , "AOF", "AON",
+ "DER", "RU2", "RU3", "RU4",
+ "FON", "RDC", "TR" , "RTD",
+ "EDM", "CR" , "ENM", "EOC",
+ };
+
+ // Special chars selected by data2 in 0x30..0x3f (getSpecialChar()).
+ private static final String[] mSpecialCharMap = {
+ "\u00AE",
+ "\u00B0",
+ "\u00BD",
+ "\u00BF",
+ "\u2122",
+ "\u00A2",
+ "\u00A3",
+ "\u266A", // Eighth note
+ "\u00E0",
+ "\u00A0", // Transparent space
+ "\u00E8",
+ "\u00E2",
+ "\u00EA",
+ "\u00EE",
+ "\u00F4",
+ "\u00FB",
+ };
+
+ // Extended chars for data1 0x12/0x1A, data2 0x20..0x3f.
+ private static final String[] mSpanishCharMap = {
+ // Spanish and misc chars
+ "\u00C1", // A
+ "\u00C9", // E
+ "\u00D3", // I
+ "\u00DA", // O
+ "\u00DC", // U
+ "\u00FC", // u
+ "\u2018", // opening single quote
+ "\u00A1", // inverted exclamation mark
+ "*",
+ "'",
+ "\u2014", // em dash
+ "\u00A9", // Copyright
+ "\u2120", // Servicemark
+ "\u2022", // round bullet
+ "\u201C", // opening double quote
+ "\u201D", // closing double quote
+ // French
+ "\u00C0",
+ "\u00C2",
+ "\u00C7",
+ "\u00C8",
+ "\u00CA",
+ "\u00CB",
+ "\u00EB",
+ "\u00CE",
+ "\u00CF",
+ "\u00EF",
+ "\u00D4",
+ "\u00D9",
+ "\u00F9",
+ "\u00DB",
+ "\u00AB",
+ "\u00BB"
+ };
+
+ // Extended chars for data1 0x13/0x1B, data2 0x20..0x3f.
+ // (Renamed from the misspelled "mProtugueseCharMap".)
+ private static final String[] mPortugueseCharMap = {
+ // Portuguese
+ "\u00C3",
+ "\u00E3",
+ "\u00CD",
+ "\u00CC",
+ "\u00EC",
+ "\u00D2",
+ "\u00F2",
+ "\u00D5",
+ "\u00F5",
+ "{",
+ "}",
+ "\\",
+ "^",
+ "_",
+ "|",
+ "~",
+ // German and misc chars
+ "\u00C4",
+ "\u00E4",
+ "\u00D6",
+ "\u00F6",
+ "\u00DF",
+ "\u00A5",
+ "\u00A4",
+ "\u2502", // vertical bar
+ "\u00C5",
+ "\u00E5",
+ "\u00D8",
+ "\u00F8",
+ "\u250C", // top-left corner
+ "\u2510", // top-right corner
+ "\u2514", // lower-left corner
+ "\u2518", // lower-right corner
+ };
+
+ // Split a raw buffer into consecutive 3-byte (type, data1, data2)
+ // triples; any trailing partial triple is silently dropped.
+ static CCData[] fromByteArray(byte[] data) {
+ CCData[] ccData = new CCData[data.length / 3];
+
+ for (int i = 0; i < ccData.length; i++) {
+ ccData[i] = new CCData(
+ data[i * 3],
+ data[i * 3 + 1],
+ data[i * 3 + 2]);
+ }
+
+ return ccData;
+ }
+
+ CCData(byte type, byte data1, byte data2) {
+ mType = type;
+ mData1 = data1;
+ mData2 = data2;
+ }
+
+ // Returns the control code byte (0x20..0x2f) if this pair is a misc
+ // control code (data1 0x14/0x1c), otherwise INVALID.
+ int getCtrlCode() {
+ if ((mData1 == 0x14 || mData1 == 0x1c)
+ && mData2 >= 0x20 && mData2 <= 0x2f) {
+ return mData2;
+ }
+ return INVALID;
+ }
+
+ // Returns the mid-row style code, or null if this pair is not one.
+ StyleCode getMidRow() {
+ // only support standard Mid-row codes, ignore
+ // optional background/foreground mid-row codes
+ if ((mData1 == 0x11 || mData1 == 0x19)
+ && mData2 >= 0x20 && mData2 <= 0x2f) {
+ return StyleCode.fromByte(mData2);
+ }
+ return null;
+ }
+
+ // Returns the Preamble Address Code, or null if this pair is not one.
+ PAC getPAC() {
+ if ((mData1 & 0x70) == 0x10
+ && (mData2 & 0x40) == 0x40
+ && ((mData1 & 0x07) != 0 || (mData2 & 0x20) == 0)) {
+ return PAC.fromBytes(mData1, mData2);
+ }
+ return null;
+ }
+
+ // Returns the tab offset 1..3, or 0 if this pair is not a tab code.
+ int getTabOffset() {
+ if ((mData1 == 0x17 || mData1 == 0x1f)
+ && mData2 >= 0x21 && mData2 <= 0x23) {
+ return mData2 & 0x3;
+ }
+ return 0;
+ }
+
+ boolean isDisplayableChar() {
+ return isBasicChar() || isSpecialChar() || isExtendedChar();
+ }
+
+ // Decode this pair to displayable text: basic chars first, then the
+ // special and extended character sets. Null if not displayable.
+ String getDisplayText() {
+ String str = getBasicChars();
+
+ if (str == null) {
+ str = getSpecialChar();
+
+ if (str == null) {
+ str = getExtendedChar();
+ }
+ }
+
+ return str;
+ }
+
+ private String ctrlCodeToString(int ctrlCode) {
+ return mCtrlCodeMap[ctrlCode - 0x20];
+ }
+
+ private boolean isBasicChar() {
+ return mData1 >= 0x20 && mData1 <= 0x7f;
+ }
+
+ private boolean isSpecialChar() {
+ return ((mData1 == 0x11 || mData1 == 0x19)
+ && mData2 >= 0x30 && mData2 <= 0x3f);
+ }
+
+ private boolean isExtendedChar() {
+ return ((mData1 == 0x12 || mData1 == 0x1A
+ || mData1 == 0x13 || mData1 == 0x1B)
+ && mData2 >= 0x20 && mData2 <= 0x3f);
+ }
+
+ private char getBasicChar(byte data) {
+ char c;
+ // replace the non-ASCII ones
+ switch (data) {
+ case 0x2A: c = '\u00E1'; break;
+ case 0x5C: c = '\u00E9'; break;
+ case 0x5E: c = '\u00ED'; break;
+ case 0x5F: c = '\u00F3'; break;
+ case 0x60: c = '\u00FA'; break;
+ case 0x7B: c = '\u00E7'; break;
+ case 0x7C: c = '\u00F7'; break;
+ case 0x7D: c = '\u00D1'; break;
+ case 0x7E: c = '\u00F1'; break;
+ case 0x7F: c = '\u2588'; break; // Full block
+ default: c = (char) data; break;
+ }
+ return c;
+ }
+
+ // One or two basic chars (data2 may be a pad below 0x20), else null.
+ private String getBasicChars() {
+ if (mData1 >= 0x20 && mData1 <= 0x7f) {
+ StringBuilder builder = new StringBuilder(2);
+ builder.append(getBasicChar(mData1));
+ if (mData2 >= 0x20 && mData2 <= 0x7f) {
+ builder.append(getBasicChar(mData2));
+ }
+ return builder.toString();
+ }
+
+ return null;
+ }
+
+ private String getSpecialChar() {
+ if ((mData1 == 0x11 || mData1 == 0x19)
+ && mData2 >= 0x30 && mData2 <= 0x3f) {
+ return mSpecialCharMap[mData2 - 0x30];
+ }
+
+ return null;
+ }
+
+ private String getExtendedChar() {
+ if ((mData1 == 0x12 || mData1 == 0x1A)
+ && mData2 >= 0x20 && mData2 <= 0x3f){
+ // 1 Spanish/French char
+ return mSpanishCharMap[mData2 - 0x20];
+ } else if ((mData1 == 0x13 || mData1 == 0x1B)
+ && mData2 >= 0x20 && mData2 <= 0x3f){
+ // 1 Portuguese/German/Danish char
+ return mPortugueseCharMap[mData2 - 0x20];
+ }
+
+ return null;
+ }
+
+ // Debug dump; the order of checks mirrors the decoding precedence.
+ // (Removed an unused local "String str;".)
+ @Override
+ public String toString() {
+ if (mData1 < 0x10 && mData2 < 0x10) {
+ // Null Pad, ignore
+ return String.format("[%d]Null: %02x %02x", mType, mData1, mData2);
+ }
+
+ int ctrlCode = getCtrlCode();
+ if (ctrlCode != INVALID) {
+ return String.format("[%d]%s", mType, ctrlCodeToString(ctrlCode));
+ }
+
+ int tabOffset = getTabOffset();
+ if (tabOffset > 0) {
+ return String.format("[%d]Tab%d", mType, tabOffset);
+ }
+
+ PAC pac = getPAC();
+ if (pac != null) {
+ return String.format("[%d]PAC: %s", mType, pac.toString());
+ }
+
+ StyleCode m = getMidRow();
+ if (m != null) {
+ return String.format("[%d]Mid-row: %s", mType, m.toString());
+ }
+
+ if (isDisplayableChar()) {
+ return String.format("[%d]Displayable: %s (%02x %02x)",
+ mType, getDisplayText(), mData1, mData2);
+ }
+
+ return String.format("[%d]Invalid: %02x %02x", mType, mData1, mData2);
+ }
+ }
+}
+
+/**
+ * Widget capable of rendering CEA-608 closed captions.
+ *
+ * @hide
+ */
+class Cea608CCWidget extends ClosedCaptionWidget implements Cea608CCParser.DisplayListener {
+ // Scratch rect for text measurement and a full-width (34 char) dummy
+ // line used to derive the horizontal font scale. Statics use the
+ // Android 's' prefix (renamed from the misleading 'm' prefix).
+ private static final Rect sTextBounds = new Rect();
+ private static final String sDummyText = "1234567890123456789012345678901234";
+
+ public Cea608CCWidget(Context context) {
+ this(context, null);
+ }
+
+ public Cea608CCWidget(Context context, AttributeSet attrs) {
+ this(context, attrs, 0);
+ }
+
+ public Cea608CCWidget(Context context, AttributeSet attrs, int defStyle) {
+ this(context, attrs, defStyle, 0);
+ }
+
+ public Cea608CCWidget(Context context, AttributeSet attrs, int defStyleAttr,
+ int defStyleRes) {
+ super(context, attrs, defStyleAttr, defStyleRes);
+ }
+
+ @Override
+ public ClosedCaptionLayout createCaptionLayout(Context context) {
+ return new CCLayout(context);
+ }
+
+ // Called by the parser whenever the caption screen content changes.
+ @Override
+ public void onDisplayChanged(SpannableStringBuilder[] styledTexts) {
+ ((CCLayout) mClosedCaptionLayout).update(styledTexts);
+
+ if (mListener != null) {
+ mListener.onChanged(this);
+ }
+ }
+
+ @Override
+ public CaptionStyle getCaptionStyle() {
+ return mCaptionStyle;
+ }
+
+ // One caption row: a monospaced TextView that knows how to paint the
+ // configured caption edge style (outline, raised/depressed, shadow).
+ private static class CCLineBox extends TextView {
+ private static final float FONT_PADDING_RATIO = 0.75f;
+ private static final float EDGE_OUTLINE_RATIO = 0.1f;
+ private static final float EDGE_SHADOW_RATIO = 0.05f;
+ private float mOutlineWidth;
+ private float mShadowRadius;
+ private float mShadowOffset;
+
+ private int mTextColor = Color.WHITE;
+ private int mBgColor = Color.BLACK;
+ private int mEdgeType = CaptionStyle.EDGE_TYPE_NONE;
+ private int mEdgeColor = Color.TRANSPARENT;
+
+ CCLineBox(Context context) {
+ super(context);
+ setGravity(Gravity.CENTER);
+ setBackgroundColor(Color.TRANSPARENT);
+ setTextColor(Color.WHITE);
+ setTypeface(Typeface.MONOSPACE);
+ setVisibility(View.INVISIBLE);
+
+ final Resources res = getContext().getResources();
+
+ // get the default (will be updated later during measure)
+ mOutlineWidth = res.getDimensionPixelSize(
+ com.android.internal.R.dimen.subtitle_outline_width);
+ mShadowRadius = res.getDimensionPixelSize(
+ com.android.internal.R.dimen.subtitle_shadow_radius);
+ mShadowOffset = res.getDimensionPixelSize(
+ com.android.internal.R.dimen.subtitle_shadow_offset);
+ }
+
+ void setCaptionStyle(CaptionStyle captionStyle) {
+ mTextColor = captionStyle.foregroundColor;
+ mBgColor = captionStyle.backgroundColor;
+ mEdgeType = captionStyle.edgeType;
+ mEdgeColor = captionStyle.edgeColor;
+
+ setTextColor(mTextColor);
+ // drop shadow is handled by the normal draw pass; other edge
+ // types need the two-pass logic in onDraw()
+ if (mEdgeType == CaptionStyle.EDGE_TYPE_DROP_SHADOW) {
+ setShadowLayer(mShadowRadius, mShadowOffset, mShadowOffset, mEdgeColor);
+ } else {
+ setShadowLayer(0, 0, 0, 0);
+ }
+ invalidate();
+ }
+
+ @Override
+ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
+ float fontSize = MeasureSpec.getSize(heightMeasureSpec) * FONT_PADDING_RATIO;
+ setTextSize(TypedValue.COMPLEX_UNIT_PX, fontSize);
+
+ mOutlineWidth = EDGE_OUTLINE_RATIO * fontSize + 1.0f;
+ mShadowRadius = EDGE_SHADOW_RATIO * fontSize + 1.0f;
+ mShadowOffset = mShadowRadius;
+
+ // set font scale in the X direction to match the required width
+ setScaleX(1.0f);
+ getPaint().getTextBounds(sDummyText, 0, sDummyText.length(), sTextBounds);
+ float actualTextWidth = sTextBounds.width();
+ float requiredTextWidth = MeasureSpec.getSize(widthMeasureSpec);
+ setScaleX(requiredTextWidth / actualTextWidth);
+
+ super.onMeasure(widthMeasureSpec, heightMeasureSpec);
+ }
+
+ @Override
+ protected void onDraw(Canvas c) {
+ if (mEdgeType == CaptionStyle.EDGE_TYPE_UNSPECIFIED
+ || mEdgeType == CaptionStyle.EDGE_TYPE_NONE
+ || mEdgeType == CaptionStyle.EDGE_TYPE_DROP_SHADOW) {
+ // these edge styles don't require a second pass
+ super.onDraw(c);
+ return;
+ }
+
+ if (mEdgeType == CaptionStyle.EDGE_TYPE_OUTLINE) {
+ drawEdgeOutline(c);
+ } else {
+ // Raised or depressed
+ drawEdgeRaisedOrDepressed(c);
+ }
+ }
+
+ // Two-pass outline: stroke pass in the edge color, then a fill-only
+ // pass of the foreground with the background temporarily removed.
+ private void drawEdgeOutline(Canvas c) {
+ TextPaint textPaint = getPaint();
+
+ Paint.Style previousStyle = textPaint.getStyle();
+ Paint.Join previousJoin = textPaint.getStrokeJoin();
+ float previousWidth = textPaint.getStrokeWidth();
+
+ setTextColor(mEdgeColor);
+ textPaint.setStyle(Paint.Style.FILL_AND_STROKE);
+ textPaint.setStrokeJoin(Paint.Join.ROUND);
+ textPaint.setStrokeWidth(mOutlineWidth);
+
+ // Draw outline and background only.
+ super.onDraw(c);
+
+ // Restore original settings.
+ setTextColor(mTextColor);
+ textPaint.setStyle(previousStyle);
+ textPaint.setStrokeJoin(previousJoin);
+ textPaint.setStrokeWidth(previousWidth);
+
+ // Remove the background.
+ setBackgroundSpans(Color.TRANSPARENT);
+ // Draw foreground only.
+ super.onDraw(c);
+ // Restore the background.
+ setBackgroundSpans(mBgColor);
+ }
+
+ // Two-pass bevel: shadow up in one color, then shadow down in the
+ // other; which is light depends on raised vs depressed.
+ private void drawEdgeRaisedOrDepressed(Canvas c) {
+ TextPaint textPaint = getPaint();
+
+ Paint.Style previousStyle = textPaint.getStyle();
+ textPaint.setStyle(Paint.Style.FILL);
+
+ final boolean raised = mEdgeType == CaptionStyle.EDGE_TYPE_RAISED;
+ final int colorUp = raised ? Color.WHITE : mEdgeColor;
+ final int colorDown = raised ? mEdgeColor : Color.WHITE;
+ final float offset = mShadowRadius / 2f;
+
+ // Draw background and text with shadow up
+ setShadowLayer(mShadowRadius, -offset, -offset, colorUp);
+ super.onDraw(c);
+
+ // Remove the background.
+ setBackgroundSpans(Color.TRANSPARENT);
+
+ // Draw text with shadow down
+ setShadowLayer(mShadowRadius, +offset, +offset, colorDown);
+ super.onDraw(c);
+
+ // Restore settings
+ textPaint.setStyle(previousStyle);
+
+ // Restore the background.
+ setBackgroundSpans(mBgColor);
+ }
+
+ // Recolor every mutable background span in the current text.
+ private void setBackgroundSpans(int color) {
+ CharSequence text = getText();
+ if (text instanceof Spannable) {
+ Spannable spannable = (Spannable) text;
+ Cea608CCParser.MutableBackgroundColorSpan[] bgSpans = spannable.getSpans(
+ 0, spannable.length(), Cea608CCParser.MutableBackgroundColorSpan.class);
+ for (int i = 0; i < bgSpans.length; i++) {
+ bgSpans[i].setBackgroundColor(color);
+ }
+ }
+ }
+ }
+
+ // Vertical stack of MAX_ROWS CCLineBoxes laid out inside the 4:3
+ // safe caption area of the view port.
+ private static class CCLayout extends LinearLayout implements ClosedCaptionLayout {
+ private static final int MAX_ROWS = Cea608CCParser.MAX_ROWS;
+ private static final float SAFE_AREA_RATIO = 0.9f;
+
+ private final CCLineBox[] mLineBoxes = new CCLineBox[MAX_ROWS];
+
+ CCLayout(Context context) {
+ super(context);
+ setGravity(Gravity.START);
+ setOrientation(LinearLayout.VERTICAL);
+ for (int i = 0; i < MAX_ROWS; i++) {
+ mLineBoxes[i] = new CCLineBox(getContext());
+ addView(mLineBoxes[i], LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
+ }
+ }
+
+ @Override
+ public void setCaptionStyle(CaptionStyle captionStyle) {
+ for (int i = 0; i < MAX_ROWS; i++) {
+ mLineBoxes[i].setCaptionStyle(captionStyle);
+ }
+ }
+
+ @Override
+ public void setFontScale(float fontScale) {
+ // Ignores the font scale changes of the system wide CC preference.
+ }
+
+ // Show rows with content; hide (not remove) blank rows.
+ void update(SpannableStringBuilder[] textBuffer) {
+ for (int i = 0; i < MAX_ROWS; i++) {
+ if (textBuffer[i] != null) {
+ mLineBoxes[i].setText(textBuffer[i], TextView.BufferType.SPANNABLE);
+ mLineBoxes[i].setVisibility(View.VISIBLE);
+ } else {
+ mLineBoxes[i].setVisibility(View.INVISIBLE);
+ }
+ }
+ }
+
+ @Override
+ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
+ super.onMeasure(widthMeasureSpec, heightMeasureSpec);
+
+ int safeWidth = getMeasuredWidth();
+ int safeHeight = getMeasuredHeight();
+
+ // CEA-608 assumes 4:3 video
+ if (safeWidth * 3 >= safeHeight * 4) {
+ safeWidth = safeHeight * 4 / 3;
+ } else {
+ safeHeight = safeWidth * 3 / 4;
+ }
+ safeWidth *= SAFE_AREA_RATIO;
+ safeHeight *= SAFE_AREA_RATIO;
+
+ int lineHeight = safeHeight / MAX_ROWS;
+ int lineHeightMeasureSpec = MeasureSpec.makeMeasureSpec(
+ lineHeight, MeasureSpec.EXACTLY);
+ int lineWidthMeasureSpec = MeasureSpec.makeMeasureSpec(
+ safeWidth, MeasureSpec.EXACTLY);
+
+ for (int i = 0; i < MAX_ROWS; i++) {
+ mLineBoxes[i].measure(lineWidthMeasureSpec, lineHeightMeasureSpec);
+ }
+ }
+
+ @Override
+ protected void onLayout(boolean changed, int l, int t, int r, int b) {
+ // safe caption area
+ int viewPortWidth = r - l;
+ int viewPortHeight = b - t;
+ int safeWidth, safeHeight;
+ // CEA-608 assumes 4:3 video
+ if (viewPortWidth * 3 >= viewPortHeight * 4) {
+ safeWidth = viewPortHeight * 4 / 3;
+ safeHeight = viewPortHeight;
+ } else {
+ safeWidth = viewPortWidth;
+ safeHeight = viewPortWidth * 3 / 4;
+ }
+ safeWidth *= SAFE_AREA_RATIO;
+ safeHeight *= SAFE_AREA_RATIO;
+ int left = (viewPortWidth - safeWidth) / 2;
+ int top = (viewPortHeight - safeHeight) / 2;
+
+ for (int i = 0; i < MAX_ROWS; i++) {
+ mLineBoxes[i].layout(
+ left,
+ top + safeHeight * i / MAX_ROWS,
+ left + safeWidth,
+ top + safeHeight * (i + 1) / MAX_ROWS);
+ }
+ }
+ }
+}
diff --git a/android/media/DecoderCapabilities.java b/android/media/DecoderCapabilities.java
new file mode 100644
index 00000000..f16cccfb
--- /dev/null
+++ b/android/media/DecoderCapabilities.java
@@ -0,0 +1,84 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import java.util.List;
+import java.util.ArrayList;
+
+/**
+ * {@hide}
+ *
+ * The DecoderCapabilities class is used to retrieve the types of the
+ * video and audio decoder(s) supported on a specific Android platform.
+ */
+public class DecoderCapabilities
+{
+ /**
+ * The VideoDecoder class represents the type of a video decoder
+ *
+ */
+ public enum VideoDecoder {
+ VIDEO_DECODER_WMV,
+ };
+
+ /**
+ * The AudioDecoder class represents the type of an audio decoder
+ */
+ public enum AudioDecoder {
+ AUDIO_DECODER_WMA,
+ };
+
+ static {
+ // the native_* methods below are implemented in libmedia_jni
+ System.loadLibrary("media_jni");
+ native_init();
+ }
+
+ /**
+ * Returns the list of video decoder types
+ * (possibly empty, never null).
+ * @see android.media.DecoderCapabilities.VideoDecoder
+ */
+ public static List<VideoDecoder> getVideoDecoders() {
+ List<VideoDecoder> decoderList = new ArrayList<VideoDecoder>();
+ int nDecoders = native_get_num_video_decoders();
+ for (int i = 0; i < nDecoders; ++i) {
+ // the native call returns an ordinal into VideoDecoder.values()
+ decoderList.add(VideoDecoder.values()[native_get_video_decoder_type(i)]);
+ }
+ return decoderList;
+ }
+
+ /**
+ * Returns the list of audio decoder types
+ * (possibly empty, never null).
+ * @see android.media.DecoderCapabilities.AudioDecoder
+ */
+ public static List<AudioDecoder> getAudioDecoders() {
+ List<AudioDecoder> decoderList = new ArrayList<AudioDecoder>();
+ int nDecoders = native_get_num_audio_decoders();
+ for (int i = 0; i < nDecoders; ++i) {
+ // the native call returns an ordinal into AudioDecoder.values()
+ decoderList.add(AudioDecoder.values()[native_get_audio_decoder_type(i)]);
+ }
+ return decoderList;
+ }
+
+ private DecoderCapabilities() {} // Don't call me
+
+ // Implemented by JNI
+ private static native final void native_init();
+ private static native final int native_get_num_video_decoders();
+ private static native final int native_get_video_decoder_type(int index);
+ private static native final int native_get_num_audio_decoders();
+ private static native final int native_get_audio_decoder_type(int index);
+}
diff --git a/android/media/DeniedByServerException.java b/android/media/DeniedByServerException.java
new file mode 100644
index 00000000..9c1633ad
--- /dev/null
+++ b/android/media/DeniedByServerException.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * Exception thrown when the provisioning server or key server denies a
+ * certificate or license for a device.
+ */
+public final class DeniedByServerException extends MediaDrmException {
+ public DeniedByServerException(String detailMessage) {
+ super(detailMessage);
+ }
+}
diff --git a/android/media/DrmInitData.java b/android/media/DrmInitData.java
new file mode 100644
index 00000000..170d9de9
--- /dev/null
+++ b/android/media/DrmInitData.java
@@ -0,0 +1,89 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+import android.media.MediaDrm;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.UUID;
+
+/**
+ * Encapsulates initialization data required by a {@link MediaDrm} instance.
+ */
+public abstract class DrmInitData {
+
+ /**
+ * Prevent public constructor access
+ */
+ /* package private */ DrmInitData() {
+ }
+
+ /**
+ * Retrieves initialization data for a given DRM scheme, specified by its UUID.
+ *
+ * @param schemeUuid The DRM scheme's UUID.
+ * @return The initialization data for the scheme, or null if the scheme is not supported.
+ */
+ public abstract SchemeInitData get(UUID schemeUuid);
+
+ /**
+ * Scheme initialization data.
+ * Immutable value type compared by mimeType and data contents.
+ */
+ public static final class SchemeInitData {
+
+ /**
+ * The mimeType of {@link #data}.
+ */
+ public final String mimeType;
+ /**
+ * The initialization data.
+ */
+ public final byte[] data;
+
+ /**
+ * @param mimeType The mimeType of the initialization data.
+ * @param data The initialization data.
+ *
+ * @hide
+ */
+ public SchemeInitData(String mimeType, byte[] data) {
+ this.mimeType = mimeType;
+ this.data = data;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (!(obj instanceof SchemeInitData)) {
+ return false;
+ }
+ if (obj == this) {
+ return true;
+ }
+
+ SchemeInitData other = (SchemeInitData) obj;
+ return mimeType.equals(other.mimeType) && Arrays.equals(data, other.data);
+ }
+
+ @Override
+ public int hashCode() {
+ // consistent with equals(): combines mimeType and data contents
+ return mimeType.hashCode() + 31 * Arrays.hashCode(data);
+ }
+
+ }
+
+}
diff --git a/android/media/EncoderCapabilities.java b/android/media/EncoderCapabilities.java
new file mode 100644
index 00000000..332e3604
--- /dev/null
+++ b/android/media/EncoderCapabilities.java
@@ -0,0 +1,163 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import java.util.List;
+import java.util.ArrayList;
+
+/**
+ * The EncoderCapabilities class is used to retrieve the
+ * capabilities for different video and audio
+ * encoders supported on a specific Android platform.
+ * {@hide}
+ */
+public class EncoderCapabilities
+{
+ private static final String TAG = "EncoderCapabilities";
+
+ /**
+ * The VideoEncoderCap class represents a video encoder's
+ * supported parameter range in:
+ *
+ * <ul>
+ * <li>Resolution: the frame size (width/height) in pixels;
+ * <li>Bit rate: the compressed output bit rate in bits per second;
+ * <li>Frame rate: the output number of frames per second.
+ * </ul>
+ *
+ */
+ static public class VideoEncoderCap {
+ // These are not modifiable externally, thus are public accessible
+ public final int mCodec; // @see android.media.MediaRecorder.VideoEncoder
+ public final int mMinBitRate, mMaxBitRate; // min and max bit rate (bps)
+ public final int mMinFrameRate, mMaxFrameRate; // min and max frame rate (fps)
+ public final int mMinFrameWidth, mMaxFrameWidth; // min and max frame width (pixel)
+ public final int mMinFrameHeight, mMaxFrameHeight; // min and max frame height (pixel)
+
+ // Private constructor called by JNI
+ private VideoEncoderCap(int codec,
+ int minBitRate, int maxBitRate,
+ int minFrameRate, int maxFrameRate,
+ int minFrameWidth, int maxFrameWidth,
+ int minFrameHeight, int maxFrameHeight) {
+ mCodec = codec;
+ mMinBitRate = minBitRate;
+ mMaxBitRate = maxBitRate;
+ mMinFrameRate = minFrameRate;
+ mMaxFrameRate = maxFrameRate;
+ mMinFrameWidth = minFrameWidth;
+ mMaxFrameWidth = maxFrameWidth;
+ mMinFrameHeight = minFrameHeight;
+ mMaxFrameHeight = maxFrameHeight;
+ }
+ };
+
+ /**
+ * The AudioEncoderCap class represents an audio encoder's
+ * parameter range in:
+ *
+ * <ul>
+ * <li>Bit rate: the compressed output bit rate in bits per second;
+ * <li>Sample rate: the sampling rate used for recording the audio in samples per second;
+ * <li>Number of channels: the number of channels the audio is recorded.
+ * </ul>
+ *
+ */
+ static public class AudioEncoderCap {
+ // These are not modifiable externally, thus are public accessible
+ public final int mCodec; // @see android.media.MediaRecorder.AudioEncoder
+ public final int mMinChannels, mMaxChannels; // min and max number of channels
+ public final int mMinSampleRate, mMaxSampleRate; // min and max sample rate (hz)
+ public final int mMinBitRate, mMaxBitRate; // min and max bit rate (bps)
+
+ // Private constructor called by JNI
+ private AudioEncoderCap(int codec,
+ int minBitRate, int maxBitRate,
+ int minSampleRate, int maxSampleRate,
+ int minChannels, int maxChannels) {
+ mCodec = codec;
+ mMinBitRate = minBitRate;
+ mMaxBitRate = maxBitRate;
+ mMinSampleRate = minSampleRate;
+ mMaxSampleRate = maxSampleRate;
+ mMinChannels = minChannels;
+ mMaxChannels = maxChannels;
+ }
+ };
+
+ static {
+ // the native_* methods below are implemented in libmedia_jni
+ System.loadLibrary("media_jni");
+ native_init();
+ }
+
+ /**
+ * Returns the array of supported output file formats,
+ * or null if none are reported by the native layer.
+ * @see android.media.MediaRecorder.OutputFormat
+ */
+ public static int[] getOutputFileFormats() {
+ int nFormats = native_get_num_file_formats();
+ if (nFormats == 0) return null;
+
+ int[] formats = new int[nFormats];
+ for (int i = 0; i < nFormats; ++i) {
+ formats[i] = native_get_file_format(i);
+ }
+ return formats;
+ }
+
+ /**
+ * Returns the capabilities of the supported video encoders,
+ * or null if none are reported by the native layer.
+ * @see android.media.EncoderCapabilities.VideoEncoderCap
+ */
+ public static List<VideoEncoderCap> getVideoEncoders() {
+ int nEncoders = native_get_num_video_encoders();
+ if (nEncoders == 0) return null;
+
+ List<VideoEncoderCap> encoderList = new ArrayList<VideoEncoderCap>();
+ for (int i = 0; i < nEncoders; ++i) {
+ encoderList.add(native_get_video_encoder_cap(i));
+ }
+ return encoderList;
+ }
+
+ /**
+ * Returns the capabilities of the supported audio encoders,
+ * or null if none are reported by the native layer.
+ * @see android.media.EncoderCapabilities.AudioEncoderCap
+ */
+ public static List<AudioEncoderCap> getAudioEncoders() {
+ int nEncoders = native_get_num_audio_encoders();
+ if (nEncoders == 0) return null;
+
+ List<AudioEncoderCap> encoderList = new ArrayList<AudioEncoderCap>();
+ for (int i = 0; i < nEncoders; ++i) {
+ encoderList.add(native_get_audio_encoder_cap(i));
+ }
+ return encoderList;
+ }
+
+
+ private EncoderCapabilities() {} // Don't call me
+
+ // Implemented by JNI
+ private static native final void native_init();
+ private static native final int native_get_num_file_formats();
+ private static native final int native_get_file_format(int index);
+ private static native final int native_get_num_video_encoders();
+ private static native final VideoEncoderCap native_get_video_encoder_cap(int index);
+ private static native final int native_get_num_audio_encoders();
+ private static native final AudioEncoderCap native_get_audio_encoder_cap(int index);
+}
diff --git a/android/media/ExifInterface.java b/android/media/ExifInterface.java
new file mode 100644
index 00000000..1f5edfa0
--- /dev/null
+++ b/android/media/ExifInterface.java
@@ -0,0 +1,4014 @@
+/*
+ * Copyright (C) 2007 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.NonNull;
+import android.content.res.AssetManager;
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.system.ErrnoException;
+import android.system.Os;
+import android.system.OsConstants;
+import android.util.Log;
+import android.util.Pair;
+import android.annotation.IntDef;
+
+import java.io.BufferedInputStream;
+import java.io.ByteArrayInputStream;
+import java.io.DataInputStream;
+import java.io.DataInput;
+import java.io.EOFException;
+import java.io.File;
+import java.io.FileDescriptor;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.FilterOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.charset.Charset;
+import java.nio.charset.StandardCharsets;
+import java.text.ParsePosition;
+import java.text.SimpleDateFormat;
+import java.util.Arrays;
+import java.util.LinkedList;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+import java.util.TimeZone;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+import libcore.io.IoUtils;
+import libcore.io.Streams;
+
+/**
+ * This is a class for reading and writing Exif tags in a JPEG file or a RAW image file.
+ * <p>
+ * Supported formats are: JPEG, DNG, CR2, NEF, NRW, ARW, RW2, ORF, PEF, SRW and RAF.
+ * <p>
+ * Attribute mutation is supported for JPEG image files.
+ */
+public class ExifInterface {
+ private static final String TAG = "ExifInterface";
+ private static final boolean DEBUG = false;
+
+ // The Exif tag names. See Tiff 6.0 Section 3 and Section 8.
+ /** Type is String. */
+ public static final String TAG_ARTIST = "Artist";
+ /** Type is int. */
+ public static final String TAG_BITS_PER_SAMPLE = "BitsPerSample";
+ /** Type is int. */
+ public static final String TAG_COMPRESSION = "Compression";
+ /** Type is String. */
+ public static final String TAG_COPYRIGHT = "Copyright";
+ /** Type is String. */
+ public static final String TAG_DATETIME = "DateTime";
+ /** Type is String. */
+ public static final String TAG_IMAGE_DESCRIPTION = "ImageDescription";
+ /** Type is int. */
+ public static final String TAG_IMAGE_LENGTH = "ImageLength";
+ /** Type is int. */
+ public static final String TAG_IMAGE_WIDTH = "ImageWidth";
+ /** Type is int. */
+ public static final String TAG_JPEG_INTERCHANGE_FORMAT = "JPEGInterchangeFormat";
+ /** Type is int. */
+ public static final String TAG_JPEG_INTERCHANGE_FORMAT_LENGTH = "JPEGInterchangeFormatLength";
+ /** Type is String. */
+ public static final String TAG_MAKE = "Make";
+ /** Type is String. */
+ public static final String TAG_MODEL = "Model";
+ /** Type is int. */
+ public static final String TAG_ORIENTATION = "Orientation";
+ /** Type is int. */
+ public static final String TAG_PHOTOMETRIC_INTERPRETATION = "PhotometricInterpretation";
+ /** Type is int. */
+ public static final String TAG_PLANAR_CONFIGURATION = "PlanarConfiguration";
+ /** Type is rational. */
+ public static final String TAG_PRIMARY_CHROMATICITIES = "PrimaryChromaticities";
+ /** Type is rational. */
+ public static final String TAG_REFERENCE_BLACK_WHITE = "ReferenceBlackWhite";
+ /** Type is int. */
+ public static final String TAG_RESOLUTION_UNIT = "ResolutionUnit";
+ /** Type is int. */
+ public static final String TAG_ROWS_PER_STRIP = "RowsPerStrip";
+ /** Type is int. */
+ public static final String TAG_SAMPLES_PER_PIXEL = "SamplesPerPixel";
+ /** Type is String. */
+ public static final String TAG_SOFTWARE = "Software";
+ /** Type is int. */
+ public static final String TAG_STRIP_BYTE_COUNTS = "StripByteCounts";
+ /** Type is int. */
+ public static final String TAG_STRIP_OFFSETS = "StripOffsets";
+ /** Type is int. */
+ public static final String TAG_TRANSFER_FUNCTION = "TransferFunction";
+ /** Type is rational. */
+ public static final String TAG_WHITE_POINT = "WhitePoint";
+ /** Type is rational. */
+ public static final String TAG_X_RESOLUTION = "XResolution";
+ /** Type is rational. */
+ public static final String TAG_Y_CB_CR_COEFFICIENTS = "YCbCrCoefficients";
+ /** Type is int. */
+ public static final String TAG_Y_CB_CR_POSITIONING = "YCbCrPositioning";
+ /** Type is int. */
+ public static final String TAG_Y_CB_CR_SUB_SAMPLING = "YCbCrSubSampling";
+ /** Type is rational. */
+ public static final String TAG_Y_RESOLUTION = "YResolution";
+ /** Type is rational. */
+ public static final String TAG_APERTURE_VALUE = "ApertureValue";
+ /** Type is rational. */
+ public static final String TAG_BRIGHTNESS_VALUE = "BrightnessValue";
+ /** Type is String. */
+ public static final String TAG_CFA_PATTERN = "CFAPattern";
+ /** Type is int. */
+ public static final String TAG_COLOR_SPACE = "ColorSpace";
+ /** Type is String. */
+ public static final String TAG_COMPONENTS_CONFIGURATION = "ComponentsConfiguration";
+ /** Type is rational. */
+ public static final String TAG_COMPRESSED_BITS_PER_PIXEL = "CompressedBitsPerPixel";
+ /** Type is int. */
+ public static final String TAG_CONTRAST = "Contrast";
+ /** Type is int. */
+ public static final String TAG_CUSTOM_RENDERED = "CustomRendered";
+ /** Type is String. */
+ public static final String TAG_DATETIME_DIGITIZED = "DateTimeDigitized";
+ /** Type is String. */
+ public static final String TAG_DATETIME_ORIGINAL = "DateTimeOriginal";
+ /** Type is String. */
+ public static final String TAG_DEVICE_SETTING_DESCRIPTION = "DeviceSettingDescription";
+ /** Type is double. */
+ public static final String TAG_DIGITAL_ZOOM_RATIO = "DigitalZoomRatio";
+ /** Type is String. */
+ public static final String TAG_EXIF_VERSION = "ExifVersion";
+ /** Type is double. */
+ public static final String TAG_EXPOSURE_BIAS_VALUE = "ExposureBiasValue";
+ /** Type is rational. */
+ public static final String TAG_EXPOSURE_INDEX = "ExposureIndex";
+ /** Type is int. */
+ public static final String TAG_EXPOSURE_MODE = "ExposureMode";
+ /** Type is int. */
+ public static final String TAG_EXPOSURE_PROGRAM = "ExposureProgram";
+ /** Type is double. */
+ public static final String TAG_EXPOSURE_TIME = "ExposureTime";
+ /** Type is double. */
+ public static final String TAG_F_NUMBER = "FNumber";
+ /**
+ * Type is double.
+ *
+ * @deprecated use {@link #TAG_F_NUMBER} instead
+ */
+ @Deprecated
+ public static final String TAG_APERTURE = "FNumber";
+ /** Type is String. */
+ public static final String TAG_FILE_SOURCE = "FileSource";
+ /** Type is int. */
+ public static final String TAG_FLASH = "Flash";
+ /** Type is rational. */
+ public static final String TAG_FLASH_ENERGY = "FlashEnergy";
+ /** Type is String. */
+ public static final String TAG_FLASHPIX_VERSION = "FlashpixVersion";
+ /** Type is rational. */
+ public static final String TAG_FOCAL_LENGTH = "FocalLength";
+ /** Type is int. */
+ public static final String TAG_FOCAL_LENGTH_IN_35MM_FILM = "FocalLengthIn35mmFilm";
+ /** Type is int. */
+ public static final String TAG_FOCAL_PLANE_RESOLUTION_UNIT = "FocalPlaneResolutionUnit";
+ /** Type is rational. */
+ public static final String TAG_FOCAL_PLANE_X_RESOLUTION = "FocalPlaneXResolution";
+ /** Type is rational. */
+ public static final String TAG_FOCAL_PLANE_Y_RESOLUTION = "FocalPlaneYResolution";
+ /** Type is int. */
+ public static final String TAG_GAIN_CONTROL = "GainControl";
+ /** Type is int. */
+ public static final String TAG_ISO_SPEED_RATINGS = "ISOSpeedRatings";
+ /**
+ * Type is int.
+ *
+ * @deprecated use {@link #TAG_ISO_SPEED_RATINGS} instead
+ */
+ @Deprecated
+ public static final String TAG_ISO = "ISOSpeedRatings";
+ /** Type is String. */
+ public static final String TAG_IMAGE_UNIQUE_ID = "ImageUniqueID";
+ /** Type is int. */
+ public static final String TAG_LIGHT_SOURCE = "LightSource";
+ /** Type is String. */
+ public static final String TAG_MAKER_NOTE = "MakerNote";
+ /** Type is rational. */
+ public static final String TAG_MAX_APERTURE_VALUE = "MaxApertureValue";
+ /** Type is int. */
+ public static final String TAG_METERING_MODE = "MeteringMode";
+ /** Type is int. */
+ public static final String TAG_NEW_SUBFILE_TYPE = "NewSubfileType";
+ /** Type is String. */
+ public static final String TAG_OECF = "OECF";
+ /** Type is int. */
+ public static final String TAG_PIXEL_X_DIMENSION = "PixelXDimension";
+ /** Type is int. */
+ public static final String TAG_PIXEL_Y_DIMENSION = "PixelYDimension";
+ /** Type is String. */
+ public static final String TAG_RELATED_SOUND_FILE = "RelatedSoundFile";
+ /** Type is int. */
+ public static final String TAG_SATURATION = "Saturation";
+ /** Type is int. */
+ public static final String TAG_SCENE_CAPTURE_TYPE = "SceneCaptureType";
+ /** Type is String. */
+ public static final String TAG_SCENE_TYPE = "SceneType";
+ /** Type is int. */
+ public static final String TAG_SENSING_METHOD = "SensingMethod";
+ /** Type is int. */
+ public static final String TAG_SHARPNESS = "Sharpness";
+ /** Type is rational. */
+ public static final String TAG_SHUTTER_SPEED_VALUE = "ShutterSpeedValue";
+ /** Type is String. */
+ public static final String TAG_SPATIAL_FREQUENCY_RESPONSE = "SpatialFrequencyResponse";
+ /** Type is String. */
+ public static final String TAG_SPECTRAL_SENSITIVITY = "SpectralSensitivity";
+ /** Type is int. */
+ public static final String TAG_SUBFILE_TYPE = "SubfileType";
+ /** Type is String. */
+ public static final String TAG_SUBSEC_TIME = "SubSecTime";
+ /**
+ * Type is String.
+ *
+ * @deprecated use {@link #TAG_SUBSEC_TIME_DIGITIZED} instead
+ */
+ public static final String TAG_SUBSEC_TIME_DIG = "SubSecTimeDigitized";
+ /** Type is String. */
+ public static final String TAG_SUBSEC_TIME_DIGITIZED = "SubSecTimeDigitized";
+ /**
+ * Type is String.
+ *
+ * @deprecated use {@link #TAG_SUBSEC_TIME_ORIGINAL} instead
+ */
+ public static final String TAG_SUBSEC_TIME_ORIG = "SubSecTimeOriginal";
+ /** Type is String. */
+ public static final String TAG_SUBSEC_TIME_ORIGINAL = "SubSecTimeOriginal";
+ /** Type is int. */
+ public static final String TAG_SUBJECT_AREA = "SubjectArea";
+ /** Type is double. */
+ public static final String TAG_SUBJECT_DISTANCE = "SubjectDistance";
+ /** Type is int. */
+ public static final String TAG_SUBJECT_DISTANCE_RANGE = "SubjectDistanceRange";
+ /** Type is int. */
+ public static final String TAG_SUBJECT_LOCATION = "SubjectLocation";
+ /** Type is String. */
+ public static final String TAG_USER_COMMENT = "UserComment";
+ /** Type is int. */
+ public static final String TAG_WHITE_BALANCE = "WhiteBalance";
+ /**
+ * The altitude (in meters) based on the reference in TAG_GPS_ALTITUDE_REF.
+ * Type is rational.
+ */
+ public static final String TAG_GPS_ALTITUDE = "GPSAltitude";
+ /**
+ * 0 if the altitude is above sea level. 1 if the altitude is below sea
+ * level. Type is int.
+ */
+ public static final String TAG_GPS_ALTITUDE_REF = "GPSAltitudeRef";
+ /** Type is String. */
+ public static final String TAG_GPS_AREA_INFORMATION = "GPSAreaInformation";
+ /** Type is rational. */
+ public static final String TAG_GPS_DOP = "GPSDOP";
+ /** Type is String. */
+ public static final String TAG_GPS_DATESTAMP = "GPSDateStamp";
+ /** Type is rational. */
+ public static final String TAG_GPS_DEST_BEARING = "GPSDestBearing";
+ /** Type is String. */
+ public static final String TAG_GPS_DEST_BEARING_REF = "GPSDestBearingRef";
+ /** Type is rational. */
+ public static final String TAG_GPS_DEST_DISTANCE = "GPSDestDistance";
+ /** Type is String. */
+ public static final String TAG_GPS_DEST_DISTANCE_REF = "GPSDestDistanceRef";
+ /** Type is rational. */
+ public static final String TAG_GPS_DEST_LATITUDE = "GPSDestLatitude";
+ /** Type is String. */
+ public static final String TAG_GPS_DEST_LATITUDE_REF = "GPSDestLatitudeRef";
+ /** Type is rational. */
+ public static final String TAG_GPS_DEST_LONGITUDE = "GPSDestLongitude";
+ /** Type is String. */
+ public static final String TAG_GPS_DEST_LONGITUDE_REF = "GPSDestLongitudeRef";
+ /** Type is int. */
+ public static final String TAG_GPS_DIFFERENTIAL = "GPSDifferential";
+ /** Type is rational. */
+ public static final String TAG_GPS_IMG_DIRECTION = "GPSImgDirection";
+ /** Type is String. */
+ public static final String TAG_GPS_IMG_DIRECTION_REF = "GPSImgDirectionRef";
+ /** Type is rational. Format is "num1/denom1,num2/denom2,num3/denom3". */
+ public static final String TAG_GPS_LATITUDE = "GPSLatitude";
+ /** Type is String. */
+ public static final String TAG_GPS_LATITUDE_REF = "GPSLatitudeRef";
+ /** Type is rational. Format is "num1/denom1,num2/denom2,num3/denom3". */
+ public static final String TAG_GPS_LONGITUDE = "GPSLongitude";
+ /** Type is String. */
+ public static final String TAG_GPS_LONGITUDE_REF = "GPSLongitudeRef";
+ /** Type is String. */
+ public static final String TAG_GPS_MAP_DATUM = "GPSMapDatum";
+ /** Type is String. */
+ public static final String TAG_GPS_MEASURE_MODE = "GPSMeasureMode";
+ /** Type is String. Name of GPS processing method used for location finding. */
+ public static final String TAG_GPS_PROCESSING_METHOD = "GPSProcessingMethod";
+ /** Type is String. */
+ public static final String TAG_GPS_SATELLITES = "GPSSatellites";
+ /** Type is rational. */
+ public static final String TAG_GPS_SPEED = "GPSSpeed";
+ /** Type is String. */
+ public static final String TAG_GPS_SPEED_REF = "GPSSpeedRef";
+ /** Type is String. */
+ public static final String TAG_GPS_STATUS = "GPSStatus";
+ /** Type is String. Format is "hh:mm:ss". */
+ public static final String TAG_GPS_TIMESTAMP = "GPSTimeStamp";
+ /** Type is rational. */
+ public static final String TAG_GPS_TRACK = "GPSTrack";
+ /** Type is String. */
+ public static final String TAG_GPS_TRACK_REF = "GPSTrackRef";
+ /** Type is String. */
+ public static final String TAG_GPS_VERSION_ID = "GPSVersionID";
+ /** Type is String. */
+ public static final String TAG_INTEROPERABILITY_INDEX = "InteroperabilityIndex";
+ /** Type is int. */
+ public static final String TAG_THUMBNAIL_IMAGE_LENGTH = "ThumbnailImageLength";
+ /** Type is int. */
+ public static final String TAG_THUMBNAIL_IMAGE_WIDTH = "ThumbnailImageWidth";
+ /** Type is int. DNG Specification 1.4.0.0. Section 4 */
+ public static final String TAG_DNG_VERSION = "DNGVersion";
+ /** Type is int. DNG Specification 1.4.0.0. Section 4 */
+ public static final String TAG_DEFAULT_CROP_SIZE = "DefaultCropSize";
+ /** Type is undefined. See Olympus MakerNote tags in http://www.exiv2.org/tags-olympus.html. */
+ public static final String TAG_ORF_THUMBNAIL_IMAGE = "ThumbnailImage";
+ /** Type is int. See Olympus Camera Settings tags in http://www.exiv2.org/tags-olympus.html. */
+ public static final String TAG_ORF_PREVIEW_IMAGE_START = "PreviewImageStart";
+ /** Type is int. See Olympus Camera Settings tags in http://www.exiv2.org/tags-olympus.html. */
+ public static final String TAG_ORF_PREVIEW_IMAGE_LENGTH = "PreviewImageLength";
+ /** Type is int. See Olympus Image Processing tags in http://www.exiv2.org/tags-olympus.html. */
+ public static final String TAG_ORF_ASPECT_FRAME = "AspectFrame";
+ /**
+ * Type is int. See PanasonicRaw tags in
+ * http://www.sno.phy.queensu.ca/~phil/exiftool/TagNames/PanasonicRaw.html
+ */
+ public static final String TAG_RW2_SENSOR_BOTTOM_BORDER = "SensorBottomBorder";
+ /**
+ * Type is int. See PanasonicRaw tags in
+ * http://www.sno.phy.queensu.ca/~phil/exiftool/TagNames/PanasonicRaw.html
+ */
+ public static final String TAG_RW2_SENSOR_LEFT_BORDER = "SensorLeftBorder";
+ /**
+ * Type is int. See PanasonicRaw tags in
+ * http://www.sno.phy.queensu.ca/~phil/exiftool/TagNames/PanasonicRaw.html
+ */
+ public static final String TAG_RW2_SENSOR_RIGHT_BORDER = "SensorRightBorder";
+ /**
+ * Type is int. See PanasonicRaw tags in
+ * http://www.sno.phy.queensu.ca/~phil/exiftool/TagNames/PanasonicRaw.html
+ */
+ public static final String TAG_RW2_SENSOR_TOP_BORDER = "SensorTopBorder";
+ /**
+ * Type is int. See PanasonicRaw tags in
+ * http://www.sno.phy.queensu.ca/~phil/exiftool/TagNames/PanasonicRaw.html
+ */
+ public static final String TAG_RW2_ISO = "ISO";
+ /**
+ * Type is undefined. See PanasonicRaw tags in
+ * http://www.sno.phy.queensu.ca/~phil/exiftool/TagNames/PanasonicRaw.html
+ */
+ public static final String TAG_RW2_JPG_FROM_RAW = "JpgFromRaw";
+
+ /**
+ * Private tags used for pointing the other IFD offsets.
+ * The types of the following tags are int.
+ * See JEITA CP-3451C Section 4.6.3: Exif-specific IFD.
+ * For SubIFD, see Note 1 of Adobe PageMaker® 6.0 TIFF Technical Notes.
+ */
+ private static final String TAG_EXIF_IFD_POINTER = "ExifIFDPointer";
+ private static final String TAG_GPS_INFO_IFD_POINTER = "GPSInfoIFDPointer";
+ private static final String TAG_INTEROPERABILITY_IFD_POINTER = "InteroperabilityIFDPointer";
+ private static final String TAG_SUB_IFD_POINTER = "SubIFDPointer";
+ // Proprietary pointer tags used for ORF files.
+ // See http://www.exiv2.org/tags-olympus.html
+ private static final String TAG_ORF_CAMERA_SETTINGS_IFD_POINTER = "CameraSettingsIFDPointer";
+ private static final String TAG_ORF_IMAGE_PROCESSING_IFD_POINTER = "ImageProcessingIFDPointer";
+
+ // Private tags used for thumbnail information.
+ private static final String TAG_HAS_THUMBNAIL = "HasThumbnail";
+ private static final String TAG_THUMBNAIL_OFFSET = "ThumbnailOffset";
+ private static final String TAG_THUMBNAIL_LENGTH = "ThumbnailLength";
+ private static final String TAG_THUMBNAIL_DATA = "ThumbnailData";
+ private static final int MAX_THUMBNAIL_SIZE = 512;
+
+ // Constants used for the Orientation Exif tag.
+ public static final int ORIENTATION_UNDEFINED = 0;
+ public static final int ORIENTATION_NORMAL = 1;
+ public static final int ORIENTATION_FLIP_HORIZONTAL = 2; // left right reversed mirror
+ public static final int ORIENTATION_ROTATE_180 = 3;
+ public static final int ORIENTATION_FLIP_VERTICAL = 4; // upside down mirror
+ // flipped about top-left <--> bottom-right axis
+ public static final int ORIENTATION_TRANSPOSE = 5;
+ public static final int ORIENTATION_ROTATE_90 = 6; // rotate 90 cw to right it
+ // flipped about top-right <--> bottom-left axis
+ public static final int ORIENTATION_TRANSVERSE = 7;
+ public static final int ORIENTATION_ROTATE_270 = 8; // rotate 270 to right it
+
+ // Constants used for white balance
+ public static final int WHITEBALANCE_AUTO = 0;
+ public static final int WHITEBALANCE_MANUAL = 1;
+
+ // Maximum size for checking file type signature (see image_type_recognition_lite.cc)
+ private static final int SIGNATURE_CHECK_SIZE = 5000;
+
+ private static final byte[] JPEG_SIGNATURE = new byte[] {(byte) 0xff, (byte) 0xd8, (byte) 0xff};
+ private static final String RAF_SIGNATURE = "FUJIFILMCCD-RAW";
+ private static final int RAF_OFFSET_TO_JPEG_IMAGE_OFFSET = 84;
+ private static final int RAF_INFO_SIZE = 160;
+ private static final int RAF_JPEG_LENGTH_VALUE_SIZE = 4;
+
+ private static final byte[] HEIF_TYPE_FTYP = new byte[] {'f', 't', 'y', 'p'};
+ private static final byte[] HEIF_BRAND_MIF1 = new byte[] {'m', 'i', 'f', '1'};
+ private static final byte[] HEIF_BRAND_HEIC = new byte[] {'h', 'e', 'i', 'c'};
+
+ // See http://fileformats.archiveteam.org/wiki/Olympus_ORF
+ private static final short ORF_SIGNATURE_1 = 0x4f52;
+ private static final short ORF_SIGNATURE_2 = 0x5352;
+ // There are two formats for Olympus Makernote Headers. Each has different identifiers and
+ // offsets to the actual data.
+ // See http://www.exiv2.org/makernote.html#R1
+ private static final byte[] ORF_MAKER_NOTE_HEADER_1 = new byte[] {(byte) 0x4f, (byte) 0x4c,
+ (byte) 0x59, (byte) 0x4d, (byte) 0x50, (byte) 0x00}; // "OLYMP\0"
+ private static final byte[] ORF_MAKER_NOTE_HEADER_2 = new byte[] {(byte) 0x4f, (byte) 0x4c,
+ (byte) 0x59, (byte) 0x4d, (byte) 0x50, (byte) 0x55, (byte) 0x53, (byte) 0x00,
+ (byte) 0x49, (byte) 0x49}; // "OLYMPUS\0II"
+ private static final int ORF_MAKER_NOTE_HEADER_1_SIZE = 8;
+ private static final int ORF_MAKER_NOTE_HEADER_2_SIZE = 12;
+
+ // See http://fileformats.archiveteam.org/wiki/RW2
+ private static final short RW2_SIGNATURE = 0x0055;
+
+ // See http://fileformats.archiveteam.org/wiki/Pentax_PEF
+ private static final String PEF_SIGNATURE = "PENTAX";
+ // See http://www.exiv2.org/makernote.html#R11
+ private static final int PEF_MAKER_NOTE_SKIP_SIZE = 6;
+
    // Shared date/time formatter; initialization is not visible in this chunk.
    // NOTE(review): SimpleDateFormat is not thread-safe — presumably all access
    // is confined or synchronized by callers; confirm before concurrent use.
    private static SimpleDateFormat sFormatter;
+
+ // See Exchangeable image file format for digital still cameras: Exif version 2.2.
+ // The following values are for parsing EXIF data area. There are tag groups in EXIF data area.
+ // They are called "Image File Directory". They have multiple data formats to cover various
+ // image metadata from GPS longitude to camera model name.
+
+ // Types of Exif byte alignments (see JEITA CP-3451C Section 4.5.2)
+ private static final short BYTE_ALIGN_II = 0x4949; // II: Intel order
+ private static final short BYTE_ALIGN_MM = 0x4d4d; // MM: Motorola order
+
+ // TIFF Header Fixed Constant (see JEITA CP-3451C Section 4.5.2)
+ private static final byte START_CODE = 0x2a; // 42
+ private static final int IFD_OFFSET = 8;
+
+ // Formats for the value in IFD entry (See TIFF 6.0 Section 2, "Image File Directory".)
+ private static final int IFD_FORMAT_BYTE = 1;
+ private static final int IFD_FORMAT_STRING = 2;
+ private static final int IFD_FORMAT_USHORT = 3;
+ private static final int IFD_FORMAT_ULONG = 4;
+ private static final int IFD_FORMAT_URATIONAL = 5;
+ private static final int IFD_FORMAT_SBYTE = 6;
+ private static final int IFD_FORMAT_UNDEFINED = 7;
+ private static final int IFD_FORMAT_SSHORT = 8;
+ private static final int IFD_FORMAT_SLONG = 9;
+ private static final int IFD_FORMAT_SRATIONAL = 10;
+ private static final int IFD_FORMAT_SINGLE = 11;
+ private static final int IFD_FORMAT_DOUBLE = 12;
+ // Format indicating a new IFD entry (See Adobe PageMaker® 6.0 TIFF Technical Notes, "New Tag")
+ private static final int IFD_FORMAT_IFD = 13;
+ // Names for the data formats for debugging purpose.
+ private static final String[] IFD_FORMAT_NAMES = new String[] {
+ "", "BYTE", "STRING", "USHORT", "ULONG", "URATIONAL", "SBYTE", "UNDEFINED", "SSHORT",
+ "SLONG", "SRATIONAL", "SINGLE", "DOUBLE"
+ };
+ // Sizes of the components of each IFD value format
+ private static final int[] IFD_FORMAT_BYTES_PER_FORMAT = new int[] {
+ 0, 1, 1, 2, 4, 8, 1, 1, 2, 4, 8, 4, 8, 1
+ };
+ private static final byte[] EXIF_ASCII_PREFIX = new byte[] {
+ 0x41, 0x53, 0x43, 0x49, 0x49, 0x0, 0x0, 0x0
+ };
+
+ /**
+ * Constants used for Compression tag.
+ * For Value 1, 2, 32773, see TIFF 6.0 Spec Section 3: Bilevel Images, Compression
+ * For Value 6, see TIFF 6.0 Spec Section 22: JPEG Compression, Extensions to Existing Fields
+ * For Value 7, 8, 34892, see DNG Specification 1.4.0.0. Section 3, Compression
+ */
+ private static final int DATA_UNCOMPRESSED = 1;
+ private static final int DATA_HUFFMAN_COMPRESSED = 2;
+ private static final int DATA_JPEG = 6;
+ private static final int DATA_JPEG_COMPRESSED = 7;
+ private static final int DATA_DEFLATE_ZIP = 8;
+ private static final int DATA_PACK_BITS_COMPRESSED = 32773;
+ private static final int DATA_LOSSY_JPEG = 34892;
+
+ /**
+ * Constants used for BitsPerSample tag.
+ * For RGB, see TIFF 6.0 Spec Section 6, Differences from Palette Color Images
+ * For Greyscale, see TIFF 6.0 Spec Section 4, Differences from Bilevel Images
+ */
+ private static final int[] BITS_PER_SAMPLE_RGB = new int[] { 8, 8, 8 };
+ private static final int[] BITS_PER_SAMPLE_GREYSCALE_1 = new int[] { 4 };
+ private static final int[] BITS_PER_SAMPLE_GREYSCALE_2 = new int[] { 8 };
+
+ /**
+ * Constants used for PhotometricInterpretation tag.
+ * For White/Black, see Section 3, Color.
+ * See TIFF 6.0 Spec Section 22, Minimum Requirements for TIFF with JPEG Compression.
+ */
+ private static final int PHOTOMETRIC_INTERPRETATION_WHITE_IS_ZERO = 0;
+ private static final int PHOTOMETRIC_INTERPRETATION_BLACK_IS_ZERO = 1;
+ private static final int PHOTOMETRIC_INTERPRETATION_RGB = 2;
+ private static final int PHOTOMETRIC_INTERPRETATION_YCBCR = 6;
+
+ /**
+ * Constants used for NewSubfileType tag.
+ * See TIFF 6.0 Spec Section 8
+ * */
+ private static final int ORIGINAL_RESOLUTION_IMAGE = 0;
+ private static final int REDUCED_RESOLUTION_IMAGE = 1;
+
+ // A class for indicating EXIF rational type.
+ private static class Rational {
+ public final long numerator;
+ public final long denominator;
+
+ private Rational(long numerator, long denominator) {
+ // Handle erroneous case
+ if (denominator == 0) {
+ this.numerator = 0;
+ this.denominator = 1;
+ return;
+ }
+ this.numerator = numerator;
+ this.denominator = denominator;
+ }
+
+ @Override
+ public String toString() {
+ return numerator + "/" + denominator;
+ }
+
+ public double calculate() {
+ return (double) numerator / denominator;
+ }
+ }
+
+ // A class for indicating EXIF attribute.
+ private static class ExifAttribute {
        public final int format;              // One of the IFD_FORMAT_* constants (TIFF data type).
        public final int numberOfComponents;  // Count of values of that type in the payload.
        public final byte[] bytes;            // Raw value bytes, encoded in the entry's byte order.

        // Direct constructor; callers normally use the static create*() factories.
        private ExifAttribute(int format, int numberOfComponents, byte[] bytes) {
            this.format = format;
            this.numberOfComponents = numberOfComponents;
            this.bytes = bytes;
        }
+
+ public static ExifAttribute createUShort(int[] values, ByteOrder byteOrder) {
+ final ByteBuffer buffer = ByteBuffer.wrap(
+ new byte[IFD_FORMAT_BYTES_PER_FORMAT[IFD_FORMAT_USHORT] * values.length]);
+ buffer.order(byteOrder);
+ for (int value : values) {
+ buffer.putShort((short) value);
+ }
+ return new ExifAttribute(IFD_FORMAT_USHORT, values.length, buffer.array());
+ }
+
+ public static ExifAttribute createUShort(int value, ByteOrder byteOrder) {
+ return createUShort(new int[] {value}, byteOrder);
+ }
+
+ public static ExifAttribute createULong(long[] values, ByteOrder byteOrder) {
+ final ByteBuffer buffer = ByteBuffer.wrap(
+ new byte[IFD_FORMAT_BYTES_PER_FORMAT[IFD_FORMAT_ULONG] * values.length]);
+ buffer.order(byteOrder);
+ for (long value : values) {
+ buffer.putInt((int) value);
+ }
+ return new ExifAttribute(IFD_FORMAT_ULONG, values.length, buffer.array());
+ }
+
+ public static ExifAttribute createULong(long value, ByteOrder byteOrder) {
+ return createULong(new long[] {value}, byteOrder);
+ }
+
+ public static ExifAttribute createSLong(int[] values, ByteOrder byteOrder) {
+ final ByteBuffer buffer = ByteBuffer.wrap(
+ new byte[IFD_FORMAT_BYTES_PER_FORMAT[IFD_FORMAT_SLONG] * values.length]);
+ buffer.order(byteOrder);
+ for (int value : values) {
+ buffer.putInt(value);
+ }
+ return new ExifAttribute(IFD_FORMAT_SLONG, values.length, buffer.array());
+ }
+
+ public static ExifAttribute createSLong(int value, ByteOrder byteOrder) {
+ return createSLong(new int[] {value}, byteOrder);
+ }
+
+ public static ExifAttribute createByte(String value) {
+ // Exception for GPSAltitudeRef tag
+ if (value.length() == 1 && value.charAt(0) >= '0' && value.charAt(0) <= '1') {
+ final byte[] bytes = new byte[] { (byte) (value.charAt(0) - '0') };
+ return new ExifAttribute(IFD_FORMAT_BYTE, bytes.length, bytes);
+ }
+ final byte[] ascii = value.getBytes(ASCII);
+ return new ExifAttribute(IFD_FORMAT_BYTE, ascii.length, ascii);
+ }
+
+ public static ExifAttribute createString(String value) {
+ final byte[] ascii = (value + '\0').getBytes(ASCII);
+ return new ExifAttribute(IFD_FORMAT_STRING, ascii.length, ascii);
+ }
+
+ public static ExifAttribute createURational(Rational[] values, ByteOrder byteOrder) {
+ final ByteBuffer buffer = ByteBuffer.wrap(
+ new byte[IFD_FORMAT_BYTES_PER_FORMAT[IFD_FORMAT_URATIONAL] * values.length]);
+ buffer.order(byteOrder);
+ for (Rational value : values) {
+ buffer.putInt((int) value.numerator);
+ buffer.putInt((int) value.denominator);
+ }
+ return new ExifAttribute(IFD_FORMAT_URATIONAL, values.length, buffer.array());
+ }
+
+ public static ExifAttribute createURational(Rational value, ByteOrder byteOrder) {
+ return createURational(new Rational[] {value}, byteOrder);
+ }
+
+ public static ExifAttribute createSRational(Rational[] values, ByteOrder byteOrder) {
+ final ByteBuffer buffer = ByteBuffer.wrap(
+ new byte[IFD_FORMAT_BYTES_PER_FORMAT[IFD_FORMAT_SRATIONAL] * values.length]);
+ buffer.order(byteOrder);
+ for (Rational value : values) {
+ buffer.putInt((int) value.numerator);
+ buffer.putInt((int) value.denominator);
+ }
+ return new ExifAttribute(IFD_FORMAT_SRATIONAL, values.length, buffer.array());
+ }
+
+ public static ExifAttribute createSRational(Rational value, ByteOrder byteOrder) {
+ return createSRational(new Rational[] {value}, byteOrder);
+ }
+
+ public static ExifAttribute createDouble(double[] values, ByteOrder byteOrder) {
+ final ByteBuffer buffer = ByteBuffer.wrap(
+ new byte[IFD_FORMAT_BYTES_PER_FORMAT[IFD_FORMAT_DOUBLE] * values.length]);
+ buffer.order(byteOrder);
+ for (double value : values) {
+ buffer.putDouble(value);
+ }
+ return new ExifAttribute(IFD_FORMAT_DOUBLE, values.length, buffer.array());
+ }
+
+ public static ExifAttribute createDouble(double value, ByteOrder byteOrder) {
+ return createDouble(new double[] {value}, byteOrder);
+ }
+
+ @Override
+ public String toString() {
+ return "(" + IFD_FORMAT_NAMES[format] + ", data length:" + bytes.length + ")";
+ }
+
        // Decodes the raw payload into a Java value whose concrete type depends
        // on this attribute's TIFF format:
        //   BYTE/SBYTE, STRING/UNDEFINED -> String
        //   USHORT, SSHORT, SLONG        -> int[]
        //   ULONG                        -> long[]
        //   URATIONAL, SRATIONAL         -> Rational[]
        //   SINGLE, DOUBLE               -> double[]
        // Returns null for an unknown format or when reading fails.
        private Object getValue(ByteOrder byteOrder) {
            try {
                ByteOrderedDataInputStream inputStream =
                        new ByteOrderedDataInputStream(bytes);
                inputStream.setByteOrder(byteOrder);
                switch (format) {
                    case IFD_FORMAT_BYTE:
                    case IFD_FORMAT_SBYTE: {
                        // Exception for GPSAltitudeRef tag: a single 0/1 byte is
                        // rendered as the digit "0" or "1".
                        if (bytes.length == 1 && bytes[0] >= 0 && bytes[0] <= 1) {
                            return new String(new char[] { (char) (bytes[0] + '0') });
                        }
                        return new String(bytes, ASCII);
                    }
                    case IFD_FORMAT_UNDEFINED:
                    case IFD_FORMAT_STRING: {
                        int index = 0;
                        // Skip the "ASCII\0\0\0" character-code prefix (used by
                        // Exif comment-style values) when present.
                        if (numberOfComponents >= EXIF_ASCII_PREFIX.length) {
                            boolean same = true;
                            for (int i = 0; i < EXIF_ASCII_PREFIX.length; ++i) {
                                if (bytes[i] != EXIF_ASCII_PREFIX[i]) {
                                    same = false;
                                    break;
                                }
                            }
                            if (same) {
                                index = EXIF_ASCII_PREFIX.length;
                            }
                        }

                        // Copy characters up to the first NUL terminator,
                        // replacing non-printable bytes (< 32) with '?'.
                        StringBuilder stringBuilder = new StringBuilder();
                        while (index < numberOfComponents) {
                            int ch = bytes[index];
                            if (ch == 0) {
                                break;
                            }
                            if (ch >= 32) {
                                stringBuilder.append((char) ch);
                            } else {
                                stringBuilder.append('?');
                            }
                            ++index;
                        }
                        return stringBuilder.toString();
                    }
                    case IFD_FORMAT_USHORT: {
                        final int[] values = new int[numberOfComponents];
                        for (int i = 0; i < numberOfComponents; ++i) {
                            values[i] = inputStream.readUnsignedShort();
                        }
                        return values;
                    }
                    case IFD_FORMAT_ULONG: {
                        // Unsigned 32-bit values need a long[] to avoid sign loss.
                        final long[] values = new long[numberOfComponents];
                        for (int i = 0; i < numberOfComponents; ++i) {
                            values[i] = inputStream.readUnsignedInt();
                        }
                        return values;
                    }
                    case IFD_FORMAT_URATIONAL: {
                        final Rational[] values = new Rational[numberOfComponents];
                        for (int i = 0; i < numberOfComponents; ++i) {
                            final long numerator = inputStream.readUnsignedInt();
                            final long denominator = inputStream.readUnsignedInt();
                            values[i] = new Rational(numerator, denominator);
                        }
                        return values;
                    }
                    case IFD_FORMAT_SSHORT: {
                        final int[] values = new int[numberOfComponents];
                        for (int i = 0; i < numberOfComponents; ++i) {
                            values[i] = inputStream.readShort();
                        }
                        return values;
                    }
                    case IFD_FORMAT_SLONG: {
                        final int[] values = new int[numberOfComponents];
                        for (int i = 0; i < numberOfComponents; ++i) {
                            values[i] = inputStream.readInt();
                        }
                        return values;
                    }
                    case IFD_FORMAT_SRATIONAL: {
                        final Rational[] values = new Rational[numberOfComponents];
                        for (int i = 0; i < numberOfComponents; ++i) {
                            final long numerator = inputStream.readInt();
                            final long denominator = inputStream.readInt();
                            values[i] = new Rational(numerator, denominator);
                        }
                        return values;
                    }
                    case IFD_FORMAT_SINGLE: {
                        // 32-bit floats are widened into a double[].
                        final double[] values = new double[numberOfComponents];
                        for (int i = 0; i < numberOfComponents; ++i) {
                            values[i] = inputStream.readFloat();
                        }
                        return values;
                    }
                    case IFD_FORMAT_DOUBLE: {
                        final double[] values = new double[numberOfComponents];
                        for (int i = 0; i < numberOfComponents; ++i) {
                            values[i] = inputStream.readDouble();
                        }
                        return values;
                    }
                    default:
                        // Unknown or unsupported IFD format.
                        return null;
                }
            } catch (IOException e) {
                Log.w(TAG, "IOException occurred during reading a value", e);
                return null;
            }
        }
+
+ public double getDoubleValue(ByteOrder byteOrder) {
+ Object value = getValue(byteOrder);
+ if (value == null) {
+ throw new NumberFormatException("NULL can't be converted to a double value");
+ }
+ if (value instanceof String) {
+ return Double.parseDouble((String) value);
+ }
+ if (value instanceof long[]) {
+ long[] array = (long[]) value;
+ if (array.length == 1) {
+ return array[0];
+ }
+ throw new NumberFormatException("There are more than one component");
+ }
+ if (value instanceof int[]) {
+ int[] array = (int[]) value;
+ if (array.length == 1) {
+ return array[0];
+ }
+ throw new NumberFormatException("There are more than one component");
+ }
+ if (value instanceof double[]) {
+ double[] array = (double[]) value;
+ if (array.length == 1) {
+ return array[0];
+ }
+ throw new NumberFormatException("There are more than one component");
+ }
+ if (value instanceof Rational[]) {
+ Rational[] array = (Rational[]) value;
+ if (array.length == 1) {
+ return array[0].calculate();
+ }
+ throw new NumberFormatException("There are more than one component");
+ }
+ throw new NumberFormatException("Couldn't find a double value");
+ }
+
+ public int getIntValue(ByteOrder byteOrder) {
+ Object value = getValue(byteOrder);
+ if (value == null) {
+ throw new NumberFormatException("NULL can't be converted to a integer value");
+ }
+ if (value instanceof String) {
+ return Integer.parseInt((String) value);
+ }
+ if (value instanceof long[]) {
+ long[] array = (long[]) value;
+ if (array.length == 1) {
+ return (int) array[0];
+ }
+ throw new NumberFormatException("There are more than one component");
+ }
+ if (value instanceof int[]) {
+ int[] array = (int[]) value;
+ if (array.length == 1) {
+ return array[0];
+ }
+ throw new NumberFormatException("There are more than one component");
+ }
+ throw new NumberFormatException("Couldn't find a integer value");
+ }
+
+ public String getStringValue(ByteOrder byteOrder) {
+ Object value = getValue(byteOrder);
+ if (value == null) {
+ return null;
+ }
+ if (value instanceof String) {
+ return (String) value;
+ }
+
+ final StringBuilder stringBuilder = new StringBuilder();
+ if (value instanceof long[]) {
+ long[] array = (long[]) value;
+ for (int i = 0; i < array.length; ++i) {
+ stringBuilder.append(array[i]);
+ if (i + 1 != array.length) {
+ stringBuilder.append(",");
+ }
+ }
+ return stringBuilder.toString();
+ }
+ if (value instanceof int[]) {
+ int[] array = (int[]) value;
+ for (int i = 0; i < array.length; ++i) {
+ stringBuilder.append(array[i]);
+ if (i + 1 != array.length) {
+ stringBuilder.append(",");
+ }
+ }
+ return stringBuilder.toString();
+ }
+ if (value instanceof double[]) {
+ double[] array = (double[]) value;
+ for (int i = 0; i < array.length; ++i) {
+ stringBuilder.append(array[i]);
+ if (i + 1 != array.length) {
+ stringBuilder.append(",");
+ }
+ }
+ return stringBuilder.toString();
+ }
+ if (value instanceof Rational[]) {
+ Rational[] array = (Rational[]) value;
+ for (int i = 0; i < array.length; ++i) {
+ stringBuilder.append(array[i].numerator);
+ stringBuilder.append('/');
+ stringBuilder.append(array[i].denominator);
+ if (i + 1 != array.length) {
+ stringBuilder.append(",");
+ }
+ }
+ return stringBuilder.toString();
+ }
+ return null;
+ }
+
+ public int size() {
+ return IFD_FORMAT_BYTES_PER_FORMAT[format] * numberOfComponents;
+ }
+ }
+
+ // A class for indicating EXIF tag.
+ private static class ExifTag {
+ public final int number;
+ public final String name;
+ public final int primaryFormat;
+ public final int secondaryFormat;
+
+ private ExifTag(String name, int number, int format) {
+ this.name = name;
+ this.number = number;
+ this.primaryFormat = format;
+ this.secondaryFormat = -1;
+ }
+
+ private ExifTag(String name, int number, int primaryFormat, int secondaryFormat) {
+ this.name = name;
+ this.number = number;
+ this.primaryFormat = primaryFormat;
+ this.secondaryFormat = secondaryFormat;
+ }
+ }
+
    // Primary image IFD TIFF tags (See JEITA CP-3451C Section 4.6.8 Tag Support Levels)
    // Each entry maps a tag name to its IFD tag number and accepted data format(s).
    private static final ExifTag[] IFD_TIFF_TAGS = new ExifTag[] {
            // For below two, see TIFF 6.0 Spec Section 3: Bilevel Images.
            new ExifTag(TAG_NEW_SUBFILE_TYPE, 254, IFD_FORMAT_ULONG),
            new ExifTag(TAG_SUBFILE_TYPE, 255, IFD_FORMAT_ULONG),
            new ExifTag(TAG_IMAGE_WIDTH, 256, IFD_FORMAT_USHORT, IFD_FORMAT_ULONG),
            new ExifTag(TAG_IMAGE_LENGTH, 257, IFD_FORMAT_USHORT, IFD_FORMAT_ULONG),
            new ExifTag(TAG_BITS_PER_SAMPLE, 258, IFD_FORMAT_USHORT),
            new ExifTag(TAG_COMPRESSION, 259, IFD_FORMAT_USHORT),
            new ExifTag(TAG_PHOTOMETRIC_INTERPRETATION, 262, IFD_FORMAT_USHORT),
            new ExifTag(TAG_IMAGE_DESCRIPTION, 270, IFD_FORMAT_STRING),
            new ExifTag(TAG_MAKE, 271, IFD_FORMAT_STRING),
            new ExifTag(TAG_MODEL, 272, IFD_FORMAT_STRING),
            new ExifTag(TAG_STRIP_OFFSETS, 273, IFD_FORMAT_USHORT, IFD_FORMAT_ULONG),
            new ExifTag(TAG_ORIENTATION, 274, IFD_FORMAT_USHORT),
            new ExifTag(TAG_SAMPLES_PER_PIXEL, 277, IFD_FORMAT_USHORT),
            new ExifTag(TAG_ROWS_PER_STRIP, 278, IFD_FORMAT_USHORT, IFD_FORMAT_ULONG),
            new ExifTag(TAG_STRIP_BYTE_COUNTS, 279, IFD_FORMAT_USHORT, IFD_FORMAT_ULONG),
            new ExifTag(TAG_X_RESOLUTION, 282, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_Y_RESOLUTION, 283, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_PLANAR_CONFIGURATION, 284, IFD_FORMAT_USHORT),
            new ExifTag(TAG_RESOLUTION_UNIT, 296, IFD_FORMAT_USHORT),
            new ExifTag(TAG_TRANSFER_FUNCTION, 301, IFD_FORMAT_USHORT),
            new ExifTag(TAG_SOFTWARE, 305, IFD_FORMAT_STRING),
            new ExifTag(TAG_DATETIME, 306, IFD_FORMAT_STRING),
            new ExifTag(TAG_ARTIST, 315, IFD_FORMAT_STRING),
            new ExifTag(TAG_WHITE_POINT, 318, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_PRIMARY_CHROMATICITIES, 319, IFD_FORMAT_URATIONAL),
            // See Adobe PageMaker® 6.0 TIFF Technical Notes, Note 1.
            new ExifTag(TAG_SUB_IFD_POINTER, 330, IFD_FORMAT_ULONG),
            new ExifTag(TAG_JPEG_INTERCHANGE_FORMAT, 513, IFD_FORMAT_ULONG),
            new ExifTag(TAG_JPEG_INTERCHANGE_FORMAT_LENGTH, 514, IFD_FORMAT_ULONG),
            new ExifTag(TAG_Y_CB_CR_COEFFICIENTS, 529, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_Y_CB_CR_SUB_SAMPLING, 530, IFD_FORMAT_USHORT),
            new ExifTag(TAG_Y_CB_CR_POSITIONING, 531, IFD_FORMAT_USHORT),
            new ExifTag(TAG_REFERENCE_BLACK_WHITE, 532, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_COPYRIGHT, 33432, IFD_FORMAT_STRING),
            new ExifTag(TAG_EXIF_IFD_POINTER, 34665, IFD_FORMAT_ULONG),
            new ExifTag(TAG_GPS_INFO_IFD_POINTER, 34853, IFD_FORMAT_ULONG),
            // RW2 file tags
            // See http://www.sno.phy.queensu.ca/~phil/exiftool/TagNames/PanasonicRaw.html)
            // NOTE(review): these low tag numbers (4-46) are Panasonic RAW IFD0 tags
            // and coexist with the standard TIFF numbers in the same table.
            new ExifTag(TAG_RW2_SENSOR_TOP_BORDER, 4, IFD_FORMAT_ULONG),
            new ExifTag(TAG_RW2_SENSOR_LEFT_BORDER, 5, IFD_FORMAT_ULONG),
            new ExifTag(TAG_RW2_SENSOR_BOTTOM_BORDER, 6, IFD_FORMAT_ULONG),
            new ExifTag(TAG_RW2_SENSOR_RIGHT_BORDER, 7, IFD_FORMAT_ULONG),
            new ExifTag(TAG_RW2_ISO, 23, IFD_FORMAT_USHORT),
            new ExifTag(TAG_RW2_JPG_FROM_RAW, 46, IFD_FORMAT_UNDEFINED)
    };
+
    // Primary image IFD Exif Private tags (See JEITA CP-3451C Section 4.6.8 Tag Support Levels)
    private static final ExifTag[] IFD_EXIF_TAGS = new ExifTag[] {
            new ExifTag(TAG_EXPOSURE_TIME, 33434, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_F_NUMBER, 33437, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_EXPOSURE_PROGRAM, 34850, IFD_FORMAT_USHORT),
            new ExifTag(TAG_SPECTRAL_SENSITIVITY, 34852, IFD_FORMAT_STRING),
            new ExifTag(TAG_ISO_SPEED_RATINGS, 34855, IFD_FORMAT_USHORT),
            new ExifTag(TAG_OECF, 34856, IFD_FORMAT_UNDEFINED),
            new ExifTag(TAG_EXIF_VERSION, 36864, IFD_FORMAT_STRING),
            new ExifTag(TAG_DATETIME_ORIGINAL, 36867, IFD_FORMAT_STRING),
            new ExifTag(TAG_DATETIME_DIGITIZED, 36868, IFD_FORMAT_STRING),
            new ExifTag(TAG_COMPONENTS_CONFIGURATION, 37121, IFD_FORMAT_UNDEFINED),
            new ExifTag(TAG_COMPRESSED_BITS_PER_PIXEL, 37122, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_SHUTTER_SPEED_VALUE, 37377, IFD_FORMAT_SRATIONAL),
            new ExifTag(TAG_APERTURE_VALUE, 37378, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_BRIGHTNESS_VALUE, 37379, IFD_FORMAT_SRATIONAL),
            new ExifTag(TAG_EXPOSURE_BIAS_VALUE, 37380, IFD_FORMAT_SRATIONAL),
            new ExifTag(TAG_MAX_APERTURE_VALUE, 37381, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_SUBJECT_DISTANCE, 37382, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_METERING_MODE, 37383, IFD_FORMAT_USHORT),
            new ExifTag(TAG_LIGHT_SOURCE, 37384, IFD_FORMAT_USHORT),
            new ExifTag(TAG_FLASH, 37385, IFD_FORMAT_USHORT),
            new ExifTag(TAG_FOCAL_LENGTH, 37386, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_SUBJECT_AREA, 37396, IFD_FORMAT_USHORT),
            new ExifTag(TAG_MAKER_NOTE, 37500, IFD_FORMAT_UNDEFINED),
            new ExifTag(TAG_USER_COMMENT, 37510, IFD_FORMAT_UNDEFINED),
            new ExifTag(TAG_SUBSEC_TIME, 37520, IFD_FORMAT_STRING),
            new ExifTag(TAG_SUBSEC_TIME_ORIG, 37521, IFD_FORMAT_STRING),
            new ExifTag(TAG_SUBSEC_TIME_DIG, 37522, IFD_FORMAT_STRING),
            new ExifTag(TAG_FLASHPIX_VERSION, 40960, IFD_FORMAT_UNDEFINED),
            new ExifTag(TAG_COLOR_SPACE, 40961, IFD_FORMAT_USHORT),
            new ExifTag(TAG_PIXEL_X_DIMENSION, 40962, IFD_FORMAT_USHORT, IFD_FORMAT_ULONG),
            new ExifTag(TAG_PIXEL_Y_DIMENSION, 40963, IFD_FORMAT_USHORT, IFD_FORMAT_ULONG),
            new ExifTag(TAG_RELATED_SOUND_FILE, 40964, IFD_FORMAT_STRING),
            new ExifTag(TAG_INTEROPERABILITY_IFD_POINTER, 40965, IFD_FORMAT_ULONG),
            new ExifTag(TAG_FLASH_ENERGY, 41483, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_SPATIAL_FREQUENCY_RESPONSE, 41484, IFD_FORMAT_UNDEFINED),
            new ExifTag(TAG_FOCAL_PLANE_X_RESOLUTION, 41486, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_FOCAL_PLANE_Y_RESOLUTION, 41487, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_FOCAL_PLANE_RESOLUTION_UNIT, 41488, IFD_FORMAT_USHORT),
            new ExifTag(TAG_SUBJECT_LOCATION, 41492, IFD_FORMAT_USHORT),
            new ExifTag(TAG_EXPOSURE_INDEX, 41493, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_SENSING_METHOD, 41495, IFD_FORMAT_USHORT),
            new ExifTag(TAG_FILE_SOURCE, 41728, IFD_FORMAT_UNDEFINED),
            new ExifTag(TAG_SCENE_TYPE, 41729, IFD_FORMAT_UNDEFINED),
            new ExifTag(TAG_CFA_PATTERN, 41730, IFD_FORMAT_UNDEFINED),
            new ExifTag(TAG_CUSTOM_RENDERED, 41985, IFD_FORMAT_USHORT),
            new ExifTag(TAG_EXPOSURE_MODE, 41986, IFD_FORMAT_USHORT),
            new ExifTag(TAG_WHITE_BALANCE, 41987, IFD_FORMAT_USHORT),
            new ExifTag(TAG_DIGITAL_ZOOM_RATIO, 41988, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_FOCAL_LENGTH_IN_35MM_FILM, 41989, IFD_FORMAT_USHORT),
            new ExifTag(TAG_SCENE_CAPTURE_TYPE, 41990, IFD_FORMAT_USHORT),
            new ExifTag(TAG_GAIN_CONTROL, 41991, IFD_FORMAT_USHORT),
            new ExifTag(TAG_CONTRAST, 41992, IFD_FORMAT_USHORT),
            new ExifTag(TAG_SATURATION, 41993, IFD_FORMAT_USHORT),
            new ExifTag(TAG_SHARPNESS, 41994, IFD_FORMAT_USHORT),
            new ExifTag(TAG_DEVICE_SETTING_DESCRIPTION, 41995, IFD_FORMAT_UNDEFINED),
            new ExifTag(TAG_SUBJECT_DISTANCE_RANGE, 41996, IFD_FORMAT_USHORT),
            new ExifTag(TAG_IMAGE_UNIQUE_ID, 42016, IFD_FORMAT_STRING),
            // DNG-specific tags (not part of the JEITA Exif IFD set).
            new ExifTag(TAG_DNG_VERSION, 50706, IFD_FORMAT_BYTE),
            new ExifTag(TAG_DEFAULT_CROP_SIZE, 50720, IFD_FORMAT_USHORT, IFD_FORMAT_ULONG)
    };
+
    // Primary image IFD GPS Info tags (See JEITA CP-3451C Section 4.6.8 Tag Support Levels)
    private static final ExifTag[] IFD_GPS_TAGS = new ExifTag[] {
            new ExifTag(TAG_GPS_VERSION_ID, 0, IFD_FORMAT_BYTE),
            new ExifTag(TAG_GPS_LATITUDE_REF, 1, IFD_FORMAT_STRING),
            new ExifTag(TAG_GPS_LATITUDE, 2, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_GPS_LONGITUDE_REF, 3, IFD_FORMAT_STRING),
            new ExifTag(TAG_GPS_LONGITUDE, 4, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_GPS_ALTITUDE_REF, 5, IFD_FORMAT_BYTE),
            new ExifTag(TAG_GPS_ALTITUDE, 6, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_GPS_TIMESTAMP, 7, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_GPS_SATELLITES, 8, IFD_FORMAT_STRING),
            new ExifTag(TAG_GPS_STATUS, 9, IFD_FORMAT_STRING),
            new ExifTag(TAG_GPS_MEASURE_MODE, 10, IFD_FORMAT_STRING),
            new ExifTag(TAG_GPS_DOP, 11, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_GPS_SPEED_REF, 12, IFD_FORMAT_STRING),
            new ExifTag(TAG_GPS_SPEED, 13, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_GPS_TRACK_REF, 14, IFD_FORMAT_STRING),
            new ExifTag(TAG_GPS_TRACK, 15, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_GPS_IMG_DIRECTION_REF, 16, IFD_FORMAT_STRING),
            new ExifTag(TAG_GPS_IMG_DIRECTION, 17, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_GPS_MAP_DATUM, 18, IFD_FORMAT_STRING),
            new ExifTag(TAG_GPS_DEST_LATITUDE_REF, 19, IFD_FORMAT_STRING),
            new ExifTag(TAG_GPS_DEST_LATITUDE, 20, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_GPS_DEST_LONGITUDE_REF, 21, IFD_FORMAT_STRING),
            new ExifTag(TAG_GPS_DEST_LONGITUDE, 22, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_GPS_DEST_BEARING_REF, 23, IFD_FORMAT_STRING),
            new ExifTag(TAG_GPS_DEST_BEARING, 24, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_GPS_DEST_DISTANCE_REF, 25, IFD_FORMAT_STRING),
            new ExifTag(TAG_GPS_DEST_DISTANCE, 26, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_GPS_PROCESSING_METHOD, 27, IFD_FORMAT_UNDEFINED),
            new ExifTag(TAG_GPS_AREA_INFORMATION, 28, IFD_FORMAT_UNDEFINED),
            new ExifTag(TAG_GPS_DATESTAMP, 29, IFD_FORMAT_STRING),
            new ExifTag(TAG_GPS_DIFFERENTIAL, 30, IFD_FORMAT_USHORT)
    };
    // Primary image IFD Interoperability tag (See JEITA CP-3451C Section 4.6.8 Tag Support Levels)
    private static final ExifTag[] IFD_INTEROPERABILITY_TAGS = new ExifTag[] {
            new ExifTag(TAG_INTEROPERABILITY_INDEX, 1, IFD_FORMAT_STRING)
    };
    // IFD Thumbnail tags (See JEITA CP-3451C Section 4.6.8 Tag Support Levels)
    // Largely mirrors IFD_TIFF_TAGS but uses the thumbnail-specific width/length
    // tag names for numbers 256/257.
    private static final ExifTag[] IFD_THUMBNAIL_TAGS = new ExifTag[] {
            // For below two, see TIFF 6.0 Spec Section 3: Bilevel Images.
            new ExifTag(TAG_NEW_SUBFILE_TYPE, 254, IFD_FORMAT_ULONG),
            new ExifTag(TAG_SUBFILE_TYPE, 255, IFD_FORMAT_ULONG),
            new ExifTag(TAG_THUMBNAIL_IMAGE_WIDTH, 256, IFD_FORMAT_USHORT, IFD_FORMAT_ULONG),
            new ExifTag(TAG_THUMBNAIL_IMAGE_LENGTH, 257, IFD_FORMAT_USHORT, IFD_FORMAT_ULONG),
            new ExifTag(TAG_BITS_PER_SAMPLE, 258, IFD_FORMAT_USHORT),
            new ExifTag(TAG_COMPRESSION, 259, IFD_FORMAT_USHORT),
            new ExifTag(TAG_PHOTOMETRIC_INTERPRETATION, 262, IFD_FORMAT_USHORT),
            new ExifTag(TAG_IMAGE_DESCRIPTION, 270, IFD_FORMAT_STRING),
            new ExifTag(TAG_MAKE, 271, IFD_FORMAT_STRING),
            new ExifTag(TAG_MODEL, 272, IFD_FORMAT_STRING),
            new ExifTag(TAG_STRIP_OFFSETS, 273, IFD_FORMAT_USHORT, IFD_FORMAT_ULONG),
            new ExifTag(TAG_ORIENTATION, 274, IFD_FORMAT_USHORT),
            new ExifTag(TAG_SAMPLES_PER_PIXEL, 277, IFD_FORMAT_USHORT),
            new ExifTag(TAG_ROWS_PER_STRIP, 278, IFD_FORMAT_USHORT, IFD_FORMAT_ULONG),
            new ExifTag(TAG_STRIP_BYTE_COUNTS, 279, IFD_FORMAT_USHORT, IFD_FORMAT_ULONG),
            new ExifTag(TAG_X_RESOLUTION, 282, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_Y_RESOLUTION, 283, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_PLANAR_CONFIGURATION, 284, IFD_FORMAT_USHORT),
            new ExifTag(TAG_RESOLUTION_UNIT, 296, IFD_FORMAT_USHORT),
            new ExifTag(TAG_TRANSFER_FUNCTION, 301, IFD_FORMAT_USHORT),
            new ExifTag(TAG_SOFTWARE, 305, IFD_FORMAT_STRING),
            new ExifTag(TAG_DATETIME, 306, IFD_FORMAT_STRING),
            new ExifTag(TAG_ARTIST, 315, IFD_FORMAT_STRING),
            new ExifTag(TAG_WHITE_POINT, 318, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_PRIMARY_CHROMATICITIES, 319, IFD_FORMAT_URATIONAL),
            // See Adobe PageMaker® 6.0 TIFF Technical Notes, Note 1.
            new ExifTag(TAG_SUB_IFD_POINTER, 330, IFD_FORMAT_ULONG),
            new ExifTag(TAG_JPEG_INTERCHANGE_FORMAT, 513, IFD_FORMAT_ULONG),
            new ExifTag(TAG_JPEG_INTERCHANGE_FORMAT_LENGTH, 514, IFD_FORMAT_ULONG),
            new ExifTag(TAG_Y_CB_CR_COEFFICIENTS, 529, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_Y_CB_CR_SUB_SAMPLING, 530, IFD_FORMAT_USHORT),
            new ExifTag(TAG_Y_CB_CR_POSITIONING, 531, IFD_FORMAT_USHORT),
            new ExifTag(TAG_REFERENCE_BLACK_WHITE, 532, IFD_FORMAT_URATIONAL),
            new ExifTag(TAG_COPYRIGHT, 33432, IFD_FORMAT_STRING),
            new ExifTag(TAG_EXIF_IFD_POINTER, 34665, IFD_FORMAT_ULONG),
            new ExifTag(TAG_GPS_INFO_IFD_POINTER, 34853, IFD_FORMAT_ULONG),
            new ExifTag(TAG_DNG_VERSION, 50706, IFD_FORMAT_BYTE),
            new ExifTag(TAG_DEFAULT_CROP_SIZE, 50720, IFD_FORMAT_USHORT, IFD_FORMAT_ULONG)
    };
+
    // RAF file tag (See piex.cc line 372)
    private static final ExifTag TAG_RAF_IMAGE_SIZE =
            new ExifTag(TAG_STRIP_OFFSETS, 273, IFD_FORMAT_USHORT);

    // ORF file tags (See http://www.exiv2.org/tags-olympus.html)
    // Olympus maker-note tags; the pointer tags lead to nested ORF-specific IFDs.
    private static final ExifTag[] ORF_MAKER_NOTE_TAGS = new ExifTag[] {
            new ExifTag(TAG_ORF_THUMBNAIL_IMAGE, 256, IFD_FORMAT_UNDEFINED),
            new ExifTag(TAG_ORF_CAMERA_SETTINGS_IFD_POINTER, 8224, IFD_FORMAT_ULONG),
            new ExifTag(TAG_ORF_IMAGE_PROCESSING_IFD_POINTER, 8256, IFD_FORMAT_ULONG)
    };
    // Tags inside the ORF camera-settings IFD locating the embedded preview image.
    private static final ExifTag[] ORF_CAMERA_SETTINGS_TAGS = new ExifTag[] {
            new ExifTag(TAG_ORF_PREVIEW_IMAGE_START, 257, IFD_FORMAT_ULONG),
            new ExifTag(TAG_ORF_PREVIEW_IMAGE_LENGTH, 258, IFD_FORMAT_ULONG)
    };
    private static final ExifTag[] ORF_IMAGE_PROCESSING_TAGS = new ExifTag[] {
            new ExifTag(TAG_ORF_ASPECT_FRAME, 4371, IFD_FORMAT_USHORT)
    };
    // PEF file tag (See http://www.sno.phy.queensu.ca/~phil/exiftool/TagNames/Pentax.html)
    private static final ExifTag[] PEF_TAGS = new ExifTag[] {
            new ExifTag(TAG_COLOR_SPACE, 55, IFD_FORMAT_USHORT)
    };
+
    // See JEITA CP-3451C Section 4.6.3: Exif-specific IFD.
    // The following values are used for indicating pointers to the other Image File Directories.

    // Indices of Exif Ifd tag groups
    /** @hide */
    @Retention(RetentionPolicy.SOURCE)
    @IntDef({IFD_TYPE_PRIMARY, IFD_TYPE_EXIF, IFD_TYPE_GPS, IFD_TYPE_INTEROPERABILITY,
            IFD_TYPE_THUMBNAIL, IFD_TYPE_PREVIEW, IFD_TYPE_ORF_MAKER_NOTE,
            IFD_TYPE_ORF_CAMERA_SETTINGS, IFD_TYPE_ORF_IMAGE_PROCESSING, IFD_TYPE_PEF})
    public @interface IfdType {}

    // These constants double as indices into EXIF_TAGS, mAttributes and the
    // sExifTagMapsFor* arrays, so their values must stay dense and in this order.
    private static final int IFD_TYPE_PRIMARY = 0;
    private static final int IFD_TYPE_EXIF = 1;
    private static final int IFD_TYPE_GPS = 2;
    private static final int IFD_TYPE_INTEROPERABILITY = 3;
    private static final int IFD_TYPE_THUMBNAIL = 4;
    private static final int IFD_TYPE_PREVIEW = 5;
    private static final int IFD_TYPE_ORF_MAKER_NOTE = 6;
    private static final int IFD_TYPE_ORF_CAMERA_SETTINGS = 7;
    private static final int IFD_TYPE_ORF_IMAGE_PROCESSING = 8;
    private static final int IFD_TYPE_PEF = 9;
+
    // List of Exif tag groups, indexed by the IFD_TYPE_* constants above. Note that
    // IFD_TIFF_TAGS appears twice: once for the primary image (index 0) and once
    // for the preview image (index 5), which shares the same tag set.
    private static final ExifTag[][] EXIF_TAGS = new ExifTag[][] {
            IFD_TIFF_TAGS, IFD_EXIF_TAGS, IFD_GPS_TAGS, IFD_INTEROPERABILITY_TAGS,
            IFD_THUMBNAIL_TAGS, IFD_TIFF_TAGS, ORF_MAKER_NOTE_TAGS, ORF_CAMERA_SETTINGS_TAGS,
            ORF_IMAGE_PROCESSING_TAGS, PEF_TAGS
    };
    // List of tags for pointing to the other image file directory offset.
    // NOTE(review): the two ORF pointer tags are declared IFD_FORMAT_BYTE here but
    // IFD_FORMAT_ULONG in ORF_MAKER_NOTE_TAGS — confirm this asymmetry is intended.
    private static final ExifTag[] EXIF_POINTER_TAGS = new ExifTag[] {
            new ExifTag(TAG_SUB_IFD_POINTER, 330, IFD_FORMAT_ULONG),
            new ExifTag(TAG_EXIF_IFD_POINTER, 34665, IFD_FORMAT_ULONG),
            new ExifTag(TAG_GPS_INFO_IFD_POINTER, 34853, IFD_FORMAT_ULONG),
            new ExifTag(TAG_INTEROPERABILITY_IFD_POINTER, 40965, IFD_FORMAT_ULONG),
            new ExifTag(TAG_ORF_CAMERA_SETTINGS_IFD_POINTER, 8224, IFD_FORMAT_BYTE),
            new ExifTag(TAG_ORF_IMAGE_PROCESSING_IFD_POINTER, 8256, IFD_FORMAT_BYTE)
    };
+
    // Tags for indicating the thumbnail offset and length
    private static final ExifTag JPEG_INTERCHANGE_FORMAT_TAG =
            new ExifTag(TAG_JPEG_INTERCHANGE_FORMAT, 513, IFD_FORMAT_ULONG);
    private static final ExifTag JPEG_INTERCHANGE_FORMAT_LENGTH_TAG =
            new ExifTag(TAG_JPEG_INTERCHANGE_FORMAT_LENGTH, 514, IFD_FORMAT_ULONG);

    // Mappings from tag number to tag name and each item represents one IFD tag group.
    // (Raw HashMap[]: keys are Integer tag numbers, values are ExifTag; populated
    // in the static initializer below.)
    private static final HashMap[] sExifTagMapsForReading = new HashMap[EXIF_TAGS.length];
    // Mappings from tag name to tag number and each item represents one IFD tag group.
    // (Keys are String tag names, values are ExifTag.)
    private static final HashMap[] sExifTagMapsForWriting = new HashMap[EXIF_TAGS.length];
    // Tags whose getAttribute/setAttribute values use legacy custom string formats
    // (rationals rendered as "num/denom", GPS timestamp as "HH:MM:SS").
    private static final HashSet<String> sTagSetForCompatibility = new HashSet<>(Arrays.asList(
            TAG_F_NUMBER, TAG_DIGITAL_ZOOM_RATIO, TAG_EXPOSURE_TIME, TAG_SUBJECT_DISTANCE,
            TAG_GPS_TIMESTAMP));
    // Mappings from tag number to IFD type for pointer tags.
    private static final HashMap sExifPointerTagMap = new HashMap();
+
    // See JPEG File Interchange Format Version 1.02.
    // The following values are defined for handling JPEG streams. In this implementation, we are
    // not only getting information from EXIF but also from some JPEG special segments such as
    // MARKER_COM for user comment and MARKER_SOFx for image width and height.

    private static final Charset ASCII = Charset.forName("US-ASCII");
    // Identifier for EXIF APP1 segment in JPEG
    private static final byte[] IDENTIFIER_EXIF_APP1 = "Exif\0\0".getBytes(ASCII);
    // JPEG segment markers, that each marker consumes two bytes beginning with 0xff and ending with
    // the indicator. There is no SOF4, SOF8, SOF16 markers in JPEG and SOFx markers indicates start
    // of frame(baseline DCT) and the image size info exists in its beginning part.
    private static final byte MARKER = (byte) 0xff;
    private static final byte MARKER_SOI = (byte) 0xd8;
    private static final byte MARKER_SOF0 = (byte) 0xc0;
    private static final byte MARKER_SOF1 = (byte) 0xc1;
    private static final byte MARKER_SOF2 = (byte) 0xc2;
    private static final byte MARKER_SOF3 = (byte) 0xc3;
    private static final byte MARKER_SOF5 = (byte) 0xc5;
    private static final byte MARKER_SOF6 = (byte) 0xc6;
    private static final byte MARKER_SOF7 = (byte) 0xc7;
    private static final byte MARKER_SOF9 = (byte) 0xc9;
    private static final byte MARKER_SOF10 = (byte) 0xca;
    private static final byte MARKER_SOF11 = (byte) 0xcb;
    private static final byte MARKER_SOF13 = (byte) 0xcd;
    private static final byte MARKER_SOF14 = (byte) 0xce;
    private static final byte MARKER_SOF15 = (byte) 0xcf;
    private static final byte MARKER_SOS = (byte) 0xda;
    private static final byte MARKER_APP1 = (byte) 0xe1;
    private static final byte MARKER_COM = (byte) 0xfe;
    private static final byte MARKER_EOI = (byte) 0xd9;

    // Supported Image File Types, stored in mMimeType after format detection.
    private static final int IMAGE_TYPE_UNKNOWN = 0;
    private static final int IMAGE_TYPE_ARW = 1;
    private static final int IMAGE_TYPE_CR2 = 2;
    private static final int IMAGE_TYPE_DNG = 3;
    private static final int IMAGE_TYPE_JPEG = 4;
    private static final int IMAGE_TYPE_NEF = 5;
    private static final int IMAGE_TYPE_NRW = 6;
    private static final int IMAGE_TYPE_ORF = 7;
    private static final int IMAGE_TYPE_PEF = 8;
    private static final int IMAGE_TYPE_RAF = 9;
    private static final int IMAGE_TYPE_RW2 = 10;
    private static final int IMAGE_TYPE_SRW = 11;
    private static final int IMAGE_TYPE_HEIF = 12;
+
    // Builds the static lookup tables (date formatter, per-IFD tag maps, pointer-tag
    // map) exactly once at class-load time.
    static {
        // EXIF datetime format; fixed to UTC so parsing/formatting is zone-stable.
        sFormatter = new SimpleDateFormat("yyyy:MM:dd HH:mm:ss");
        sFormatter.setTimeZone(TimeZone.getTimeZone("UTC"));

        // Build up the hash tables to look up Exif tags for reading Exif tags.
        for (int ifdType = 0; ifdType < EXIF_TAGS.length; ++ifdType) {
            sExifTagMapsForReading[ifdType] = new HashMap();
            sExifTagMapsForWriting[ifdType] = new HashMap();
            for (ExifTag tag : EXIF_TAGS[ifdType]) {
                // Reading resolves by tag number; writing resolves by tag name.
                sExifTagMapsForReading[ifdType].put(tag.number, tag);
                sExifTagMapsForWriting[ifdType].put(tag.name, tag);
            }
        }

        // Build up the hash table to look up Exif pointer tags.
        // Note: tag 330 (SubIFD) is mapped to the preview IFD type.
        sExifPointerTagMap.put(EXIF_POINTER_TAGS[0].number, IFD_TYPE_PREVIEW); // 330
        sExifPointerTagMap.put(EXIF_POINTER_TAGS[1].number, IFD_TYPE_EXIF); // 34665
        sExifPointerTagMap.put(EXIF_POINTER_TAGS[2].number, IFD_TYPE_GPS); // 34853
        sExifPointerTagMap.put(EXIF_POINTER_TAGS[3].number, IFD_TYPE_INTEROPERABILITY); // 40965
        sExifPointerTagMap.put(EXIF_POINTER_TAGS[4].number, IFD_TYPE_ORF_CAMERA_SETTINGS); // 8224
        sExifPointerTagMap.put(EXIF_POINTER_TAGS[5].number, IFD_TYPE_ORF_IMAGE_PROCESSING); // 8256
    }
+
    // Exactly one of the following three sources is non-null, depending on which
    // constructor was used.
    private final String mFilename;
    private final FileDescriptor mSeekableFileDescriptor;
    private final AssetManager.AssetInputStream mAssetInputStream;
    // True when constructed from a generic InputStream (attribute saving unsupported).
    private final boolean mIsInputStream;
    // Detected image type, one of the IMAGE_TYPE_* constants.
    private int mMimeType;
    // Parsed attributes, one raw HashMap (tag name -> ExifAttribute) per IFD group.
    private final HashMap[] mAttributes = new HashMap[EXIF_TAGS.length];
    // Byte order from the TIFF header; defaults to big-endian until parsed.
    private ByteOrder mExifByteOrder = ByteOrder.BIG_ENDIAN;
    private boolean mHasThumbnail;
    // The following values used for indicating a thumbnail position.
    private int mThumbnailOffset;
    private int mThumbnailLength;
    private byte[] mThumbnailBytes;
    private int mThumbnailCompression;
    // Offsets into the source needed to relocate data in format-specific files.
    private int mExifOffset;
    private int mOrfMakerNoteOffset;
    private int mOrfThumbnailOffset;
    private int mOrfThumbnailLength;
    private int mRw2JpgFromRawOffset;
    private boolean mIsSupportedFile;

    // Pattern to check non zero timestamp
    private static final Pattern sNonZeroTimePattern = Pattern.compile(".*[1-9].*");
    // Pattern to check gps timestamp
    private static final Pattern sGpsTimestampPattern =
            Pattern.compile("^([0-9][0-9]):([0-9][0-9]):([0-9][0-9])$");
+
+ /**
+ * Reads Exif tags from the specified image file.
+ */
+ public ExifInterface(String filename) throws IOException {
+ if (filename == null) {
+ throw new IllegalArgumentException("filename cannot be null");
+ }
+ FileInputStream in = null;
+ mAssetInputStream = null;
+ mFilename = filename;
+ mIsInputStream = false;
+ try {
+ in = new FileInputStream(filename);
+ if (isSeekableFD(in.getFD())) {
+ mSeekableFileDescriptor = in.getFD();
+ } else {
+ mSeekableFileDescriptor = null;
+ }
+ loadAttributes(in);
+ } finally {
+ IoUtils.closeQuietly(in);
+ }
+ }
+
    /**
     * Reads Exif tags from the specified image file descriptor. Attribute mutation is supported
     * for writable and seekable file descriptors only. This constructor will not rewind the offset
     * of the given file descriptor. Developers should close the file descriptor after use.
     *
     * @param fileDescriptor the descriptor to read from; must not be null.
     * @throws IOException if duplicating the descriptor or reading fails.
     */
    public ExifInterface(FileDescriptor fileDescriptor) throws IOException {
        if (fileDescriptor == null) {
            throw new IllegalArgumentException("fileDescriptor cannot be null");
        }
        mAssetInputStream = null;
        mFilename = null;
        if (isSeekableFD(fileDescriptor)) {
            // Retain the caller's original descriptor for later saving; parse from
            // a duplicate so the read below does not consume the retained one.
            mSeekableFileDescriptor = fileDescriptor;
            // Keep the original file descriptor in order to save attributes when it's seekable.
            // Otherwise, just close the given file descriptor after reading it because the save
            // feature won't be working.
            try {
                fileDescriptor = Os.dup(fileDescriptor);
            } catch (ErrnoException e) {
                // Surface the errno failure as an IOException per this API's contract.
                throw e.rethrowAsIOException();
            }
        } else {
            mSeekableFileDescriptor = null;
        }
        mIsInputStream = false;
        FileInputStream in = null;
        try {
            // When seekable, this reads (and then closes) the duplicate, not the
            // descriptor stored in mSeekableFileDescriptor.
            in = new FileInputStream(fileDescriptor);
            loadAttributes(in);
        } finally {
            IoUtils.closeQuietly(in);
        }
    }
+
+ /**
+ * Reads Exif tags from the specified image input stream. Attribute mutation is not supported
+ * for input streams. The given input stream will proceed its current position. Developers
+ * should close the input stream after use.
+ */
+ public ExifInterface(InputStream inputStream) throws IOException {
+ if (inputStream == null) {
+ throw new IllegalArgumentException("inputStream cannot be null");
+ }
+ mFilename = null;
+ if (inputStream instanceof AssetManager.AssetInputStream) {
+ mAssetInputStream = (AssetManager.AssetInputStream) inputStream;
+ mSeekableFileDescriptor = null;
+ } else if (inputStream instanceof FileInputStream
+ && isSeekableFD(((FileInputStream) inputStream).getFD())) {
+ mAssetInputStream = null;
+ mSeekableFileDescriptor = ((FileInputStream) inputStream).getFD();
+ } else {
+ mAssetInputStream = null;
+ mSeekableFileDescriptor = null;
+ }
+ mIsInputStream = true;
+ loadAttributes(inputStream);
+ }
+
+ /**
+ * Returns the EXIF attribute of the specified tag or {@code null} if there is no such tag in
+ * the image file.
+ *
+ * @param tag the name of the tag.
+ */
+ private ExifAttribute getExifAttribute(String tag) {
+ // Retrieves all tag groups. The value from primary image tag group has a higher priority
+ // than the value from the thumbnail tag group if there are more than one candidates.
+ for (int i = 0; i < EXIF_TAGS.length; ++i) {
+ Object value = mAttributes[i].get(tag);
+ if (value != null) {
+ return (ExifAttribute) value;
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Returns the value of the specified tag or {@code null} if there
+ * is no such tag in the image file.
+ *
+ * @param tag the name of the tag.
+ */
+ public String getAttribute(String tag) {
+ ExifAttribute attribute = getExifAttribute(tag);
+ if (attribute != null) {
+ if (!sTagSetForCompatibility.contains(tag)) {
+ return attribute.getStringValue(mExifByteOrder);
+ }
+ if (tag.equals(TAG_GPS_TIMESTAMP)) {
+ // Convert the rational values to the custom formats for backwards compatibility.
+ if (attribute.format != IFD_FORMAT_URATIONAL
+ && attribute.format != IFD_FORMAT_SRATIONAL) {
+ return null;
+ }
+ Rational[] array = (Rational[]) attribute.getValue(mExifByteOrder);
+ if (array.length != 3) {
+ return null;
+ }
+ return String.format("%02d:%02d:%02d",
+ (int) ((float) array[0].numerator / array[0].denominator),
+ (int) ((float) array[1].numerator / array[1].denominator),
+ (int) ((float) array[2].numerator / array[2].denominator));
+ }
+ try {
+ return Double.toString(attribute.getDoubleValue(mExifByteOrder));
+ } catch (NumberFormatException e) {
+ return null;
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Returns the integer value of the specified tag. If there is no such tag
+ * in the image file or the value cannot be parsed as integer, return
+ * <var>defaultValue</var>.
+ *
+ * @param tag the name of the tag.
+ * @param defaultValue the value to return if the tag is not available.
+ */
+ public int getAttributeInt(String tag, int defaultValue) {
+ ExifAttribute exifAttribute = getExifAttribute(tag);
+ if (exifAttribute == null) {
+ return defaultValue;
+ }
+
+ try {
+ return exifAttribute.getIntValue(mExifByteOrder);
+ } catch (NumberFormatException e) {
+ return defaultValue;
+ }
+ }
+
+ /**
+ * Returns the double value of the tag that is specified as rational or contains a
+ * double-formatted value. If there is no such tag in the image file or the value cannot be
+ * parsed as double, return <var>defaultValue</var>.
+ *
+ * @param tag the name of the tag.
+ * @param defaultValue the value to return if the tag is not available.
+ */
+ public double getAttributeDouble(String tag, double defaultValue) {
+ ExifAttribute exifAttribute = getExifAttribute(tag);
+ if (exifAttribute == null) {
+ return defaultValue;
+ }
+
+ try {
+ return exifAttribute.getDoubleValue(mExifByteOrder);
+ } catch (NumberFormatException e) {
+ return defaultValue;
+ }
+ }
+
    /**
     * Set the value of the specified tag.
     *
     * @param tag the name of the tag.
     * @param value the value of the tag; {@code null} removes the tag from every tag group
     *              that currently contains it.
     */
    public void setAttribute(String tag, String value) {
        // Convert the given value to rational values for backwards compatibility.
        if (value != null && sTagSetForCompatibility.contains(tag)) {
            if (tag.equals(TAG_GPS_TIMESTAMP)) {
                // GPS timestamp arrives as "hh:mm:ss"; re-encode it as three rationals.
                Matcher m = sGpsTimestampPattern.matcher(value);
                if (!m.find()) {
                    Log.w(TAG, "Invalid value for " + tag + " : " + value);
                    return;
                }
                value = Integer.parseInt(m.group(1)) + "/1," + Integer.parseInt(m.group(2)) + "/1,"
                        + Integer.parseInt(m.group(3)) + "/1";
            } else {
                // Other compatibility tags hold a single double; encode as (value*10000)/10000.
                try {
                    double doubleValue = Double.parseDouble(value);
                    value = (long) (doubleValue * 10000L) + "/10000";
                } catch (NumberFormatException e) {
                    Log.w(TAG, "Invalid value for " + tag + " : " + value);
                    return;
                }
            }
        }

        // Write the value into every IFD (tag group) whose writable-tag map defines this tag.
        for (int i = 0 ; i < EXIF_TAGS.length; ++i) {
            // Skip the thumbnail IFD when no thumbnail exists.
            if (i == IFD_TYPE_THUMBNAIL && !mHasThumbnail) {
                continue;
            }
            final Object obj = sExifTagMapsForWriting[i].get(tag);
            if (obj != null) {
                if (value == null) {
                    // A null value means "remove this tag" from the group.
                    mAttributes[i].remove(tag);
                    continue;
                }
                final ExifTag exifTag = (ExifTag) obj;
                // Choose a data format: prefer the tag's declared primary/secondary formats
                // when the format guessed from the string value matches one of them.
                Pair<Integer, Integer> guess = guessDataFormat(value);
                int dataFormat;
                if (exifTag.primaryFormat == guess.first || exifTag.primaryFormat == guess.second) {
                    dataFormat = exifTag.primaryFormat;
                } else if (exifTag.secondaryFormat != -1 && (exifTag.secondaryFormat == guess.first
                        || exifTag.secondaryFormat == guess.second)) {
                    dataFormat = exifTag.secondaryFormat;
                } else if (exifTag.primaryFormat == IFD_FORMAT_BYTE
                        || exifTag.primaryFormat == IFD_FORMAT_UNDEFINED
                        || exifTag.primaryFormat == IFD_FORMAT_STRING) {
                    // Byte/undefined/string tags accept any textual value as-is.
                    dataFormat = exifTag.primaryFormat;
                } else {
                    Log.w(TAG, "Given tag (" + tag + ") value didn't match with one of expected "
                            + "formats: " + IFD_FORMAT_NAMES[exifTag.primaryFormat]
                            + (exifTag.secondaryFormat == -1 ? "" : ", "
                            + IFD_FORMAT_NAMES[exifTag.secondaryFormat]) + " (guess: "
                            + IFD_FORMAT_NAMES[guess.first] + (guess.second == -1 ? "" : ", "
                            + IFD_FORMAT_NAMES[guess.second]) + ")");
                    continue;
                }
                // Encode the (possibly comma-separated) value with the chosen format.
                switch (dataFormat) {
                    case IFD_FORMAT_BYTE: {
                        mAttributes[i].put(tag, ExifAttribute.createByte(value));
                        break;
                    }
                    case IFD_FORMAT_UNDEFINED:
                    case IFD_FORMAT_STRING: {
                        mAttributes[i].put(tag, ExifAttribute.createString(value));
                        break;
                    }
                    case IFD_FORMAT_USHORT: {
                        final String[] values = value.split(",");
                        final int[] intArray = new int[values.length];
                        for (int j = 0; j < values.length; ++j) {
                            intArray[j] = Integer.parseInt(values[j]);
                        }
                        mAttributes[i].put(tag,
                                ExifAttribute.createUShort(intArray, mExifByteOrder));
                        break;
                    }
                    case IFD_FORMAT_SLONG: {
                        final String[] values = value.split(",");
                        final int[] intArray = new int[values.length];
                        for (int j = 0; j < values.length; ++j) {
                            intArray[j] = Integer.parseInt(values[j]);
                        }
                        mAttributes[i].put(tag,
                                ExifAttribute.createSLong(intArray, mExifByteOrder));
                        break;
                    }
                    case IFD_FORMAT_ULONG: {
                        final String[] values = value.split(",");
                        final long[] longArray = new long[values.length];
                        for (int j = 0; j < values.length; ++j) {
                            longArray[j] = Long.parseLong(values[j]);
                        }
                        mAttributes[i].put(tag,
                                ExifAttribute.createULong(longArray, mExifByteOrder));
                        break;
                    }
                    case IFD_FORMAT_URATIONAL: {
                        // Each element is "numerator/denominator".
                        final String[] values = value.split(",");
                        final Rational[] rationalArray = new Rational[values.length];
                        for (int j = 0; j < values.length; ++j) {
                            final String[] numbers = values[j].split("/");
                            rationalArray[j] = new Rational((long) Double.parseDouble(numbers[0]),
                                    (long) Double.parseDouble(numbers[1]));
                        }
                        mAttributes[i].put(tag,
                                ExifAttribute.createURational(rationalArray, mExifByteOrder));
                        break;
                    }
                    case IFD_FORMAT_SRATIONAL: {
                        final String[] values = value.split(",");
                        final Rational[] rationalArray = new Rational[values.length];
                        for (int j = 0; j < values.length; ++j) {
                            final String[] numbers = values[j].split("/");
                            rationalArray[j] = new Rational((long) Double.parseDouble(numbers[0]),
                                    (long) Double.parseDouble(numbers[1]));
                        }
                        mAttributes[i].put(tag,
                                ExifAttribute.createSRational(rationalArray, mExifByteOrder));
                        break;
                    }
                    case IFD_FORMAT_DOUBLE: {
                        final String[] values = value.split(",");
                        final double[] doubleArray = new double[values.length];
                        for (int j = 0; j < values.length; ++j) {
                            doubleArray[j] = Double.parseDouble(values[j]);
                        }
                        mAttributes[i].put(tag,
                                ExifAttribute.createDouble(doubleArray, mExifByteOrder));
                        break;
                    }
                    default:
                        Log.w(TAG, "Data format isn't one of expected formats: " + dataFormat);
                        continue;
                }
            }
        }
    }
+
+ /**
+ * Update the values of the tags in the tag groups if any value for the tag already was stored.
+ *
+ * @param tag the name of the tag.
+ * @param value the value of the tag in a form of {@link ExifAttribute}.
+ * @return Returns {@code true} if updating is placed.
+ */
+ private boolean updateAttribute(String tag, ExifAttribute value) {
+ boolean updated = false;
+ for (int i = 0 ; i < EXIF_TAGS.length; ++i) {
+ if (mAttributes[i].containsKey(tag)) {
+ mAttributes[i].put(tag, value);
+ updated = true;
+ }
+ }
+ return updated;
+ }
+
+ /**
+ * Remove any values of the specified tag.
+ *
+ * @param tag the name of the tag.
+ */
+ private void removeAttribute(String tag) {
+ for (int i = 0 ; i < EXIF_TAGS.length; ++i) {
+ mAttributes[i].remove(tag);
+ }
+ }
+
    /**
     * This function decides which parser to read the image data according to the given input stream
     * type and the content of the input stream. In each case, it reads the first three bytes to
     * determine whether the image data format is JPEG or not.
     */
    private void loadAttributes(@NonNull InputStream in) throws IOException {
        try {
            // Initialize mAttributes.
            for (int i = 0; i < EXIF_TAGS.length; ++i) {
                mAttributes[i] = new HashMap();
            }

            // Check file type. The stream is wrapped in a BufferedInputStream so the
            // signature bytes can be re-read by the parser after sniffing.
            in = new BufferedInputStream(in, SIGNATURE_CHECK_SIZE);
            mMimeType = getMimeType((BufferedInputStream) in);

            // Create byte-ordered input stream
            ByteOrderedDataInputStream inputStream = new ByteOrderedDataInputStream(in);

            // Dispatch to the format-specific parser chosen above.
            switch (mMimeType) {
                case IMAGE_TYPE_JPEG: {
                    getJpegAttributes(inputStream, 0, IFD_TYPE_PRIMARY); // 0 is offset
                    break;
                }
                case IMAGE_TYPE_RAF: {
                    getRafAttributes(inputStream);
                    break;
                }
                case IMAGE_TYPE_HEIF: {
                    getHeifAttributes(inputStream);
                    break;
                }
                case IMAGE_TYPE_ORF: {
                    getOrfAttributes(inputStream);
                    break;
                }
                case IMAGE_TYPE_RW2: {
                    getRw2Attributes(inputStream);
                    break;
                }
                // All remaining RAW flavors (and unknown content) share the generic
                // TIFF/IFD reader; PEF in particular is only identified while reading.
                case IMAGE_TYPE_ARW:
                case IMAGE_TYPE_CR2:
                case IMAGE_TYPE_DNG:
                case IMAGE_TYPE_NEF:
                case IMAGE_TYPE_NRW:
                case IMAGE_TYPE_PEF:
                case IMAGE_TYPE_SRW:
                case IMAGE_TYPE_UNKNOWN: {
                    getRawAttributes(inputStream);
                    break;
                }
                default: {
                    break;
                }
            }
            // Set thumbnail image offset and length
            setThumbnailData(inputStream);
            mIsSupportedFile = true;
        } catch (IOException e) {
            // Ignore exceptions in order to keep the compatibility with the old versions of
            // ExifInterface.
            mIsSupportedFile = false;
            if (DEBUG) {
                Log.w(TAG, "Invalid image: ExifInterface got an unsupported image format file"
                        + "(ExifInterface supports JPEG and some RAW image formats only) "
                        + "or a corrupted JPEG file to ExifInterface.", e);
            }
        } finally {
            // Always runs, even for unsupported files, so legacy default tags exist.
            addDefaultValuesForCompatibility();

            if (DEBUG) {
                printAttributes();
            }
        }
    }
+
+ private static boolean isSeekableFD(FileDescriptor fd) throws IOException {
+ try {
+ Os.lseek(fd, 0, OsConstants.SEEK_CUR);
+ return true;
+ } catch (ErrnoException e) {
+ return false;
+ }
+ }
+
+ // Prints out attributes for debugging.
+ private void printAttributes() {
+ for (int i = 0; i < mAttributes.length; ++i) {
+ Log.d(TAG, "The size of tag group[" + i + "]: " + mAttributes[i].size());
+ for (Map.Entry entry : (Set<Map.Entry>) mAttributes[i].entrySet()) {
+ final ExifAttribute tagValue = (ExifAttribute) entry.getValue();
+ Log.d(TAG, "tagName: " + entry.getKey() + ", tagType: " + tagValue.toString()
+ + ", tagValue: '" + tagValue.getStringValue(mExifByteOrder) + "'");
+ }
+ }
+ }
+
+ /**
+ * Save the tag data into the original image file. This is expensive because it involves
+ * copying all the data from one file to another and deleting the old file and renaming the
+ * other. It's best to use {@link #setAttribute(String,String)} to set all attributes to write
+ * and make a single call rather than multiple calls for each attribute.
+ * <p>
+ * This method is only supported for JPEG files.
+ * </p>
+ */
+ public void saveAttributes() throws IOException {
+ if (!mIsSupportedFile || mMimeType != IMAGE_TYPE_JPEG) {
+ throw new IOException("ExifInterface only supports saving attributes on JPEG formats.");
+ }
+ if (mIsInputStream || (mSeekableFileDescriptor == null && mFilename == null)) {
+ throw new IOException(
+ "ExifInterface does not support saving attributes for the current input.");
+ }
+
+ // Keep the thumbnail in memory
+ mThumbnailBytes = getThumbnail();
+
+ FileInputStream in = null;
+ FileOutputStream out = null;
+ File tempFile = null;
+ try {
+ // Move the original file to temporary file.
+ if (mFilename != null) {
+ tempFile = new File(mFilename + ".tmp");
+ File originalFile = new File(mFilename);
+ if (!originalFile.renameTo(tempFile)) {
+ throw new IOException("Could'nt rename to " + tempFile.getAbsolutePath());
+ }
+ } else if (mSeekableFileDescriptor != null) {
+ tempFile = File.createTempFile("temp", "jpg");
+ Os.lseek(mSeekableFileDescriptor, 0, OsConstants.SEEK_SET);
+ in = new FileInputStream(mSeekableFileDescriptor);
+ out = new FileOutputStream(tempFile);
+ Streams.copy(in, out);
+ }
+ } catch (ErrnoException e) {
+ throw e.rethrowAsIOException();
+ } finally {
+ IoUtils.closeQuietly(in);
+ IoUtils.closeQuietly(out);
+ }
+
+ in = null;
+ out = null;
+ try {
+ // Save the new file.
+ in = new FileInputStream(tempFile);
+ if (mFilename != null) {
+ out = new FileOutputStream(mFilename);
+ } else if (mSeekableFileDescriptor != null) {
+ Os.lseek(mSeekableFileDescriptor, 0, OsConstants.SEEK_SET);
+ out = new FileOutputStream(mSeekableFileDescriptor);
+ }
+ saveJpegAttributes(in, out);
+ } catch (ErrnoException e) {
+ throw e.rethrowAsIOException();
+ } finally {
+ IoUtils.closeQuietly(in);
+ IoUtils.closeQuietly(out);
+ tempFile.delete();
+ }
+
+ // Discard the thumbnail in memory
+ mThumbnailBytes = null;
+ }
+
+ /**
+ * Returns true if the image file has a thumbnail.
+ */
+ public boolean hasThumbnail() {
+ return mHasThumbnail;
+ }
+
+ /**
+ * Returns the JPEG compressed thumbnail inside the image file, or {@code null} if there is no
+ * JPEG compressed thumbnail.
+ * The returned data can be decoded using
+ * {@link android.graphics.BitmapFactory#decodeByteArray(byte[],int,int)}
+ */
+ public byte[] getThumbnail() {
+ if (mThumbnailCompression == DATA_JPEG || mThumbnailCompression == DATA_JPEG_COMPRESSED) {
+ return getThumbnailBytes();
+ }
+ return null;
+ }
+
    /**
     * Returns the thumbnail bytes inside the image file, regardless of the compression type of the
     * thumbnail image.
     */
    public byte[] getThumbnailBytes() {
        if (!mHasThumbnail) {
            return null;
        }
        // Return the cached copy when a previous call (or saveAttributes) already read it.
        if (mThumbnailBytes != null) {
            return mThumbnailBytes;
        }

        // Read the thumbnail from whichever source this instance was created from:
        // asset stream, filename, or seekable file descriptor.
        InputStream in = null;
        try {
            if (mAssetInputStream != null) {
                in = mAssetInputStream;
                if (in.markSupported()) {
                    // Rewind to the mark so the skip below is relative to the file start.
                    in.reset();
                } else {
                    Log.d(TAG, "Cannot read thumbnail from inputstream without mark/reset support");
                    return null;
                }
            } else if (mFilename != null) {
                in = new FileInputStream(mFilename);
            } else if (mSeekableFileDescriptor != null) {
                // Dup the fd so seeking here does not disturb the caller's descriptor.
                FileDescriptor fileDescriptor = Os.dup(mSeekableFileDescriptor);
                Os.lseek(fileDescriptor, 0, OsConstants.SEEK_SET);
                in = new FileInputStream(fileDescriptor);
            }
            if (in == null) {
                // Should not be reached this.
                throw new FileNotFoundException();
            }
            // A short skip or short read means the stored offset/length no longer match
            // the file contents, so treat it as corruption.
            if (in.skip(mThumbnailOffset) != mThumbnailOffset) {
                throw new IOException("Corrupted image");
            }
            byte[] buffer = new byte[mThumbnailLength];
            if (in.read(buffer) != mThumbnailLength) {
                throw new IOException("Corrupted image");
            }
            mThumbnailBytes = buffer;
            return buffer;
        } catch (IOException | ErrnoException e) {
            // Couldn't get a thumbnail image.
            Log.d(TAG, "Encountered exception while getting thumbnail", e);
        } finally {
            IoUtils.closeQuietly(in);
        }
        return null;
    }
+
+ /**
+ * Creates and returns a Bitmap object of the thumbnail image based on the byte array and the
+ * thumbnail compression value, or {@code null} if the compression type is unsupported.
+ */
+ public Bitmap getThumbnailBitmap() {
+ if (!mHasThumbnail) {
+ return null;
+ } else if (mThumbnailBytes == null) {
+ mThumbnailBytes = getThumbnailBytes();
+ }
+
+ if (mThumbnailCompression == DATA_JPEG || mThumbnailCompression == DATA_JPEG_COMPRESSED) {
+ return BitmapFactory.decodeByteArray(mThumbnailBytes, 0, mThumbnailLength);
+ } else if (mThumbnailCompression == DATA_UNCOMPRESSED) {
+ int[] rgbValues = new int[mThumbnailBytes.length / 3];
+ byte alpha = (byte) 0xff000000;
+ for (int i = 0; i < rgbValues.length; i++) {
+ rgbValues[i] = alpha + (mThumbnailBytes[3 * i] << 16)
+ + (mThumbnailBytes[3 * i + 1] << 8) + mThumbnailBytes[3 * i + 2];
+ }
+
+ ExifAttribute imageLengthAttribute =
+ (ExifAttribute) mAttributes[IFD_TYPE_THUMBNAIL].get(TAG_IMAGE_LENGTH);
+ ExifAttribute imageWidthAttribute =
+ (ExifAttribute) mAttributes[IFD_TYPE_THUMBNAIL].get(TAG_IMAGE_WIDTH);
+ if (imageLengthAttribute != null && imageWidthAttribute != null) {
+ int imageLength = imageLengthAttribute.getIntValue(mExifByteOrder);
+ int imageWidth = imageWidthAttribute.getIntValue(mExifByteOrder);
+ return Bitmap.createBitmap(
+ rgbValues, imageWidth, imageLength, Bitmap.Config.ARGB_8888);
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Returns true if thumbnail image is JPEG Compressed, or false if either thumbnail image does
+ * not exist or thumbnail image is uncompressed.
+ */
+ public boolean isThumbnailCompressed() {
+ if (!mHasThumbnail) {
+ return false;
+ }
+ if (mThumbnailCompression == DATA_JPEG || mThumbnailCompression == DATA_JPEG_COMPRESSED) {
+ return true;
+ }
+ return false;
+ }
+
+ /**
+ * Returns the offset and length of thumbnail inside the image file, or
+ * {@code null} if there is no thumbnail.
+ *
+ * @return two-element array, the offset in the first value, and length in
+ * the second, or {@code null} if no thumbnail was found.
+ */
+ public long[] getThumbnailRange() {
+ if (!mHasThumbnail) {
+ return null;
+ }
+
+ long[] range = new long[2];
+ range[0] = mThumbnailOffset;
+ range[1] = mThumbnailLength;
+
+ return range;
+ }
+
+ /**
+ * Stores the latitude and longitude value in a float array. The first element is
+ * the latitude, and the second element is the longitude. Returns false if the
+ * Exif tags are not available.
+ */
+ public boolean getLatLong(float output[]) {
+ String latValue = getAttribute(TAG_GPS_LATITUDE);
+ String latRef = getAttribute(TAG_GPS_LATITUDE_REF);
+ String lngValue = getAttribute(TAG_GPS_LONGITUDE);
+ String lngRef = getAttribute(TAG_GPS_LONGITUDE_REF);
+
+ if (latValue != null && latRef != null && lngValue != null && lngRef != null) {
+ try {
+ output[0] = convertRationalLatLonToFloat(latValue, latRef);
+ output[1] = convertRationalLatLonToFloat(lngValue, lngRef);
+ return true;
+ } catch (IllegalArgumentException e) {
+ // if values are not parseable
+ }
+ }
+
+ return false;
+ }
+
+ /**
+ * Return the altitude in meters. If the exif tag does not exist, return
+ * <var>defaultValue</var>.
+ *
+ * @param defaultValue the value to return if the tag is not available.
+ */
+ public double getAltitude(double defaultValue) {
+ double altitude = getAttributeDouble(TAG_GPS_ALTITUDE, -1);
+ int ref = getAttributeInt(TAG_GPS_ALTITUDE_REF, -1);
+
+ if (altitude >= 0 && ref >= 0) {
+ return (altitude * ((ref == 1) ? -1 : 1));
+ } else {
+ return defaultValue;
+ }
+ }
+
+ /**
+ * Returns number of milliseconds since Jan. 1, 1970, midnight local time.
+ * Returns -1 if the date time information if not available.
+ * @hide
+ */
+ public long getDateTime() {
+ String dateTimeString = getAttribute(TAG_DATETIME);
+ if (dateTimeString == null
+ || !sNonZeroTimePattern.matcher(dateTimeString).matches()) return -1;
+
+ ParsePosition pos = new ParsePosition(0);
+ try {
+ // The exif field is in local time. Parsing it as if it is UTC will yield time
+ // since 1/1/1970 local time
+ Date datetime = sFormatter.parse(dateTimeString, pos);
+ if (datetime == null) return -1;
+ long msecs = datetime.getTime();
+
+ String subSecs = getAttribute(TAG_SUBSEC_TIME);
+ if (subSecs != null) {
+ try {
+ long sub = Long.parseLong(subSecs);
+ while (sub > 1000) {
+ sub /= 10;
+ }
+ msecs += sub;
+ } catch (NumberFormatException e) {
+ // Ignored
+ }
+ }
+ return msecs;
+ } catch (IllegalArgumentException e) {
+ return -1;
+ }
+ }
+
+ /**
+ * Returns number of milliseconds since Jan. 1, 1970, midnight UTC.
+ * Returns -1 if the date time information if not available.
+ * @hide
+ */
+ public long getGpsDateTime() {
+ String date = getAttribute(TAG_GPS_DATESTAMP);
+ String time = getAttribute(TAG_GPS_TIMESTAMP);
+ if (date == null || time == null
+ || (!sNonZeroTimePattern.matcher(date).matches()
+ && !sNonZeroTimePattern.matcher(time).matches())) {
+ return -1;
+ }
+
+ String dateTimeString = date + ' ' + time;
+
+ ParsePosition pos = new ParsePosition(0);
+ try {
+ Date datetime = sFormatter.parse(dateTimeString, pos);
+ if (datetime == null) return -1;
+ return datetime.getTime();
+ } catch (IllegalArgumentException e) {
+ return -1;
+ }
+ }
+
+ /** {@hide} */
+ public static float convertRationalLatLonToFloat(String rationalString, String ref) {
+ try {
+ String [] parts = rationalString.split(",");
+
+ String [] pair;
+ pair = parts[0].split("/");
+ double degrees = Double.parseDouble(pair[0].trim())
+ / Double.parseDouble(pair[1].trim());
+
+ pair = parts[1].split("/");
+ double minutes = Double.parseDouble(pair[0].trim())
+ / Double.parseDouble(pair[1].trim());
+
+ pair = parts[2].split("/");
+ double seconds = Double.parseDouble(pair[0].trim())
+ / Double.parseDouble(pair[1].trim());
+
+ double result = degrees + (minutes / 60.0) + (seconds / 3600.0);
+ if ((ref.equals("S") || ref.equals("W"))) {
+ return (float) -result;
+ }
+ return (float) result;
+ } catch (NumberFormatException | ArrayIndexOutOfBoundsException e) {
+ // Not valid
+ throw new IllegalArgumentException();
+ }
+ }
+
+ // Checks the type of image file
+ private int getMimeType(BufferedInputStream in) throws IOException {
+ in.mark(SIGNATURE_CHECK_SIZE);
+ byte[] signatureCheckBytes = new byte[SIGNATURE_CHECK_SIZE];
+ in.read(signatureCheckBytes);
+ in.reset();
+ if (isJpegFormat(signatureCheckBytes)) {
+ return IMAGE_TYPE_JPEG;
+ } else if (isRafFormat(signatureCheckBytes)) {
+ return IMAGE_TYPE_RAF;
+ } else if (isHeifFormat(signatureCheckBytes)) {
+ return IMAGE_TYPE_HEIF;
+ } else if (isOrfFormat(signatureCheckBytes)) {
+ return IMAGE_TYPE_ORF;
+ } else if (isRw2Format(signatureCheckBytes)) {
+ return IMAGE_TYPE_RW2;
+ }
+ // Certain file formats (PEF) are identified in readImageFileDirectory()
+ return IMAGE_TYPE_UNKNOWN;
+ }
+
+ /**
+ * This method looks at the first 3 bytes to determine if this file is a JPEG file.
+ * See http://www.media.mit.edu/pia/Research/deepview/exif.html, "JPEG format and Marker"
+ */
+ private static boolean isJpegFormat(byte[] signatureCheckBytes) throws IOException {
+ for (int i = 0; i < JPEG_SIGNATURE.length; i++) {
+ if (signatureCheckBytes[i] != JPEG_SIGNATURE[i]) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ /**
+ * This method looks at the first 15 bytes to determine if this file is a RAF file.
+ * There is no official specification for RAF files from Fuji, but there is an online archive of
+ * image file specifications:
+ * http://fileformats.archiveteam.org/wiki/Fujifilm_RAF
+ */
+ private boolean isRafFormat(byte[] signatureCheckBytes) throws IOException {
+ byte[] rafSignatureBytes = RAF_SIGNATURE.getBytes();
+ for (int i = 0; i < rafSignatureBytes.length; i++) {
+ if (signatureCheckBytes[i] != rafSignatureBytes[i]) {
+ return false;
+ }
+ }
+ return true;
+ }
+
    // Sniffs an ISO-BMFF "ftyp" box at the start of the signature bytes and reports HEIF
    // when both the "mif1" and "heic" brands appear among the major/compatible brands.
    private boolean isHeifFormat(byte[] signatureCheckBytes) throws IOException {
        ByteOrderedDataInputStream signatureInputStream = null;
        try {
            signatureInputStream = new ByteOrderedDataInputStream(signatureCheckBytes);
            // ISO-BMFF box headers are big-endian.
            signatureInputStream.setByteOrder(ByteOrder.BIG_ENDIAN);

            long chunkSize = signatureInputStream.readInt();
            byte[] chunkType = new byte[4];
            signatureInputStream.read(chunkType);

            if (!Arrays.equals(chunkType, HEIF_TYPE_FTYP)) {
                return false;
            }

            long chunkDataOffset = 8;
            if (chunkSize == 1) {
                // This indicates that the next 8 bytes represent the chunk size,
                // and chunk data comes after that.
                chunkSize = signatureInputStream.readLong();
                if (chunkSize < 16) {
                    // The smallest valid chunk is 16 bytes long in this case.
                    return false;
                }
                chunkDataOffset += 8;
            }

            // only sniff up to signatureCheckBytes.length
            if (chunkSize > signatureCheckBytes.length) {
                chunkSize = signatureCheckBytes.length;
            }

            long chunkDataSize = chunkSize - chunkDataOffset;

            // It should at least have major brand (4-byte) and minor version (4-byte).
            // The rest of the chunk (if any) is a list of (4-byte) compatible brands.
            if (chunkDataSize < 8) {
                return false;
            }

            byte[] brand = new byte[4];
            boolean isMif1 = false;
            boolean isHeic = false;
            for (long i = 0; i < chunkDataSize / 4; ++i) {
                if (signatureInputStream.read(brand) != brand.length) {
                    return false;
                }
                if (i == 1) {
                    // Skip this index, it refers to the minorVersion, not a brand.
                    continue;
                }
                if (Arrays.equals(brand, HEIF_BRAND_MIF1)) {
                    isMif1 = true;
                } else if (Arrays.equals(brand, HEIF_BRAND_HEIC)) {
                    isHeic = true;
                }
                if (isMif1 && isHeic) {
                    return true;
                }
            }
        } catch (Exception e) {
            // Any parse failure simply means "not HEIF"; log only in debug builds.
            if (DEBUG) {
                Log.d(TAG, "Exception parsing HEIF file type box.", e);
            }
        } finally {
            if (signatureInputStream != null) {
                signatureInputStream.close();
                signatureInputStream = null;
            }
        }
        return false;
    }
+
+ /**
+ * ORF has a similar structure to TIFF but it contains a different signature at the TIFF Header.
+ * This method looks at the 2 bytes following the Byte Order bytes to determine if this file is
+ * an ORF file.
+ * There is no official specification for ORF files from Olympus, but there is an online archive
+ * of image file specifications:
+ * http://fileformats.archiveteam.org/wiki/Olympus_ORF
+ */
+ private boolean isOrfFormat(byte[] signatureCheckBytes) throws IOException {
+ ByteOrderedDataInputStream signatureInputStream =
+ new ByteOrderedDataInputStream(signatureCheckBytes);
+ // Read byte order
+ mExifByteOrder = readByteOrder(signatureInputStream);
+ // Set byte order
+ signatureInputStream.setByteOrder(mExifByteOrder);
+
+ short orfSignature = signatureInputStream.readShort();
+ if (orfSignature == ORF_SIGNATURE_1 || orfSignature == ORF_SIGNATURE_2) {
+ return true;
+ }
+ return false;
+ }
+
+ /**
+ * RW2 is TIFF-based, but stores 0x55 signature byte instead of 0x42 at the header
+ * See http://lclevy.free.fr/raw/
+ */
+ private boolean isRw2Format(byte[] signatureCheckBytes) throws IOException {
+ ByteOrderedDataInputStream signatureInputStream =
+ new ByteOrderedDataInputStream(signatureCheckBytes);
+ // Read byte order
+ mExifByteOrder = readByteOrder(signatureInputStream);
+ // Set byte order
+ signatureInputStream.setByteOrder(mExifByteOrder);
+
+ short signatureByte = signatureInputStream.readShort();
+ if (signatureByte == RW2_SIGNATURE) {
+ return true;
+ }
+ return false;
+ }
+
    /**
     * Loads EXIF attributes from a JPEG input stream.
     *
     * @param in The input stream that starts with the JPEG data.
     * @param jpegOffset The offset value in input stream for JPEG data.
     * @param imageType The image type from which to retrieve metadata. Use IFD_TYPE_PRIMARY for
     *                   primary image, IFD_TYPE_PREVIEW for preview image, and
     *                   IFD_TYPE_THUMBNAIL for thumbnail image.
     * @throws IOException If the data contains invalid JPEG markers, offsets, or length values.
     */
    private void getJpegAttributes(ByteOrderedDataInputStream in, int jpegOffset, int imageType)
            throws IOException {
        // See JPEG File Interchange Format Specification, "JFIF Specification"
        if (DEBUG) {
            Log.d(TAG, "getJpegAttributes starting with: " + in);
        }

        // JPEG uses Big Endian by default. See https://people.cs.umass.edu/~verts/cs32/endian.html
        in.setByteOrder(ByteOrder.BIG_ENDIAN);

        // Skip to JPEG data
        in.seek(jpegOffset);
        // bytesRead tracks the absolute stream position so mExifOffset can be recorded.
        int bytesRead = jpegOffset;

        // Expect the two-byte SOI marker (0xFF 0xD8) at the start of the JPEG data.
        byte marker;
        if ((marker = in.readByte()) != MARKER) {
            throw new IOException("Invalid marker: " + Integer.toHexString(marker & 0xff));
        }
        ++bytesRead;
        if (in.readByte() != MARKER_SOI) {
            // NOTE(review): this message reports the previous byte ('marker'), not the byte
            // just read — confirm whether that is intended.
            throw new IOException("Invalid marker: " + Integer.toHexString(marker & 0xff));
        }
        ++bytesRead;
        // Walk the JPEG segment list until EOI or SOS terminates the metadata section.
        while (true) {
            marker = in.readByte();
            if (marker != MARKER) {
                throw new IOException("Invalid marker:" + Integer.toHexString(marker & 0xff));
            }
            ++bytesRead;
            marker = in.readByte();
            if (DEBUG) {
                Log.d(TAG, "Found JPEG segment indicator: " + Integer.toHexString(marker & 0xff));
            }
            ++bytesRead;

            // EOI indicates the end of an image and in case of SOS, JPEG image stream starts and
            // the image data will terminate right after.
            if (marker == MARKER_EOI || marker == MARKER_SOS) {
                break;
            }
            // Segment length field includes its own two bytes; subtract them.
            int length = in.readUnsignedShort() - 2;
            bytesRead += 2;
            if (DEBUG) {
                Log.d(TAG, "JPEG segment: " + Integer.toHexString(marker & 0xff) + " (length: "
                        + (length + 2) + ")");
            }
            if (length < 0) {
                throw new IOException("Invalid length");
            }
            switch (marker) {
                case MARKER_APP1: {
                    // APP1 may carry the EXIF payload; verify the "Exif\0\0" identifier.
                    if (DEBUG) {
                        Log.d(TAG, "MARKER_APP1");
                    }
                    if (length < 6) {
                        // Skip if it's not an EXIF APP1 segment.
                        break;
                    }
                    byte[] identifier = new byte[6];
                    if (in.read(identifier) != 6) {
                        throw new IOException("Invalid exif");
                    }
                    bytesRead += 6;
                    length -= 6;
                    if (!Arrays.equals(identifier, IDENTIFIER_EXIF_APP1)) {
                        // Skip if it's not an EXIF APP1 segment.
                        break;
                    }
                    if (length <= 0) {
                        throw new IOException("Invalid exif");
                    }
                    if (DEBUG) {
                        Log.d(TAG, "readExifSegment with a byte array (length: " + length + ")");
                    }
                    // Save offset values for createJpegThumbnailBitmap() function
                    mExifOffset = bytesRead;

                    byte[] bytes = new byte[length];
                    if (in.read(bytes) != length) {
                        throw new IOException("Invalid exif");
                    }
                    bytesRead += length;
                    length = 0;

                    readExifSegment(bytes, imageType);
                    break;
                }

                case MARKER_COM: {
                    // JPEG comment segment: keep it as TAG_USER_COMMENT unless one exists.
                    byte[] bytes = new byte[length];
                    if (in.read(bytes) != length) {
                        throw new IOException("Invalid exif");
                    }
                    length = 0;
                    if (getAttribute(TAG_USER_COMMENT) == null) {
                        mAttributes[IFD_TYPE_EXIF].put(TAG_USER_COMMENT, ExifAttribute.createString(
                                new String(bytes, ASCII)));
                    }
                    break;
                }

                // Any SOFn segment carries the frame dimensions (1 precision byte, then
                // two unsigned shorts: height, width).
                case MARKER_SOF0:
                case MARKER_SOF1:
                case MARKER_SOF2:
                case MARKER_SOF3:
                case MARKER_SOF5:
                case MARKER_SOF6:
                case MARKER_SOF7:
                case MARKER_SOF9:
                case MARKER_SOF10:
                case MARKER_SOF11:
                case MARKER_SOF13:
                case MARKER_SOF14:
                case MARKER_SOF15: {
                    if (in.skipBytes(1) != 1) {
                        throw new IOException("Invalid SOFx");
                    }
                    mAttributes[imageType].put(TAG_IMAGE_LENGTH, ExifAttribute.createULong(
                            in.readUnsignedShort(), mExifByteOrder));
                    mAttributes[imageType].put(TAG_IMAGE_WIDTH, ExifAttribute.createULong(
                            in.readUnsignedShort(), mExifByteOrder));
                    // 1 skipped byte + 2*2 bytes consumed above.
                    length -= 5;
                    break;
                }

                default: {
                    break;
                }
            }
            if (length < 0) {
                throw new IOException("Invalid length");
            }
            // Skip whatever remains of the segment body.
            if (in.skipBytes(length) != length) {
                throw new IOException("Invalid JPEG segment");
            }
            bytesRead += length;
        }
        // Restore original byte order
        in.setByteOrder(mExifByteOrder);
    }
+
    // Generic TIFF/IFD reader used for ARW/CR2/DNG/NEF/NRW/PEF/SRW and unknown content:
    // parses the TIFF header, reads all IFDs, then fixes up per-image size tags.
    private void getRawAttributes(ByteOrderedDataInputStream in) throws IOException {
        // Parse TIFF Headers. See JEITA CP-3451C Section 4.5.2. Table 1.
        parseTiffHeaders(in, in.available());

        // Read TIFF image file directories. See JEITA CP-3451C Section 4.5.2. Figure 6.
        readImageFileDirectory(in, IFD_TYPE_PRIMARY);

        // Update ImageLength/Width tags for all image data.
        updateImageSizeValues(in, IFD_TYPE_PRIMARY);
        updateImageSizeValues(in, IFD_TYPE_PREVIEW);
        updateImageSizeValues(in, IFD_TYPE_THUMBNAIL);

        // Check if each image data is in valid position.
        validateImages(in);

        if (mMimeType == IMAGE_TYPE_PEF) {
            // PEF files contain a MakerNote data, which contains the data for ColorSpace tag.
            // See http://lclevy.free.fr/raw/ and piex.cc PefGetPreviewData()
            ExifAttribute makerNoteAttribute =
                    (ExifAttribute) mAttributes[IFD_TYPE_EXIF].get(TAG_MAKER_NOTE);
            if (makerNoteAttribute != null) {
                // Create an ordered DataInputStream for MakerNote
                ByteOrderedDataInputStream makerNoteDataInputStream =
                        new ByteOrderedDataInputStream(makerNoteAttribute.bytes);
                makerNoteDataInputStream.setByteOrder(mExifByteOrder);

                // Seek to MakerNote data
                makerNoteDataInputStream.seek(PEF_MAKER_NOTE_SKIP_SIZE);

                // Read IFD data from MakerNote
                readImageFileDirectory(makerNoteDataInputStream, IFD_TYPE_PEF);

                // Update ColorSpace tag
                ExifAttribute colorSpaceAttribute =
                        (ExifAttribute) mAttributes[IFD_TYPE_PEF].get(TAG_COLOR_SPACE);
                if (colorSpaceAttribute != null) {
                    // Promote the MakerNote's ColorSpace into the regular EXIF IFD.
                    mAttributes[IFD_TYPE_EXIF].put(TAG_COLOR_SPACE, colorSpaceAttribute);
                }
            }
        }
    }
+
    /**
     * RAF files contains a JPEG and a CFA data.
     * The JPEG contains two images, a preview and a thumbnail, while the CFA contains a RAW image.
     * This method looks at the first 160 bytes of a RAF file to retrieve the offset and length
     * values for the JPEG and CFA data.
     * Using that data, it parses the JPEG data to retrieve the preview and thumbnail image data,
     * then parses the CFA metadata to retrieve the primary image length/width values.
     * For data format details, see http://fileformats.archiveteam.org/wiki/Fujifilm_RAF
     */
    private void getRafAttributes(ByteOrderedDataInputStream in) throws IOException {
        // Retrieve offset & length values from the fixed-layout RAF header.
        in.skipBytes(RAF_OFFSET_TO_JPEG_IMAGE_OFFSET);
        byte[] jpegOffsetBytes = new byte[4];
        byte[] cfaHeaderOffsetBytes = new byte[4];
        in.read(jpegOffsetBytes);
        // Skip JPEG length value since it is not needed
        in.skipBytes(RAF_JPEG_LENGTH_VALUE_SIZE);
        in.read(cfaHeaderOffsetBytes);
        // RAF header integers are big-endian (ByteBuffer's default order).
        int rafJpegOffset = ByteBuffer.wrap(jpegOffsetBytes).getInt();
        int rafCfaHeaderOffset = ByteBuffer.wrap(cfaHeaderOffsetBytes).getInt();

        // Retrieve JPEG image metadata
        getJpegAttributes(in, rafJpegOffset, IFD_TYPE_PREVIEW);

        // Skip to CFA header offset.
        in.seek(rafCfaHeaderOffset);

        // Retrieve primary image length/width values, if TAG_RAF_IMAGE_SIZE exists
        in.setByteOrder(ByteOrder.BIG_ENDIAN);
        int numberOfDirectoryEntry = in.readInt();
        if (DEBUG) {
            Log.d(TAG, "numberOfDirectoryEntry: " + numberOfDirectoryEntry);
        }
        // CFA stores some metadata about the RAW image. Since CFA uses proprietary tags, can only
        // find and retrieve image size information tags, while skipping others.
        // See piex.cc RafGetDimension()
        for (int i = 0; i < numberOfDirectoryEntry; ++i) {
            int tagNumber = in.readUnsignedShort();
            int numberOfBytes = in.readUnsignedShort();
            if (tagNumber == TAG_RAF_IMAGE_SIZE.number) {
                // Entry payload is two shorts: height then width.
                int imageLength = in.readShort();
                int imageWidth = in.readShort();
                ExifAttribute imageLengthAttribute =
                        ExifAttribute.createUShort(imageLength, mExifByteOrder);
                ExifAttribute imageWidthAttribute =
                        ExifAttribute.createUShort(imageWidth, mExifByteOrder);
                mAttributes[IFD_TYPE_PRIMARY].put(TAG_IMAGE_LENGTH, imageLengthAttribute);
                mAttributes[IFD_TYPE_PRIMARY].put(TAG_IMAGE_WIDTH, imageWidthAttribute);
                if (DEBUG) {
                    Log.d(TAG, "Updated to length: " + imageLength + ", width: " + imageWidth);
                }
                // Done once the size entry has been found.
                return;
            }
            in.skipBytes(numberOfBytes);
        }
    }
+
    // Extracts HEIF metadata (dimensions, rotation) via MediaMetadataRetriever rather than
    // parsing the container directly; the stream is adapted through a MediaDataSource.
    private void getHeifAttributes(ByteOrderedDataInputStream in) throws IOException {
        MediaMetadataRetriever retriever = new MediaMetadataRetriever();
        try {
            if (mSeekableFileDescriptor != null) {
                retriever.setDataSource(mSeekableFileDescriptor);
            } else {
                // Wrap the byte-ordered stream as a MediaDataSource; mPosition mirrors the
                // stream position so consecutive reads avoid redundant seeks.
                retriever.setDataSource(new MediaDataSource() {
                    long mPosition;

                    @Override
                    public void close() throws IOException {}

                    @Override
                    public int readAt(long position, byte[] buffer, int offset, int size)
                            throws IOException {
                        if (size == 0) {
                            return 0;
                        }
                        if (position < 0) {
                            return -1;
                        }
                        if (mPosition != position) {
                            in.seek(position);
                            mPosition = position;
                        }

                        int bytesRead = in.read(buffer, offset, size);
                        if (bytesRead < 0) {
                            mPosition = -1; // need to seek on next read
                            return -1;
                        }

                        mPosition += bytesRead;
                        return bytesRead;
                    }

                    @Override
                    public long getSize() throws IOException {
                        // Unknown size; the retriever handles -1 as "length not available".
                        return -1;
                    }
                });
            }

            String hasVideo = retriever.extractMetadata(
                    MediaMetadataRetriever.METADATA_KEY_HAS_VIDEO);

            final String METADATA_HAS_VIDEO_VALUE_YES = "yes";
            if (METADATA_HAS_VIDEO_VALUE_YES.equals(hasVideo)) {
                String width = retriever.extractMetadata(
                        MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH);
                String height = retriever.extractMetadata(
                        MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT);

                if (width != null) {
                    mAttributes[IFD_TYPE_PRIMARY].put(TAG_IMAGE_WIDTH,
                            ExifAttribute.createUShort(Integer.parseInt(width), mExifByteOrder));
                }

                if (height != null) {
                    mAttributes[IFD_TYPE_PRIMARY].put(TAG_IMAGE_LENGTH,
                            ExifAttribute.createUShort(Integer.parseInt(height), mExifByteOrder));
                }

                // Note that the rotation angle from MediaMetadataRetriever for heif images
                // are CCW, while rotation in ExifInterface orientations are CW.
                String rotation = retriever.extractMetadata(
                        MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION);
                if (rotation != null) {
                    int orientation = ExifInterface.ORIENTATION_NORMAL;

                    switch (Integer.parseInt(rotation)) {
                        case 90:
                            orientation = ExifInterface.ORIENTATION_ROTATE_270;
                            break;
                        case 180:
                            orientation = ExifInterface.ORIENTATION_ROTATE_180;
                            break;
                        case 270:
                            orientation = ExifInterface.ORIENTATION_ROTATE_90;
                            break;
                    }

                    mAttributes[IFD_TYPE_PRIMARY].put(TAG_ORIENTATION,
                            ExifAttribute.createUShort(orientation, mExifByteOrder));
                }

                if (DEBUG) {
                    Log.d(TAG, "Heif meta: " + width + "x" + height + ", rotation " + rotation);
                }
            }
        } finally {
            // Always release the retriever's native resources.
            retriever.release();
        }
    }
+
+    /**
+     * ORF files contain primary image data and a MakerNote data blob that contains
+     * preview/thumbnail images. Both take the form of IFDs and can therefore be read with the
+     * readImageFileDirectory() method.
+     * This method reads all the necessary data and updates the primary/preview/thumbnail image
+     * information according to the GetOlympusPreviewImage() method in piex.cc.
+     * For data format details, see the following:
+     * http://fileformats.archiveteam.org/wiki/Olympus_ORF
+     * https://libopenraw.freedesktop.org/wiki/Olympus_ORF
+     */
+    private void getOrfAttributes(ByteOrderedDataInputStream in) throws IOException {
+        // Retrieve primary image data; other Exif data will be located in the MakerNote.
+        getRawAttributes(in);
+
+        // Additionally retrieve preview/thumbnail information from the MakerNote tag, which
+        // contains proprietary tags and therefore does not have official documentation.
+        // See GetOlympusPreviewImage() in piex.cc & http://www.exiv2.org/tags-olympus.html
+        ExifAttribute makerNoteAttribute =
+                (ExifAttribute) mAttributes[IFD_TYPE_EXIF].get(TAG_MAKER_NOTE);
+        if (makerNoteAttribute == null) {
+            return;
+        }
+        // Create an ordered DataInputStream over the MakerNote payload.
+        ByteOrderedDataInputStream makerNoteDataInputStream =
+                new ByteOrderedDataInputStream(makerNoteAttribute.bytes);
+        makerNoteDataInputStream.setByteOrder(mExifByteOrder);
+
+        // There are two types of headers for Olympus MakerNotes.
+        // See http://www.exiv2.org/makernote.html#R1
+        byte[] makerNoteHeader1Bytes = new byte[ORF_MAKER_NOTE_HEADER_1.length];
+        makerNoteDataInputStream.readFully(makerNoteHeader1Bytes);
+        makerNoteDataInputStream.seek(0);
+        byte[] makerNoteHeader2Bytes = new byte[ORF_MAKER_NOTE_HEADER_2.length];
+        makerNoteDataInputStream.readFully(makerNoteHeader2Bytes);
+        // Skip the corresponding amount of bytes for each header type.
+        if (Arrays.equals(makerNoteHeader1Bytes, ORF_MAKER_NOTE_HEADER_1)) {
+            makerNoteDataInputStream.seek(ORF_MAKER_NOTE_HEADER_1_SIZE);
+        } else if (Arrays.equals(makerNoteHeader2Bytes, ORF_MAKER_NOTE_HEADER_2)) {
+            makerNoteDataInputStream.seek(ORF_MAKER_NOTE_HEADER_2_SIZE);
+        }
+
+        // Read IFD data from MakerNote.
+        readImageFileDirectory(makerNoteDataInputStream, IFD_TYPE_ORF_MAKER_NOTE);
+
+        // Retrieve & update preview image offset & length values.
+        // (Renamed from the previous misleading locals "imageLengthAttribute" /
+        // "bitsPerSampleAttribute": these hold the preview image's start offset and byte
+        // length, not a length/bits-per-sample pair.)
+        ExifAttribute previewImageStartAttribute = (ExifAttribute)
+                mAttributes[IFD_TYPE_ORF_CAMERA_SETTINGS].get(TAG_ORF_PREVIEW_IMAGE_START);
+        ExifAttribute previewImageLengthAttribute = (ExifAttribute)
+                mAttributes[IFD_TYPE_ORF_CAMERA_SETTINGS].get(TAG_ORF_PREVIEW_IMAGE_LENGTH);
+
+        if (previewImageStartAttribute != null && previewImageLengthAttribute != null) {
+            mAttributes[IFD_TYPE_PREVIEW].put(TAG_JPEG_INTERCHANGE_FORMAT,
+                    previewImageStartAttribute);
+            mAttributes[IFD_TYPE_PREVIEW].put(TAG_JPEG_INTERCHANGE_FORMAT_LENGTH,
+                    previewImageLengthAttribute);
+        }
+
+        // TODO: Check this behavior in other ORF files
+        // Retrieve primary image length & width values.
+        // See piex.cc GetOlympusPreviewImage()
+        ExifAttribute aspectFrameAttribute = (ExifAttribute)
+                mAttributes[IFD_TYPE_ORF_IMAGE_PROCESSING].get(TAG_ORF_ASPECT_FRAME);
+        if (aspectFrameAttribute == null) {
+            return;
+        }
+        // (Removed a dead "new int[4]" allocation that was immediately overwritten.)
+        int[] aspectFrameValues = (int[]) aspectFrameAttribute.getValue(mExifByteOrder);
+        if (aspectFrameValues[2] > aspectFrameValues[0]
+                && aspectFrameValues[3] > aspectFrameValues[1]) {
+            int primaryImageWidth = aspectFrameValues[2] - aspectFrameValues[0] + 1;
+            int primaryImageLength = aspectFrameValues[3] - aspectFrameValues[1] + 1;
+            // The longer dimension is treated as the width; swap if needed.
+            if (primaryImageWidth < primaryImageLength) {
+                int tmp = primaryImageWidth;
+                primaryImageWidth = primaryImageLength;
+                primaryImageLength = tmp;
+            }
+            ExifAttribute primaryImageWidthAttribute =
+                    ExifAttribute.createUShort(primaryImageWidth, mExifByteOrder);
+            ExifAttribute primaryImageLengthAttribute =
+                    ExifAttribute.createUShort(primaryImageLength, mExifByteOrder);
+
+            mAttributes[IFD_TYPE_PRIMARY].put(TAG_IMAGE_WIDTH, primaryImageWidthAttribute);
+            mAttributes[IFD_TYPE_PRIMARY].put(TAG_IMAGE_LENGTH, primaryImageLengthAttribute);
+        }
+    }
+
+    // RW2 contains the primary image data in IFD0 and the preview and/or thumbnail image data
+    // in the JpgFromRaw tag.
+    // See https://libopenraw.freedesktop.org/wiki/Panasonic_RAW/ and piex.cc Rw2GetPreviewData()
+    private void getRw2Attributes(ByteOrderedDataInputStream in) throws IOException {
+        // Primary image data lives in the regular IFD structure.
+        getRawAttributes(in);
+
+        // If the JpgFromRaw tag was seen, parse the embedded JPEG as the preview image.
+        boolean hasJpgFromRaw =
+                mAttributes[IFD_TYPE_PRIMARY].get(TAG_RW2_JPG_FROM_RAW) != null;
+        if (hasJpgFromRaw) {
+            getJpegAttributes(in, mRw2JpgFromRawOffset, IFD_TYPE_PREVIEW);
+        }
+
+        // Mirror the RW2-specific ISO tag into the standard EXIF ISO tag, but only when the
+        // standard tag is absent so an existing value is never clobbered.
+        ExifAttribute rw2Iso =
+                (ExifAttribute) mAttributes[IFD_TYPE_PRIMARY].get(TAG_RW2_ISO);
+        boolean exifIsoMissing =
+                mAttributes[IFD_TYPE_EXIF].get(TAG_ISO_SPEED_RATINGS) == null;
+        if (rw2Iso != null && exifIsoMissing) {
+            mAttributes[IFD_TYPE_EXIF].put(TAG_ISO_SPEED_RATINGS, rw2Iso);
+        }
+    }
+
+    // Stores a new JPEG image with EXIF attributes into a given output stream.
+    // The input JPEG is copied marker segment by marker segment: SOI is validated and copied,
+    // a freshly serialized EXIF APP1 segment is written immediately after it, any pre-existing
+    // EXIF APP1 segment is dropped, and everything from SOS/EOI onward is copied verbatim.
+    private void saveJpegAttributes(InputStream inputStream, OutputStream outputStream)
+            throws IOException {
+        // See JPEG File Interchange Format Specification, "JFIF Specification"
+        if (DEBUG) {
+            Log.d(TAG, "saveJpegAttributes starting with (inputStream: " + inputStream
+                    + ", outputStream: " + outputStream + ")");
+        }
+        DataInputStream dataInputStream = new DataInputStream(inputStream);
+        ByteOrderedDataOutputStream dataOutputStream =
+                new ByteOrderedDataOutputStream(outputStream, ByteOrder.BIG_ENDIAN);
+        // Validate and copy the two-byte SOI marker (0xFF, SOI).
+        if (dataInputStream.readByte() != MARKER) {
+            throw new IOException("Invalid marker");
+        }
+        dataOutputStream.writeByte(MARKER);
+        if (dataInputStream.readByte() != MARKER_SOI) {
+            throw new IOException("Invalid marker");
+        }
+        dataOutputStream.writeByte(MARKER_SOI);
+
+        // Write EXIF APP1 segment
+        dataOutputStream.writeByte(MARKER);
+        dataOutputStream.writeByte(MARKER_APP1);
+        writeExifSegment(dataOutputStream, 6);
+
+        // Scratch buffer for copying segment payloads.
+        byte[] bytes = new byte[4096];
+
+        while (true) {
+            byte marker = dataInputStream.readByte();
+            if (marker != MARKER) {
+                throw new IOException("Invalid marker");
+            }
+            marker = dataInputStream.readByte();
+            switch (marker) {
+                case MARKER_APP1: {
+                    // Segment length field counts its own two bytes; subtract them.
+                    int length = dataInputStream.readUnsignedShort() - 2;
+                    if (length < 0) {
+                        throw new IOException("Invalid length");
+                    }
+                    byte[] identifier = new byte[6];
+                    if (length >= 6) {
+                        if (dataInputStream.read(identifier) != 6) {
+                            throw new IOException("Invalid exif");
+                        }
+                        if (Arrays.equals(identifier, IDENTIFIER_EXIF_APP1)) {
+                            // Skip the original EXIF APP1 segment.
+                            if (dataInputStream.skipBytes(length - 6) != length - 6) {
+                                throw new IOException("Invalid length");
+                            }
+                            break;
+                        }
+                    }
+                    // Copy non-EXIF APP1 segment.
+                    dataOutputStream.writeByte(MARKER);
+                    dataOutputStream.writeByte(marker);
+                    dataOutputStream.writeUnsignedShort(length + 2);
+                    if (length >= 6) {
+                        // The 6 identifier bytes were already consumed above; write them
+                        // back out before copying the remaining payload.
+                        length -= 6;
+                        dataOutputStream.write(identifier);
+                    }
+                    int read;
+                    while (length > 0 && (read = dataInputStream.read(
+                            bytes, 0, Math.min(length, bytes.length))) >= 0) {
+                        dataOutputStream.write(bytes, 0, read);
+                        length -= read;
+                    }
+                    break;
+                }
+                case MARKER_EOI:
+                case MARKER_SOS: {
+                    dataOutputStream.writeByte(MARKER);
+                    dataOutputStream.writeByte(marker);
+                    // Copy all the remaining data
+                    Streams.copy(dataInputStream, dataOutputStream);
+                    return;
+                }
+                default: {
+                    // Copy JPEG segment
+                    dataOutputStream.writeByte(MARKER);
+                    dataOutputStream.writeByte(marker);
+                    int length = dataInputStream.readUnsignedShort();
+                    dataOutputStream.writeUnsignedShort(length);
+                    length -= 2;
+                    if (length < 0) {
+                        throw new IOException("Invalid length");
+                    }
+                    int read;
+                    while (length > 0 && (read = dataInputStream.read(
+                            bytes, 0, Math.min(length, bytes.length))) >= 0) {
+                        dataOutputStream.write(bytes, 0, read);
+                        length -= read;
+                    }
+                    break;
+                }
+            }
+        }
+    }
+
+    // Reads the given EXIF byte area and saves its tag data into attributes.
+    // The byte area is expected to start with a TIFF header (byte order mark, start code,
+    // first-IFD offset) followed by one or more image file directories.
+    private void readExifSegment(byte[] exifBytes, int imageType) throws IOException {
+        ByteOrderedDataInputStream in = new ByteOrderedDataInputStream(exifBytes);
+
+        // Parse TIFF Headers. See JEITA CP-3451C Section 4.5.2. Table 1.
+        parseTiffHeaders(in, exifBytes.length);
+
+        // Read TIFF image file directories. See JEITA CP-3451C Section 4.5.2. Figure 6.
+        readImageFileDirectory(in, imageType);
+    }
+
+    // Fills in default attribute values expected by legacy consumers: DATETIME is backfilled
+    // from DATETIME_ORIGINAL, and width/length/orientation/light-source get zero defaults
+    // when absent.
+    private void addDefaultValuesForCompatibility() {
+        // If DATETIME tag has no value, then set the value to DATETIME_ORIGINAL tag's.
+        String dateTimeOriginal = getAttribute(TAG_DATETIME_ORIGINAL);
+        boolean dateTimeMissing = getAttribute(TAG_DATETIME) == null;
+        if (dateTimeOriginal != null && dateTimeMissing) {
+            mAttributes[IFD_TYPE_PRIMARY].put(TAG_DATETIME,
+                    ExifAttribute.createString(dateTimeOriginal));
+        }
+
+        // Add zero default values for tags some callers expect to always be present.
+        if (getAttribute(TAG_IMAGE_WIDTH) == null) {
+            mAttributes[IFD_TYPE_PRIMARY].put(TAG_IMAGE_WIDTH,
+                    ExifAttribute.createULong(0, mExifByteOrder));
+        }
+        if (getAttribute(TAG_IMAGE_LENGTH) == null) {
+            mAttributes[IFD_TYPE_PRIMARY].put(TAG_IMAGE_LENGTH,
+                    ExifAttribute.createULong(0, mExifByteOrder));
+        }
+        if (getAttribute(TAG_ORIENTATION) == null) {
+            mAttributes[IFD_TYPE_PRIMARY].put(TAG_ORIENTATION,
+                    ExifAttribute.createUShort(0, mExifByteOrder));
+        }
+        if (getAttribute(TAG_LIGHT_SOURCE) == null) {
+            mAttributes[IFD_TYPE_EXIF].put(TAG_LIGHT_SOURCE,
+                    ExifAttribute.createULong(0, mExifByteOrder));
+        }
+    }
+
+    // Reads the two-byte TIFF byte-order mark ("II" little endian / "MM" big endian) and
+    // returns the corresponding ByteOrder; throws on anything else.
+    private ByteOrder readByteOrder(ByteOrderedDataInputStream in)
+            throws IOException {
+        short byteOrder = in.readShort();
+        if (byteOrder == BYTE_ALIGN_II) {
+            if (DEBUG) {
+                Log.d(TAG, "readExifSegment: Byte Align II");
+            }
+            return ByteOrder.LITTLE_ENDIAN;
+        }
+        if (byteOrder == BYTE_ALIGN_MM) {
+            if (DEBUG) {
+                Log.d(TAG, "readExifSegment: Byte Align MM");
+            }
+            return ByteOrder.BIG_ENDIAN;
+        }
+        throw new IOException("Invalid byte order: " + Integer.toHexString(byteOrder));
+    }
+
+    // Parses the TIFF header: byte-order mark, start code, and first-IFD offset, leaving the
+    // stream positioned at the first IFD. ORF/RW2 files are allowed to carry a nonstandard
+    // start code.
+    private void parseTiffHeaders(ByteOrderedDataInputStream in,
+            int exifBytesLength) throws IOException {
+        // Determine and apply the byte order declared by the header.
+        mExifByteOrder = readByteOrder(in);
+        in.setByteOrder(mExifByteOrder);
+
+        // Validate the start code (0x002A for standard TIFF); ORF and RW2 use their own.
+        int startCode = in.readUnsignedShort();
+        boolean startCodeExempt = mMimeType == IMAGE_TYPE_ORF || mMimeType == IMAGE_TYPE_RW2;
+        if (!startCodeExempt && startCode != START_CODE) {
+            throw new IOException("Invalid start code: " + Integer.toHexString(startCode));
+        }
+
+        // The first-IFD offset is measured from the start of the TIFF header; the 8 header
+        // bytes have already been consumed, so skip only the remainder.
+        int firstIfdOffset = in.readInt();
+        if (firstIfdOffset < 8 || firstIfdOffset >= exifBytesLength) {
+            throw new IOException("Invalid first Ifd offset: " + firstIfdOffset);
+        }
+        int bytesToSkip = firstIfdOffset - 8;
+        if (bytesToSkip > 0 && in.skipBytes(bytesToSkip) != bytesToSkip) {
+            throw new IOException("Couldn't jump to first Ifd: " + bytesToSkip);
+        }
+    }
+
+    // Reads image file directory, which is a tag group in EXIF.
+    // Each 12-byte entry is: 2-byte tag number, 2-byte data format, 4-byte component count,
+    // then 4 bytes holding either the value itself or an offset to it. Entries whose tag is
+    // an IFD-pointer tag trigger a recursive parse of the pointed-to IFD; all other valid
+    // entries are stored into mAttributes[ifdType]. After the entries, a 4-byte next-IFD
+    // offset may chain to a thumbnail or preview IFD.
+    private void readImageFileDirectory(ByteOrderedDataInputStream dataInputStream,
+            @IfdType int ifdType) throws IOException {
+        if (dataInputStream.mPosition + 2 > dataInputStream.mLength) {
+            // Return if there is no data from the offset.
+            return;
+        }
+        // See TIFF 6.0 Section 2: TIFF Structure, Figure 1.
+        short numberOfDirectoryEntry = dataInputStream.readShort();
+        if (dataInputStream.mPosition + 12 * numberOfDirectoryEntry > dataInputStream.mLength) {
+            // Return if the size of entries is too big.
+            return;
+        }
+
+        if (DEBUG) {
+            Log.d(TAG, "numberOfDirectoryEntry: " + numberOfDirectoryEntry);
+        }
+
+        // See TIFF 6.0 Section 2: TIFF Structure, "Image File Directory".
+        for (short i = 0; i < numberOfDirectoryEntry; ++i) {
+            int tagNumber = dataInputStream.readUnsignedShort();
+            int dataFormat = dataInputStream.readUnsignedShort();
+            int numberOfComponents = dataInputStream.readInt();
+            // Next four bytes is for data offset or value.
+            long nextEntryOffset = dataInputStream.peek() + 4;
+
+            // Look up a corresponding tag from tag number
+            ExifTag tag = (ExifTag) sExifTagMapsForReading[ifdType].get(tagNumber);
+
+            if (DEBUG) {
+                Log.d(TAG, String.format("ifdType: %d, tagNumber: %d, tagName: %s, dataFormat: %d, "
+                        + "numberOfComponents: %d", ifdType, tagNumber,
+                        tag != null ? tag.name : null, dataFormat, numberOfComponents));
+            }
+
+            // Validate the entry: the tag must be known for this IFD type, the data format
+            // must be in range, and the total byte count must fit in an int.
+            long byteCount = 0;
+            boolean valid = false;
+            if (tag == null) {
+                Log.w(TAG, "Skip the tag entry since tag number is not defined: " + tagNumber);
+            } else if (dataFormat <= 0 || dataFormat >= IFD_FORMAT_BYTES_PER_FORMAT.length) {
+                Log.w(TAG, "Skip the tag entry since data format is invalid: " + dataFormat);
+            } else {
+                byteCount = (long) numberOfComponents * IFD_FORMAT_BYTES_PER_FORMAT[dataFormat];
+                if (byteCount < 0 || byteCount > Integer.MAX_VALUE) {
+                    Log.w(TAG, "Skip the tag entry since the number of components is invalid: "
+                            + numberOfComponents);
+                } else {
+                    valid = true;
+                }
+            }
+            if (!valid) {
+                dataInputStream.seek(nextEntryOffset);
+                continue;
+            }
+
+            // Read a value from data field or seek to the value offset which is stored in data
+            // field if the size of the entry value is bigger than 4.
+            if (byteCount > 4) {
+                int offset = dataInputStream.readInt();
+                if (DEBUG) {
+                    Log.d(TAG, "seek to data offset: " + offset);
+                }
+                if (mMimeType == IMAGE_TYPE_ORF) {
+                    if (tag.name == TAG_MAKER_NOTE) {
+                        // Save offset value for reading thumbnail
+                        mOrfMakerNoteOffset = offset;
+                    } else if (ifdType == IFD_TYPE_ORF_MAKER_NOTE
+                            && tag.name == TAG_ORF_THUMBNAIL_IMAGE) {
+                        // Retrieve & update values for thumbnail offset and length values for ORF
+                        mOrfThumbnailOffset = offset;
+                        mOrfThumbnailLength = numberOfComponents;
+
+                        ExifAttribute compressionAttribute =
+                                ExifAttribute.createUShort(DATA_JPEG, mExifByteOrder);
+                        ExifAttribute jpegInterchangeFormatAttribute =
+                                ExifAttribute.createULong(mOrfThumbnailOffset, mExifByteOrder);
+                        ExifAttribute jpegInterchangeFormatLengthAttribute =
+                                ExifAttribute.createULong(mOrfThumbnailLength, mExifByteOrder);
+
+                        mAttributes[IFD_TYPE_THUMBNAIL].put(TAG_COMPRESSION, compressionAttribute);
+                        mAttributes[IFD_TYPE_THUMBNAIL].put(TAG_JPEG_INTERCHANGE_FORMAT,
+                                jpegInterchangeFormatAttribute);
+                        mAttributes[IFD_TYPE_THUMBNAIL].put(TAG_JPEG_INTERCHANGE_FORMAT_LENGTH,
+                                jpegInterchangeFormatLengthAttribute);
+                    }
+                } else if (mMimeType == IMAGE_TYPE_RW2) {
+                    if (tag.name == TAG_RW2_JPG_FROM_RAW) {
+                        // Save offset so getRw2Attributes() can parse the embedded JPEG later.
+                        mRw2JpgFromRawOffset = offset;
+                    }
+                }
+                if (offset + byteCount <= dataInputStream.mLength) {
+                    dataInputStream.seek(offset);
+                } else {
+                    // Skip if invalid data offset.
+                    Log.w(TAG, "Skip the tag entry since data offset is invalid: " + offset);
+                    dataInputStream.seek(nextEntryOffset);
+                    continue;
+                }
+            }
+
+            // Recursively parse IFD when a IFD pointer tag appears.
+            Object nextIfdType = sExifPointerTagMap.get(tagNumber);
+            if (DEBUG) {
+                Log.d(TAG, "nextIfdType: " + nextIfdType + " byteCount: " + byteCount);
+            }
+
+            if (nextIfdType != null) {
+                long offset = -1L;
+                // Get offset from data field; the read width depends on the declared format.
+                switch (dataFormat) {
+                    case IFD_FORMAT_USHORT: {
+                        offset = dataInputStream.readUnsignedShort();
+                        break;
+                    }
+                    case IFD_FORMAT_SSHORT: {
+                        offset = dataInputStream.readShort();
+                        break;
+                    }
+                    case IFD_FORMAT_ULONG: {
+                        offset = dataInputStream.readUnsignedInt();
+                        break;
+                    }
+                    case IFD_FORMAT_SLONG:
+                    case IFD_FORMAT_IFD: {
+                        offset = dataInputStream.readInt();
+                        break;
+                    }
+                    default: {
+                        // Nothing to do
+                        break;
+                    }
+                }
+                if (DEBUG) {
+                    Log.d(TAG, String.format("Offset: %d, tagName: %s", offset, tag.name));
+                }
+                if (offset > 0L && offset < dataInputStream.mLength) {
+                    dataInputStream.seek(offset);
+                    readImageFileDirectory(dataInputStream, (int) nextIfdType);
+                } else {
+                    Log.w(TAG, "Skip jump into the IFD since its offset is invalid: " + offset);
+                }
+
+                dataInputStream.seek(nextEntryOffset);
+                continue;
+            }
+
+            // Plain value entry: read the raw bytes and store them as an attribute.
+            byte[] bytes = new byte[(int) byteCount];
+            dataInputStream.readFully(bytes);
+            ExifAttribute attribute = new ExifAttribute(dataFormat, numberOfComponents, bytes);
+            mAttributes[ifdType].put(tag.name, attribute);
+
+            // DNG files have a DNG Version tag specifying the version of specifications that the
+            // image file is following.
+            // See http://fileformats.archiveteam.org/wiki/DNG
+            if (tag.name == TAG_DNG_VERSION) {
+                mMimeType = IMAGE_TYPE_DNG;
+            }
+
+            // PEF files have a Make or Model tag that begins with "PENTAX" or a compression tag
+            // that is 65535.
+            // See http://fileformats.archiveteam.org/wiki/Pentax_PEF
+            if (((tag.name == TAG_MAKE || tag.name == TAG_MODEL)
+                    && attribute.getStringValue(mExifByteOrder).contains(PEF_SIGNATURE))
+                    || (tag.name == TAG_COMPRESSION
+                    && attribute.getIntValue(mExifByteOrder) == 65535)) {
+                mMimeType = IMAGE_TYPE_PEF;
+            }
+
+            // Seek to next tag offset
+            if (dataInputStream.peek() != nextEntryOffset) {
+                dataInputStream.seek(nextEntryOffset);
+            }
+        }
+
+        // After all entries, read the 4-byte offset chaining to the next IFD, if present.
+        if (dataInputStream.peek() + 4 <= dataInputStream.mLength) {
+            int nextIfdOffset = dataInputStream.readInt();
+            if (DEBUG) {
+                Log.d(TAG, String.format("nextIfdOffset: %d", nextIfdOffset));
+            }
+            // The next IFD offset needs to be bigger than 8
+            // since the first IFD offset is at least 8.
+            if (nextIfdOffset > 8 && nextIfdOffset < dataInputStream.mLength) {
+                dataInputStream.seek(nextIfdOffset);
+                if (mAttributes[IFD_TYPE_THUMBNAIL].isEmpty()) {
+                    // Do not overwrite thumbnail IFD data if it alreay exists.
+                    readImageFileDirectory(dataInputStream, IFD_TYPE_THUMBNAIL);
+                } else if (mAttributes[IFD_TYPE_PREVIEW].isEmpty()) {
+                    readImageFileDirectory(dataInputStream, IFD_TYPE_PREVIEW);
+                }
+            }
+        }
+    }
+
+    /**
+     * JPEG compressed images do not contain IMAGE_LENGTH & IMAGE_WIDTH tags.
+     * This method uses the JpegInterchangeFormat (JPEG data offset) value and calls
+     * getJpegAttributes() to locate the SOF (Start of Frame) marker and update the image
+     * length & width values.
+     * See JEITA CP-3451C Table 5 and Section 4.8.1. B.
+     */
+    private void retrieveJpegImageSize(ByteOrderedDataInputStream in, int imageType)
+            throws IOException {
+        // Nothing to do if both size tags are already present.
+        boolean hasLength = mAttributes[imageType].get(TAG_IMAGE_LENGTH) != null;
+        boolean hasWidth = mAttributes[imageType].get(TAG_IMAGE_WIDTH) != null;
+        if (hasLength && hasWidth) {
+            return;
+        }
+
+        // Without an offset to the JPEG data there is nothing to scan.
+        ExifAttribute jpegInterchangeFormatAttribute =
+                (ExifAttribute) mAttributes[imageType].get(TAG_JPEG_INTERCHANGE_FORMAT);
+        if (jpegInterchangeFormatAttribute == null) {
+            return;
+        }
+
+        // Search for the SOF marker in the JPEG data; getJpegAttributes() fills in
+        // IMAGE_LENGTH & IMAGE_WIDTH as a side effect.
+        int jpegInterchangeFormat = jpegInterchangeFormatAttribute.getIntValue(mExifByteOrder);
+        getJpegAttributes(in, jpegInterchangeFormat, imageType);
+    }
+
+    // Sets thumbnail offset & length attributes based on JpegInterchangeFormat or StripOffsets
+    // tags, dispatching on the thumbnail's Compression tag when present.
+    private void setThumbnailData(ByteOrderedDataInputStream in) throws IOException {
+        HashMap thumbnailData = mAttributes[IFD_TYPE_THUMBNAIL];
+
+        ExifAttribute compressionAttribute =
+                (ExifAttribute) thumbnailData.get(TAG_COMPRESSION);
+        if (compressionAttribute == null) {
+            // Thumbnail data may not contain Compression tag value
+            handleThumbnailFromJfif(in, thumbnailData);
+            return;
+        }
+
+        mThumbnailCompression = compressionAttribute.getIntValue(mExifByteOrder);
+        if (mThumbnailCompression == DATA_JPEG) {
+            handleThumbnailFromJfif(in, thumbnailData);
+        } else if (mThumbnailCompression == DATA_UNCOMPRESSED
+                || mThumbnailCompression == DATA_JPEG_COMPRESSED) {
+            // Strip-based thumbnails are only handled for supported data types.
+            if (isSupportedDataType(thumbnailData)) {
+                handleThumbnailFromStrips(in, thumbnailData);
+            }
+        }
+    }
+
+    // Check JpegInterchangeFormat(JFIF) tags to retrieve thumbnail offset & length values
+    // and reads the corresponding bytes if stream does not support seek function
+    private void handleThumbnailFromJfif(ByteOrderedDataInputStream in, HashMap thumbnailData)
+            throws IOException {
+        ExifAttribute jpegInterchangeFormatAttribute =
+                (ExifAttribute) thumbnailData.get(TAG_JPEG_INTERCHANGE_FORMAT);
+        ExifAttribute jpegInterchangeFormatLengthAttribute =
+                (ExifAttribute) thumbnailData.get(TAG_JPEG_INTERCHANGE_FORMAT_LENGTH);
+        if (jpegInterchangeFormatAttribute != null
+                && jpegInterchangeFormatLengthAttribute != null) {
+            int thumbnailOffset = jpegInterchangeFormatAttribute.getIntValue(mExifByteOrder);
+            int thumbnailLength = jpegInterchangeFormatLengthAttribute.getIntValue(mExifByteOrder);
+
+            // The following code limits the size of thumbnail size not to overflow EXIF data area.
+            thumbnailLength = Math.min(thumbnailLength, in.available() - thumbnailOffset);
+            // The stored offset is relative to a format-specific base; rebase it to an
+            // absolute stream position.
+            if (mMimeType == IMAGE_TYPE_JPEG || mMimeType == IMAGE_TYPE_RAF
+                    || mMimeType == IMAGE_TYPE_RW2) {
+                thumbnailOffset += mExifOffset;
+            } else if (mMimeType == IMAGE_TYPE_ORF) {
+                // Update offset value since ORF files store the thumbnail offset relative to
+                // the MakerNote data. (Comment previously said "RAF" — this branch handles
+                // IMAGE_TYPE_ORF and rebases by mOrfMakerNoteOffset.)
+                thumbnailOffset += mOrfMakerNoteOffset;
+            }
+            if (DEBUG) {
+                Log.d(TAG, "Setting thumbnail attributes with offset: " + thumbnailOffset
+                        + ", length: " + thumbnailLength);
+            }
+            if (thumbnailOffset > 0 && thumbnailLength > 0) {
+                mHasThumbnail = true;
+                mThumbnailOffset = thumbnailOffset;
+                mThumbnailLength = thumbnailLength;
+                mThumbnailCompression = DATA_JPEG;
+
+                if (mFilename == null && mAssetInputStream == null
+                        && mSeekableFileDescriptor == null) {
+                    // Save the thumbnail in memory if the input doesn't support reading again.
+                    byte[] thumbnailBytes = new byte[thumbnailLength];
+                    in.seek(thumbnailOffset);
+                    in.readFully(thumbnailBytes);
+                    mThumbnailBytes = thumbnailBytes;
+                }
+            }
+        }
+    }
+
+    // Check StripOffsets & StripByteCounts tags to retrieve thumbnail offset & length values.
+    // The thumbnail may be split across multiple strips; each strip is read at its absolute
+    // offset and the strips are concatenated into a single in-memory byte array.
+    private void handleThumbnailFromStrips(ByteOrderedDataInputStream in, HashMap thumbnailData)
+            throws IOException {
+        ExifAttribute stripOffsetsAttribute =
+                (ExifAttribute) thumbnailData.get(TAG_STRIP_OFFSETS);
+        ExifAttribute stripByteCountsAttribute =
+                (ExifAttribute) thumbnailData.get(TAG_STRIP_BYTE_COUNTS);
+
+        if (stripOffsetsAttribute != null && stripByteCountsAttribute != null) {
+            long[] stripOffsets =
+                    (long[]) stripOffsetsAttribute.getValue(mExifByteOrder);
+            long[] stripByteCounts =
+                    (long[]) stripByteCountsAttribute.getValue(mExifByteOrder);
+
+            // Set thumbnail byte array data for non-consecutive strip bytes
+            byte[] totalStripBytes =
+                    new byte[(int) Arrays.stream(stripByteCounts).sum()];
+
+            int bytesRead = 0;
+            int bytesAdded = 0;
+            for (int i = 0; i < stripOffsets.length; i++) {
+                int stripOffset = (int) stripOffsets[i];
+                int stripByteCount = (int) stripByteCounts[i];
+
+                // A strip that starts before the end of the previous one indicates corrupt
+                // offset data; bail out instead of reading overlapping bytes.
+                int skipBytes = stripOffset - bytesRead;
+                if (skipBytes < 0) {
+                    Log.d(TAG, "Invalid strip offset value");
+                    return;
+                }
+                // BUGFIX: seek() takes an absolute position (as used elsewhere in this file),
+                // so seek to the strip's own offset. The previous in.seek(skipBytes) only
+                // worked for the first strip (where bytesRead == 0) and positioned every
+                // subsequent strip incorrectly.
+                in.seek(stripOffset);
+                bytesRead += skipBytes;
+
+                // Read strip bytes
+                byte[] stripBytes = new byte[stripByteCount];
+                in.read(stripBytes);
+                bytesRead += stripByteCount;
+
+                // Add bytes to array
+                System.arraycopy(stripBytes, 0, totalStripBytes, bytesAdded,
+                        stripBytes.length);
+                bytesAdded += stripBytes.length;
+            }
+
+            mHasThumbnail = true;
+            mThumbnailBytes = totalStripBytes;
+            mThumbnailLength = totalStripBytes.length;
+        }
+    }
+
+    // Returns whether the thumbnail's BitsPerSample (and, for DNG, PhotometricInterpretation)
+    // values describe a data type this class can decode.
+    private boolean isSupportedDataType(HashMap thumbnailData) throws IOException {
+        ExifAttribute bitsPerSampleAttribute =
+                (ExifAttribute) thumbnailData.get(TAG_BITS_PER_SAMPLE);
+        if (bitsPerSampleAttribute == null) {
+            if (DEBUG) {
+                Log.d(TAG, "Unsupported data type value");
+            }
+            return false;
+        }
+        int[] bitsPerSampleValue = (int[]) bitsPerSampleAttribute.getValue(mExifByteOrder);
+
+        // Plain 8/8/8 RGB is always supported.
+        if (Arrays.equals(BITS_PER_SAMPLE_RGB, bitsPerSampleValue)) {
+            return true;
+        }
+
+        // See DNG Specification 1.4.0.0. Section 3, Compression.
+        if (mMimeType == IMAGE_TYPE_DNG) {
+            ExifAttribute photometricInterpretationAttribute =
+                    (ExifAttribute) thumbnailData.get(TAG_PHOTOMETRIC_INTERPRETATION);
+            if (photometricInterpretationAttribute != null) {
+                int photometricInterpretationValue =
+                        photometricInterpretationAttribute.getIntValue(mExifByteOrder);
+                boolean greyscale =
+                        photometricInterpretationValue == PHOTOMETRIC_INTERPRETATION_BLACK_IS_ZERO
+                        && Arrays.equals(bitsPerSampleValue, BITS_PER_SAMPLE_GREYSCALE_2);
+                boolean ycbcr =
+                        photometricInterpretationValue == PHOTOMETRIC_INTERPRETATION_YCBCR
+                        && Arrays.equals(bitsPerSampleValue, BITS_PER_SAMPLE_RGB);
+                if (greyscale || ycbcr) {
+                    return true;
+                }
+                // TODO: Add support for lossless Huffman JPEG data
+            }
+        }
+        if (DEBUG) {
+            Log.d(TAG, "Unsupported data type value");
+        }
+        return false;
+    }
+
+    // Returns true if the image length and width values are <= 512.
+    // See Section 4.8 of http://standardsproposals.bsigroup.com/Home/getPDF/567
+    private boolean isThumbnail(HashMap map) throws IOException {
+        ExifAttribute lengthAttribute = (ExifAttribute) map.get(TAG_IMAGE_LENGTH);
+        ExifAttribute widthAttribute = (ExifAttribute) map.get(TAG_IMAGE_WIDTH);
+        if (lengthAttribute == null || widthAttribute == null) {
+            // Without both dimensions the image cannot qualify as a thumbnail.
+            return false;
+        }
+        return lengthAttribute.getIntValue(mExifByteOrder) <= MAX_THUMBNAIL_SIZE
+                && widthAttribute.getIntValue(mExifByteOrder) <= MAX_THUMBNAIL_SIZE;
+    }
+
+    // Validate primary, preview, thumbnail image data by comparing image sizes: IFD roles are
+    // swapped so that primary > preview > thumbnail, primary size tags are replaced by the
+    // padding-free PixelX/YDimension values when available, and a preview that is small enough
+    // is promoted to thumbnail when no thumbnail exists.
+    private void validateImages(InputStream in) throws IOException {
+        // Swap images based on size (primary > preview > thumbnail)
+        swapBasedOnImageSize(IFD_TYPE_PRIMARY, IFD_TYPE_PREVIEW);
+        swapBasedOnImageSize(IFD_TYPE_PRIMARY, IFD_TYPE_THUMBNAIL);
+        swapBasedOnImageSize(IFD_TYPE_PREVIEW, IFD_TYPE_THUMBNAIL);
+
+        // PixelXDimension/PixelYDimension hold valid image sizes excluding the padding added
+        // at the right/bottom edges to round dimensions up to multiples of 64; prefer them
+        // for the primary image. See JEITA CP-3451C Table 5 and Section 4.8.1. B.
+        ExifAttribute pixelXDimAttribute =
+                (ExifAttribute) mAttributes[IFD_TYPE_EXIF].get(TAG_PIXEL_X_DIMENSION);
+        ExifAttribute pixelYDimAttribute =
+                (ExifAttribute) mAttributes[IFD_TYPE_EXIF].get(TAG_PIXEL_Y_DIMENSION);
+        boolean hasBothPixelDims = pixelXDimAttribute != null && pixelYDimAttribute != null;
+        if (hasBothPixelDims) {
+            mAttributes[IFD_TYPE_PRIMARY].put(TAG_IMAGE_WIDTH, pixelXDimAttribute);
+            mAttributes[IFD_TYPE_PRIMARY].put(TAG_IMAGE_LENGTH, pixelYDimAttribute);
+        }
+
+        // If there is no thumbnail but the preview satisfies the thumbnail size limit,
+        // promote the preview to thumbnail.
+        boolean noThumbnail = mAttributes[IFD_TYPE_THUMBNAIL].isEmpty();
+        if (noThumbnail && isThumbnail(mAttributes[IFD_TYPE_PREVIEW])) {
+            mAttributes[IFD_TYPE_THUMBNAIL] = mAttributes[IFD_TYPE_PREVIEW];
+            mAttributes[IFD_TYPE_PREVIEW] = new HashMap();
+        }
+
+        // Check if the thumbnail image satisfies the thumbnail size requirements
+        if (!isThumbnail(mAttributes[IFD_TYPE_THUMBNAIL])) {
+            Log.d(TAG, "No image meets the size requirements of a thumbnail image.");
+        }
+    }
+
+    /**
+     * If image is uncompressed, ImageWidth/Length tags are used to store size info.
+     * However, uncompressed images often store extra pixels around the edges of the final image,
+     * which results in larger values for TAG_IMAGE_WIDTH and TAG_IMAGE_LENGTH tags.
+     * This method corrects those tag values by checking first the values of TAG_DEFAULT_CROP_SIZE
+     * See DNG Specification 1.4.0.0. Section 4. (DefaultCropSize)
+     *
+     * If image is a RW2 file, valid image sizes are stored in SensorBorder tags.
+     * See tiff_parser.cc GetFullDimension32()
+     * */
+    private void updateImageSizeValues(ByteOrderedDataInputStream in, int imageType)
+            throws IOException {
+        // Uncompressed image valid image size values
+        ExifAttribute defaultCropSizeAttribute =
+                (ExifAttribute) mAttributes[imageType].get(TAG_DEFAULT_CROP_SIZE);
+        // RW2 image valid image size values
+        ExifAttribute topBorderAttribute =
+                (ExifAttribute) mAttributes[imageType].get(TAG_RW2_SENSOR_TOP_BORDER);
+        ExifAttribute leftBorderAttribute =
+                (ExifAttribute) mAttributes[imageType].get(TAG_RW2_SENSOR_LEFT_BORDER);
+        ExifAttribute bottomBorderAttribute =
+                (ExifAttribute) mAttributes[imageType].get(TAG_RW2_SENSOR_BOTTOM_BORDER);
+        ExifAttribute rightBorderAttribute =
+                (ExifAttribute) mAttributes[imageType].get(TAG_RW2_SENSOR_RIGHT_BORDER);
+
+        if (defaultCropSizeAttribute != null) {
+            // Update for uncompressed image.
+            // DefaultCropSize may be stored as rationals or as integers; decode accordingly.
+            ExifAttribute defaultCropSizeXAttribute, defaultCropSizeYAttribute;
+            if (defaultCropSizeAttribute.format == IFD_FORMAT_URATIONAL) {
+                Rational[] defaultCropSizeValue =
+                        (Rational[]) defaultCropSizeAttribute.getValue(mExifByteOrder);
+                defaultCropSizeXAttribute =
+                        ExifAttribute.createURational(defaultCropSizeValue[0], mExifByteOrder);
+                defaultCropSizeYAttribute =
+                        ExifAttribute.createURational(defaultCropSizeValue[1], mExifByteOrder);
+            } else {
+                int[] defaultCropSizeValue =
+                        (int[]) defaultCropSizeAttribute.getValue(mExifByteOrder);
+                defaultCropSizeXAttribute =
+                        ExifAttribute.createUShort(defaultCropSizeValue[0], mExifByteOrder);
+                defaultCropSizeYAttribute =
+                        ExifAttribute.createUShort(defaultCropSizeValue[1], mExifByteOrder);
+            }
+            mAttributes[imageType].put(TAG_IMAGE_WIDTH, defaultCropSizeXAttribute);
+            mAttributes[imageType].put(TAG_IMAGE_LENGTH, defaultCropSizeYAttribute);
+        } else if (topBorderAttribute != null && leftBorderAttribute != null &&
+                bottomBorderAttribute != null && rightBorderAttribute != null) {
+            // Update for RW2 image: the valid area is the rectangle between the sensor borders.
+            int topBorderValue = topBorderAttribute.getIntValue(mExifByteOrder);
+            int bottomBorderValue = bottomBorderAttribute.getIntValue(mExifByteOrder);
+            int rightBorderValue = rightBorderAttribute.getIntValue(mExifByteOrder);
+            int leftBorderValue = leftBorderAttribute.getIntValue(mExifByteOrder);
+            if (bottomBorderValue > topBorderValue && rightBorderValue > leftBorderValue) {
+                int length = bottomBorderValue - topBorderValue;
+                int width = rightBorderValue - leftBorderValue;
+                ExifAttribute imageLengthAttribute =
+                        ExifAttribute.createUShort(length, mExifByteOrder);
+                ExifAttribute imageWidthAttribute =
+                        ExifAttribute.createUShort(width, mExifByteOrder);
+                mAttributes[imageType].put(TAG_IMAGE_LENGTH, imageLengthAttribute);
+                mAttributes[imageType].put(TAG_IMAGE_WIDTH, imageWidthAttribute);
+            }
+        } else {
+            // Neither crop-size nor border tags present: fall back to scanning the JPEG SOF.
+            retrieveJpegImageSize(in, imageType);
+        }
+    }
+
    /**
     * Writes an Exif (APP1) segment — TIFF header, IFD groups, and optional thumbnail — into
     * the given output stream and returns the total segment size in bytes.
     *
     * <p>The layout follows JEITA CP-3451C Section 4.5: a two-byte segment length, the Exif
     * identifier, the TIFF byte-order mark ("MM"/"II"), the 0x002A start code, the offset of
     * the 0th IFD, each non-empty IFD group followed by its out-of-line data area, and finally
     * the thumbnail bytes when present.
     *
     * @param dataOutputStream stream to write to; its byte order is switched to
     *        {@code mExifByteOrder} for the TIFF body and restored to big endian afterwards
     * @param exifOffsetFromBeginning offset of this Exif data area from the beginning of the
     *        file, used to compute the absolute thumbnail offset ({@code mThumbnailOffset})
     * @return the total size of the written segment in bytes
     * @throws IOException if writing to the stream fails
     */
    private int writeExifSegment(ByteOrderedDataOutputStream dataOutputStream,
            int exifOffsetFromBeginning) throws IOException {
        // The following variables are for calculating each IFD tag group size in bytes.
        int[] ifdOffsets = new int[EXIF_TAGS.length];
        int[] ifdDataSizes = new int[EXIF_TAGS.length];

        // Remove IFD pointer tags (we'll re-add it later.)
        for (ExifTag tag : EXIF_POINTER_TAGS) {
            removeAttribute(tag.name);
        }
        // Remove old thumbnail data
        removeAttribute(JPEG_INTERCHANGE_FORMAT_TAG.name);
        removeAttribute(JPEG_INTERCHANGE_FORMAT_LENGTH_TAG.name);

        // Remove null value tags.
        for (int ifdType = 0; ifdType < EXIF_TAGS.length; ++ifdType) {
            // Iterate over a snapshot (toArray) so removal doesn't invalidate the iteration.
            for (Object obj : mAttributes[ifdType].entrySet().toArray()) {
                final Map.Entry entry = (Map.Entry) obj;
                if (entry.getValue() == null) {
                    mAttributes[ifdType].remove(entry.getKey());
                }
            }
        }

        // Add IFD pointer tags. The next offset of primary image TIFF IFD will have thumbnail IFD
        // offset when there is one or more tags in the thumbnail IFD.
        // The pointer values are placeholders (0) here; the real offsets are patched in below
        // once the layout has been computed.
        if (!mAttributes[IFD_TYPE_EXIF].isEmpty()) {
            mAttributes[IFD_TYPE_PRIMARY].put(EXIF_POINTER_TAGS[1].name,
                    ExifAttribute.createULong(0, mExifByteOrder));
        }
        if (!mAttributes[IFD_TYPE_GPS].isEmpty()) {
            mAttributes[IFD_TYPE_PRIMARY].put(EXIF_POINTER_TAGS[2].name,
                    ExifAttribute.createULong(0, mExifByteOrder));
        }
        if (!mAttributes[IFD_TYPE_INTEROPERABILITY].isEmpty()) {
            mAttributes[IFD_TYPE_EXIF].put(EXIF_POINTER_TAGS[3].name,
                    ExifAttribute.createULong(0, mExifByteOrder));
        }
        if (mHasThumbnail) {
            mAttributes[IFD_TYPE_THUMBNAIL].put(JPEG_INTERCHANGE_FORMAT_TAG.name,
                    ExifAttribute.createULong(0, mExifByteOrder));
            mAttributes[IFD_TYPE_THUMBNAIL].put(JPEG_INTERCHANGE_FORMAT_LENGTH_TAG.name,
                    ExifAttribute.createULong(mThumbnailLength, mExifByteOrder));
        }

        // Calculate IFD group data area sizes. IFD group data area is assigned to save the entry
        // value which has a bigger size than 4 bytes.
        for (int i = 0; i < EXIF_TAGS.length; ++i) {
            int sum = 0;
            for (Map.Entry entry : (Set<Map.Entry>) mAttributes[i].entrySet()) {
                final ExifAttribute exifAttribute = (ExifAttribute) entry.getValue();
                final int size = exifAttribute.size();
                if (size > 4) {
                    sum += size;
                }
            }
            ifdDataSizes[i] += sum;
        }

        // Calculate IFD offsets.
        // Each IFD occupies 2 bytes (entry count) + 12 bytes per entry + 4 bytes (next-IFD
        // offset), followed by its out-of-line data area. Offsets start at 8, right after the
        // TIFF header.
        int position = 8;
        for (int ifdType = 0; ifdType < EXIF_TAGS.length; ++ifdType) {
            if (!mAttributes[ifdType].isEmpty()) {
                ifdOffsets[ifdType] = position;
                position += 2 + mAttributes[ifdType].size() * 12 + 4 + ifdDataSizes[ifdType];
            }
        }
        if (mHasThumbnail) {
            int thumbnailOffset = position;
            mAttributes[IFD_TYPE_THUMBNAIL].put(JPEG_INTERCHANGE_FORMAT_TAG.name,
                    ExifAttribute.createULong(thumbnailOffset, mExifByteOrder));
            // Remember the thumbnail's absolute position in the file for later reads.
            mThumbnailOffset = exifOffsetFromBeginning + thumbnailOffset;
            position += mThumbnailLength;
        }

        // Calculate the total size
        int totalSize = position + 8; // eight bytes is for header part.
        if (DEBUG) {
            Log.d(TAG, "totalSize length: " + totalSize);
            for (int i = 0; i < EXIF_TAGS.length; ++i) {
                Log.d(TAG, String.format("index: %d, offsets: %d, tag count: %d, data sizes: %d",
                        i, ifdOffsets[i], mAttributes[i].size(), ifdDataSizes[i]));
            }
        }

        // Update IFD pointer tags with the calculated offsets.
        if (!mAttributes[IFD_TYPE_EXIF].isEmpty()) {
            mAttributes[IFD_TYPE_PRIMARY].put(EXIF_POINTER_TAGS[1].name,
                    ExifAttribute.createULong(ifdOffsets[IFD_TYPE_EXIF], mExifByteOrder));
        }
        if (!mAttributes[IFD_TYPE_GPS].isEmpty()) {
            mAttributes[IFD_TYPE_PRIMARY].put(EXIF_POINTER_TAGS[2].name,
                    ExifAttribute.createULong(ifdOffsets[IFD_TYPE_GPS], mExifByteOrder));
        }
        if (!mAttributes[IFD_TYPE_INTEROPERABILITY].isEmpty()) {
            mAttributes[IFD_TYPE_EXIF].put(EXIF_POINTER_TAGS[3].name, ExifAttribute.createULong(
                    ifdOffsets[IFD_TYPE_INTEROPERABILITY], mExifByteOrder));
        }

        // Write TIFF Headers. See JEITA CP-3451C Section 4.5.2. Table 1.
        // Note: the segment length and Exif identifier are written in the stream's current
        // byte order; only afterwards is the stream switched to mExifByteOrder.
        dataOutputStream.writeUnsignedShort(totalSize);
        dataOutputStream.write(IDENTIFIER_EXIF_APP1);
        dataOutputStream.writeShort(mExifByteOrder == ByteOrder.BIG_ENDIAN
                ? BYTE_ALIGN_MM : BYTE_ALIGN_II);
        dataOutputStream.setByteOrder(mExifByteOrder);
        dataOutputStream.writeUnsignedShort(START_CODE);
        dataOutputStream.writeUnsignedInt(IFD_OFFSET);

        // Write IFD groups. See JEITA CP-3451C Section 4.5.8. Figure 9.
        for (int ifdType = 0; ifdType < EXIF_TAGS.length; ++ifdType) {
            if (!mAttributes[ifdType].isEmpty()) {
                // See JEITA CP-3451C Section 4.6.2: IFD structure.
                // Write entry count
                dataOutputStream.writeUnsignedShort(mAttributes[ifdType].size());

                // Write entry info
                // Values larger than 4 bytes live in the data area after this IFD; dataOffset
                // tracks where the next such value will be placed.
                int dataOffset = ifdOffsets[ifdType] + 2 + mAttributes[ifdType].size() * 12 + 4;
                for (Map.Entry entry : (Set<Map.Entry>) mAttributes[ifdType].entrySet()) {
                    // Convert tag name to tag number.
                    final ExifTag tag =
                            (ExifTag) sExifTagMapsForWriting[ifdType].get(entry.getKey());
                    final int tagNumber = tag.number;
                    final ExifAttribute attribute = (ExifAttribute) entry.getValue();
                    final int size = attribute.size();

                    dataOutputStream.writeUnsignedShort(tagNumber);
                    dataOutputStream.writeUnsignedShort(attribute.format);
                    dataOutputStream.writeInt(attribute.numberOfComponents);
                    if (size > 4) {
                        dataOutputStream.writeUnsignedInt(dataOffset);
                        dataOffset += size;
                    } else {
                        dataOutputStream.write(attribute.bytes);
                        // Fill zero up to 4 bytes
                        if (size < 4) {
                            for (int i = size; i < 4; ++i) {
                                dataOutputStream.writeByte(0);
                            }
                        }
                    }
                }

                // Write the next offset. It writes the offset of thumbnail IFD if there is one or
                // more tags in the thumbnail IFD when the current IFD is the primary image TIFF
                // IFD; Otherwise 0.
                if (ifdType == 0 && !mAttributes[IFD_TYPE_THUMBNAIL].isEmpty()) {
                    dataOutputStream.writeUnsignedInt(ifdOffsets[IFD_TYPE_THUMBNAIL]);
                } else {
                    dataOutputStream.writeUnsignedInt(0);
                }

                // Write values of data field exceeding 4 bytes after the next offset.
                for (Map.Entry entry : (Set<Map.Entry>) mAttributes[ifdType].entrySet()) {
                    ExifAttribute attribute = (ExifAttribute) entry.getValue();

                    if (attribute.bytes.length > 4) {
                        dataOutputStream.write(attribute.bytes, 0, attribute.bytes.length);
                    }
                }
            }
        }

        // Write thumbnail
        if (mHasThumbnail) {
            dataOutputStream.write(getThumbnailBytes());
        }

        // Reset the byte order to big endian in order to write remaining parts of the JPEG file.
        dataOutputStream.setByteOrder(ByteOrder.BIG_ENDIAN);

        return totalSize;
    }
+
+ /**
+ * Determines the data format of EXIF entry value.
+ *
+ * @param entryValue The value to be determined.
+ * @return Returns two data formats gussed as a pair in integer. If there is no two candidate
+ data formats for the given entry value, returns {@code -1} in the second of the pair.
+ */
+ private static Pair<Integer, Integer> guessDataFormat(String entryValue) {
+ // See TIFF 6.0 Section 2, "Image File Directory".
+ // Take the first component if there are more than one component.
+ if (entryValue.contains(",")) {
+ String[] entryValues = entryValue.split(",");
+ Pair<Integer, Integer> dataFormat = guessDataFormat(entryValues[0]);
+ if (dataFormat.first == IFD_FORMAT_STRING) {
+ return dataFormat;
+ }
+ for (int i = 1; i < entryValues.length; ++i) {
+ final Pair<Integer, Integer> guessDataFormat = guessDataFormat(entryValues[i]);
+ int first = -1, second = -1;
+ if (guessDataFormat.first == dataFormat.first
+ || guessDataFormat.second == dataFormat.first) {
+ first = dataFormat.first;
+ }
+ if (dataFormat.second != -1 && (guessDataFormat.first == dataFormat.second
+ || guessDataFormat.second == dataFormat.second)) {
+ second = dataFormat.second;
+ }
+ if (first == -1 && second == -1) {
+ return new Pair<>(IFD_FORMAT_STRING, -1);
+ }
+ if (first == -1) {
+ dataFormat = new Pair<>(second, -1);
+ continue;
+ }
+ if (second == -1) {
+ dataFormat = new Pair<>(first, -1);
+ continue;
+ }
+ }
+ return dataFormat;
+ }
+
+ if (entryValue.contains("/")) {
+ String[] rationalNumber = entryValue.split("/");
+ if (rationalNumber.length == 2) {
+ try {
+ long numerator = (long) Double.parseDouble(rationalNumber[0]);
+ long denominator = (long) Double.parseDouble(rationalNumber[1]);
+ if (numerator < 0L || denominator < 0L) {
+ return new Pair<>(IFD_FORMAT_SRATIONAL, -1);
+ }
+ if (numerator > Integer.MAX_VALUE || denominator > Integer.MAX_VALUE) {
+ return new Pair<>(IFD_FORMAT_URATIONAL, -1);
+ }
+ return new Pair<>(IFD_FORMAT_SRATIONAL, IFD_FORMAT_URATIONAL);
+ } catch (NumberFormatException e) {
+ // Ignored
+ }
+ }
+ return new Pair<>(IFD_FORMAT_STRING, -1);
+ }
+ try {
+ Long longValue = Long.parseLong(entryValue);
+ if (longValue >= 0 && longValue <= 65535) {
+ return new Pair<>(IFD_FORMAT_USHORT, IFD_FORMAT_ULONG);
+ }
+ if (longValue < 0) {
+ return new Pair<>(IFD_FORMAT_SLONG, -1);
+ }
+ return new Pair<>(IFD_FORMAT_ULONG, -1);
+ } catch (NumberFormatException e) {
+ // Ignored
+ }
+ try {
+ Double.parseDouble(entryValue);
+ return new Pair<>(IFD_FORMAT_DOUBLE, -1);
+ } catch (NumberFormatException e) {
+ // Ignored
+ }
+ return new Pair<>(IFD_FORMAT_STRING, -1);
+ }
+
+ // An input stream to parse EXIF data area, which can be written in either little or big endian
+ // order.
+ private static class ByteOrderedDataInputStream extends InputStream implements DataInput {
+ private static final ByteOrder LITTLE_ENDIAN = ByteOrder.LITTLE_ENDIAN;
+ private static final ByteOrder BIG_ENDIAN = ByteOrder.BIG_ENDIAN;
+
+ private DataInputStream mDataInputStream;
+ private InputStream mInputStream;
+ private ByteOrder mByteOrder = ByteOrder.BIG_ENDIAN;
+ private final int mLength;
+ private int mPosition;
+
+ public ByteOrderedDataInputStream(InputStream in) throws IOException {
+ mInputStream = in;
+ mDataInputStream = new DataInputStream(in);
+ mLength = mDataInputStream.available();
+ mPosition = 0;
+ mDataInputStream.mark(mLength);
+ }
+
+ public ByteOrderedDataInputStream(byte[] bytes) throws IOException {
+ this(new ByteArrayInputStream(bytes));
+ }
+
+ public void setByteOrder(ByteOrder byteOrder) {
+ mByteOrder = byteOrder;
+ }
+
+ public void seek(long byteCount) throws IOException {
+ if (mPosition > byteCount) {
+ mPosition = 0;
+ mDataInputStream.reset();
+ mDataInputStream.mark(mLength);
+ } else {
+ byteCount -= mPosition;
+ }
+
+ if (skipBytes((int) byteCount) != (int) byteCount) {
+ throw new IOException("Couldn't seek up to the byteCount");
+ }
+ }
+
+ public int peek() {
+ return mPosition;
+ }
+
+ @Override
+ public int available() throws IOException {
+ return mDataInputStream.available();
+ }
+
+ @Override
+ public int read() throws IOException {
+ ++mPosition;
+ return mDataInputStream.read();
+ }
+
+ @Override
+ public int readUnsignedByte() throws IOException {
+ ++mPosition;
+ return mDataInputStream.readUnsignedByte();
+ }
+
+ @Override
+ public String readLine() throws IOException {
+ Log.d(TAG, "Currently unsupported");
+ return null;
+ }
+
+ @Override
+ public boolean readBoolean() throws IOException {
+ ++mPosition;
+ return mDataInputStream.readBoolean();
+ }
+
+ @Override
+ public char readChar() throws IOException {
+ mPosition += 2;
+ return mDataInputStream.readChar();
+ }
+
+ @Override
+ public String readUTF() throws IOException {
+ mPosition += 2;
+ return mDataInputStream.readUTF();
+ }
+
+ @Override
+ public void readFully(byte[] buffer, int offset, int length) throws IOException {
+ mPosition += length;
+ if (mPosition > mLength) {
+ throw new EOFException();
+ }
+ if (mDataInputStream.read(buffer, offset, length) != length) {
+ throw new IOException("Couldn't read up to the length of buffer");
+ }
+ }
+
+ @Override
+ public void readFully(byte[] buffer) throws IOException {
+ mPosition += buffer.length;
+ if (mPosition > mLength) {
+ throw new EOFException();
+ }
+ if (mDataInputStream.read(buffer, 0, buffer.length) != buffer.length) {
+ throw new IOException("Couldn't read up to the length of buffer");
+ }
+ }
+
+ @Override
+ public byte readByte() throws IOException {
+ ++mPosition;
+ if (mPosition > mLength) {
+ throw new EOFException();
+ }
+ int ch = mDataInputStream.read();
+ if (ch < 0) {
+ throw new EOFException();
+ }
+ return (byte) ch;
+ }
+
+ @Override
+ public short readShort() throws IOException {
+ mPosition += 2;
+ if (mPosition > mLength) {
+ throw new EOFException();
+ }
+ int ch1 = mDataInputStream.read();
+ int ch2 = mDataInputStream.read();
+ if ((ch1 | ch2) < 0) {
+ throw new EOFException();
+ }
+ if (mByteOrder == LITTLE_ENDIAN) {
+ return (short) ((ch2 << 8) + (ch1));
+ } else if (mByteOrder == BIG_ENDIAN) {
+ return (short) ((ch1 << 8) + (ch2));
+ }
+ throw new IOException("Invalid byte order: " + mByteOrder);
+ }
+
+ @Override
+ public int readInt() throws IOException {
+ mPosition += 4;
+ if (mPosition > mLength) {
+ throw new EOFException();
+ }
+ int ch1 = mDataInputStream.read();
+ int ch2 = mDataInputStream.read();
+ int ch3 = mDataInputStream.read();
+ int ch4 = mDataInputStream.read();
+ if ((ch1 | ch2 | ch3 | ch4) < 0) {
+ throw new EOFException();
+ }
+ if (mByteOrder == LITTLE_ENDIAN) {
+ return ((ch4 << 24) + (ch3 << 16) + (ch2 << 8) + ch1);
+ } else if (mByteOrder == BIG_ENDIAN) {
+ return ((ch1 << 24) + (ch2 << 16) + (ch3 << 8) + ch4);
+ }
+ throw new IOException("Invalid byte order: " + mByteOrder);
+ }
+
+ @Override
+ public int skipBytes(int byteCount) throws IOException {
+ int totalSkip = Math.min(byteCount, mLength - mPosition);
+ int skipped = 0;
+ while (skipped < totalSkip) {
+ skipped += mDataInputStream.skipBytes(totalSkip - skipped);
+ }
+ mPosition += skipped;
+ return skipped;
+ }
+
+ public int readUnsignedShort() throws IOException {
+ mPosition += 2;
+ if (mPosition > mLength) {
+ throw new EOFException();
+ }
+ int ch1 = mDataInputStream.read();
+ int ch2 = mDataInputStream.read();
+ if ((ch1 | ch2) < 0) {
+ throw new EOFException();
+ }
+ if (mByteOrder == LITTLE_ENDIAN) {
+ return ((ch2 << 8) + (ch1));
+ } else if (mByteOrder == BIG_ENDIAN) {
+ return ((ch1 << 8) + (ch2));
+ }
+ throw new IOException("Invalid byte order: " + mByteOrder);
+ }
+
+ public long readUnsignedInt() throws IOException {
+ return readInt() & 0xffffffffL;
+ }
+
+ @Override
+ public long readLong() throws IOException {
+ mPosition += 8;
+ if (mPosition > mLength) {
+ throw new EOFException();
+ }
+ int ch1 = mDataInputStream.read();
+ int ch2 = mDataInputStream.read();
+ int ch3 = mDataInputStream.read();
+ int ch4 = mDataInputStream.read();
+ int ch5 = mDataInputStream.read();
+ int ch6 = mDataInputStream.read();
+ int ch7 = mDataInputStream.read();
+ int ch8 = mDataInputStream.read();
+ if ((ch1 | ch2 | ch3 | ch4 | ch5 | ch6 | ch7 | ch8) < 0) {
+ throw new EOFException();
+ }
+ if (mByteOrder == LITTLE_ENDIAN) {
+ return (((long) ch8 << 56) + ((long) ch7 << 48) + ((long) ch6 << 40)
+ + ((long) ch5 << 32) + ((long) ch4 << 24) + ((long) ch3 << 16)
+ + ((long) ch2 << 8) + (long) ch1);
+ } else if (mByteOrder == BIG_ENDIAN) {
+ return (((long) ch1 << 56) + ((long) ch2 << 48) + ((long) ch3 << 40)
+ + ((long) ch4 << 32) + ((long) ch5 << 24) + ((long) ch6 << 16)
+ + ((long) ch7 << 8) + (long) ch8);
+ }
+ throw new IOException("Invalid byte order: " + mByteOrder);
+ }
+
+ @Override
+ public float readFloat() throws IOException {
+ return Float.intBitsToFloat(readInt());
+ }
+
+ @Override
+ public double readDouble() throws IOException {
+ return Double.longBitsToDouble(readLong());
+ }
+ }
+
+ // An output stream to write EXIF data area, which can be written in either little or big endian
+ // order.
+ private static class ByteOrderedDataOutputStream extends FilterOutputStream {
+ private final OutputStream mOutputStream;
+ private ByteOrder mByteOrder;
+
+ public ByteOrderedDataOutputStream(OutputStream out, ByteOrder byteOrder) {
+ super(out);
+ mOutputStream = out;
+ mByteOrder = byteOrder;
+ }
+
+ public void setByteOrder(ByteOrder byteOrder) {
+ mByteOrder = byteOrder;
+ }
+
+ public void write(byte[] bytes) throws IOException {
+ mOutputStream.write(bytes);
+ }
+
+ public void write(byte[] bytes, int offset, int length) throws IOException {
+ mOutputStream.write(bytes, offset, length);
+ }
+
+ public void writeByte(int val) throws IOException {
+ mOutputStream.write(val);
+ }
+
+ public void writeShort(short val) throws IOException {
+ if (mByteOrder == ByteOrder.LITTLE_ENDIAN) {
+ mOutputStream.write((val >>> 0) & 0xFF);
+ mOutputStream.write((val >>> 8) & 0xFF);
+ } else if (mByteOrder == ByteOrder.BIG_ENDIAN) {
+ mOutputStream.write((val >>> 8) & 0xFF);
+ mOutputStream.write((val >>> 0) & 0xFF);
+ }
+ }
+
+ public void writeInt(int val) throws IOException {
+ if (mByteOrder == ByteOrder.LITTLE_ENDIAN) {
+ mOutputStream.write((val >>> 0) & 0xFF);
+ mOutputStream.write((val >>> 8) & 0xFF);
+ mOutputStream.write((val >>> 16) & 0xFF);
+ mOutputStream.write((val >>> 24) & 0xFF);
+ } else if (mByteOrder == ByteOrder.BIG_ENDIAN) {
+ mOutputStream.write((val >>> 24) & 0xFF);
+ mOutputStream.write((val >>> 16) & 0xFF);
+ mOutputStream.write((val >>> 8) & 0xFF);
+ mOutputStream.write((val >>> 0) & 0xFF);
+ }
+ }
+
+ public void writeUnsignedShort(int val) throws IOException {
+ writeShort((short) val);
+ }
+
+ public void writeUnsignedInt(long val) throws IOException {
+ writeInt((int) val);
+ }
+ }
+
+ // Swaps image data based on image size
+ private void swapBasedOnImageSize(@IfdType int firstIfdType, @IfdType int secondIfdType)
+ throws IOException {
+ if (mAttributes[firstIfdType].isEmpty() || mAttributes[secondIfdType].isEmpty()) {
+ if (DEBUG) {
+ Log.d(TAG, "Cannot perform swap since only one image data exists");
+ }
+ return;
+ }
+
+ ExifAttribute firstImageLengthAttribute =
+ (ExifAttribute) mAttributes[firstIfdType].get(TAG_IMAGE_LENGTH);
+ ExifAttribute firstImageWidthAttribute =
+ (ExifAttribute) mAttributes[firstIfdType].get(TAG_IMAGE_WIDTH);
+ ExifAttribute secondImageLengthAttribute =
+ (ExifAttribute) mAttributes[secondIfdType].get(TAG_IMAGE_LENGTH);
+ ExifAttribute secondImageWidthAttribute =
+ (ExifAttribute) mAttributes[secondIfdType].get(TAG_IMAGE_WIDTH);
+
+ if (firstImageLengthAttribute == null || firstImageWidthAttribute == null) {
+ if (DEBUG) {
+ Log.d(TAG, "First image does not contain valid size information");
+ }
+ } else if (secondImageLengthAttribute == null || secondImageWidthAttribute == null) {
+ if (DEBUG) {
+ Log.d(TAG, "Second image does not contain valid size information");
+ }
+ } else {
+ int firstImageLengthValue = firstImageLengthAttribute.getIntValue(mExifByteOrder);
+ int firstImageWidthValue = firstImageWidthAttribute.getIntValue(mExifByteOrder);
+ int secondImageLengthValue = secondImageLengthAttribute.getIntValue(mExifByteOrder);
+ int secondImageWidthValue = secondImageWidthAttribute.getIntValue(mExifByteOrder);
+
+ if (firstImageLengthValue < secondImageLengthValue &&
+ firstImageWidthValue < secondImageWidthValue) {
+ HashMap tempMap = mAttributes[firstIfdType];
+ mAttributes[firstIfdType] = mAttributes[secondIfdType];
+ mAttributes[secondIfdType] = tempMap;
+ }
+ }
+ }
+
+ // Checks if there is a match
+ private boolean containsMatch(byte[] mainBytes, byte[] findBytes) {
+ for (int i = 0; i < mainBytes.length - findBytes.length; i++) {
+ for (int j = 0; j < findBytes.length; j++) {
+ if (mainBytes[i + j] != findBytes[j]) {
+ break;
+ }
+ if (j == findBytes.length - 1) {
+ return true;
+ }
+ }
+ }
+ return false;
+ }
+}
diff --git a/android/media/ExternalRingtonesCursorWrapper.java b/android/media/ExternalRingtonesCursorWrapper.java
new file mode 100644
index 00000000..dd4c77a0
--- /dev/null
+++ b/android/media/ExternalRingtonesCursorWrapper.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.content.ContentProvider;
+import android.database.Cursor;
+import android.database.CursorWrapper;
+import android.net.Uri;
+
/**
 * Cursor that adds the user id to fetched URIs. This is especially useful for
 * {@link RingtoneManager#getCursor} as a managed profile should also list its parent's
 * ringtones.
 *
 * @hide
 */
+public class ExternalRingtonesCursorWrapper extends CursorWrapper {
+
+ private int mUserId;
+
+ public ExternalRingtonesCursorWrapper(Cursor cursor, int userId) {
+ super(cursor);
+ mUserId = userId;
+ }
+
+ public String getString(int index) {
+ String result = super.getString(index);
+ if (index == RingtoneManager.URI_COLUMN_INDEX) {
+ result = ContentProvider.maybeAddUserId(Uri.parse(result), mUserId).toString();
+ }
+ return result;
+ }
+}
diff --git a/android/media/FaceDetector.java b/android/media/FaceDetector.java
new file mode 100644
index 00000000..61991e37
--- /dev/null
+++ b/android/media/FaceDetector.java
@@ -0,0 +1,202 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.graphics.Bitmap;
+import android.graphics.PointF;
+import android.util.Log;
+
+import java.lang.IllegalArgumentException;
+
+/**
+ * Identifies the faces of people in a
+ * {@link android.graphics.Bitmap} graphic object.
+ */
+public class FaceDetector {
+
+ /**
+ * A Face contains all the information identifying the location
+ * of a face in a bitmap.
+ */
+ public class Face {
+ /** The minimum confidence factor of good face recognition */
+ public static final float CONFIDENCE_THRESHOLD = 0.4f;
+ /** The x-axis Euler angle of a face. */
+ public static final int EULER_X = 0;
+ /** The y-axis Euler angle of a face. */
+ public static final int EULER_Y = 1;
+ /** The z-axis Euler angle of a face. */
+ public static final int EULER_Z = 2;
+
+ /**
+ * Returns a confidence factor between 0 and 1. This indicates how
+ * certain what has been found is actually a face. A confidence
+ * factor above 0.3 is usually good enough.
+ */
+ public float confidence() {
+ return mConfidence;
+ }
+ /**
+ * Sets the position of the mid-point between the eyes.
+ * @param point the PointF coordinates (float values) of the
+ * face's mid-point
+ */
+ public void getMidPoint(PointF point) {
+ // don't return a PointF to avoid allocations
+ point.set(mMidPointX, mMidPointY);
+ }
+ /**
+ * Returns the distance between the eyes.
+ */
+ public float eyesDistance() {
+ return mEyesDist;
+ }
+ /**
+ * Returns the face's pose. That is, the rotations around either
+ * the X, Y or Z axis (the positions in 3-dimensional Euclidean space).
+ *
+ * @param euler the Euler axis to retrieve an angle from
+ * (<var>EULER_X</var>, <var>EULER_Y</var> or
+ * <var>EULER_Z</var>)
+ * @return the Euler angle of the of the face, for the given axis
+ */
+ public float pose(int euler) {
+ // don't use an array to avoid allocations
+ if (euler == EULER_X)
+ return mPoseEulerX;
+ else if (euler == EULER_Y)
+ return mPoseEulerY;
+ else if (euler == EULER_Z)
+ return mPoseEulerZ;
+ throw new IllegalArgumentException();
+ }
+
+ // private ctor, user not supposed to build this object
+ private Face() {
+ }
+ private float mConfidence;
+ private float mMidPointX;
+ private float mMidPointY;
+ private float mEyesDist;
+ private float mPoseEulerX;
+ private float mPoseEulerY;
+ private float mPoseEulerZ;
+ }
+
+
+ /**
+ * Creates a FaceDetector, configured with the size of the images to
+ * be analysed and the maximum number of faces that can be detected.
+ * These parameters cannot be changed once the object is constructed.
+ * Note that the width of the image must be even.
+ *
+ * @param width the width of the image
+ * @param height the height of the image
+ * @param maxFaces the maximum number of faces to identify
+ *
+ */
+ public FaceDetector(int width, int height, int maxFaces)
+ {
+ if (!sInitialized) {
+ return;
+ }
+ fft_initialize(width, height, maxFaces);
+ mWidth = width;
+ mHeight = height;
+ mMaxFaces = maxFaces;
+ mBWBuffer = new byte[width * height];
+ }
+
+ /**
+ * Finds all the faces found in a given {@link android.graphics.Bitmap}.
+ * The supplied array is populated with {@link FaceDetector.Face}s for each
+ * face found. The bitmap must be in 565 format (for now).
+ *
+ * @param bitmap the {@link android.graphics.Bitmap} graphic to be analyzed
+ * @param faces an array in which to place all found
+ * {@link FaceDetector.Face}s. The array must be sized equal
+ * to the <var>maxFaces</var> value set at initialization
+ * @return the number of faces found
+ * @throws IllegalArgumentException if the Bitmap dimensions don't match
+ * the dimensions defined at initialization or the given array
+ * is not sized equal to the <var>maxFaces</var> value defined
+ * at initialization
+ */
+ public int findFaces(Bitmap bitmap, Face[] faces)
+ {
+ if (!sInitialized) {
+ return 0;
+ }
+ if (bitmap.getWidth() != mWidth || bitmap.getHeight() != mHeight) {
+ throw new IllegalArgumentException(
+ "bitmap size doesn't match initialization");
+ }
+ if (faces.length < mMaxFaces) {
+ throw new IllegalArgumentException(
+ "faces[] smaller than maxFaces");
+ }
+
+ int numFaces = fft_detect(bitmap);
+ if (numFaces >= mMaxFaces)
+ numFaces = mMaxFaces;
+ for (int i=0 ; i<numFaces ; i++) {
+ if (faces[i] == null)
+ faces[i] = new Face();
+ fft_get_face(faces[i], i);
+ }
+ return numFaces;
+ }
+
+
+ /* no user serviceable parts here ... */
+ @Override
+ protected void finalize() throws Throwable {
+ fft_destroy();
+ }
+
+ /*
+ * We use a class initializer to allow the native code to cache some
+ * field offsets.
+ */
+ private static boolean sInitialized;
+ native private static void nativeClassInit();
+
+ static {
+ sInitialized = false;
+ try {
+ System.loadLibrary("FFTEm");
+ nativeClassInit();
+ sInitialized = true;
+ } catch (UnsatisfiedLinkError e) {
+ Log.d("FFTEm", "face detection library not found!");
+ }
+ }
+
+ native private int fft_initialize(int width, int height, int maxFaces);
+ native private int fft_detect(Bitmap bitmap);
+ native private void fft_get_face(Face face, int i);
+ native private void fft_destroy();
+
+ private long mFD;
+ private long mSDK;
+ private long mDCR;
+ private int mWidth;
+ private int mHeight;
+ private int mMaxFaces;
+ private byte mBWBuffer[];
+}
+
diff --git a/android/media/Image.java b/android/media/Image.java
new file mode 100644
index 00000000..fbe55614
--- /dev/null
+++ b/android/media/Image.java
@@ -0,0 +1,396 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import java.nio.ByteBuffer;
+import java.lang.AutoCloseable;
+
+import android.graphics.Rect;
+
+/**
+ * <p>A single complete image buffer to use with a media source such as a
+ * {@link MediaCodec} or a
+ * {@link android.hardware.camera2.CameraDevice CameraDevice}.</p>
+ *
+ * <p>This class allows for efficient direct application access to the pixel
+ * data of the Image through one or more
+ * {@link java.nio.ByteBuffer ByteBuffers}. Each buffer is encapsulated in a
+ * {@link Plane} that describes the layout of the pixel data in that plane. Due
+ * to this direct access, and unlike the {@link android.graphics.Bitmap Bitmap} class,
+ * Images are not directly usable as UI resources.</p>
+ *
+ * <p>Since Images are often directly produced or consumed by hardware
+ * components, they are a limited resource shared across the system, and should
+ * be closed as soon as they are no longer needed.</p>
+ *
+ * <p>For example, when using the {@link ImageReader} class to read out Images
+ * from various media sources, not closing old Image objects will prevent the
+ * availability of new Images once
+ * {@link ImageReader#getMaxImages the maximum outstanding image count} is
+ * reached. When this happens, the function acquiring new Images will typically
+ * throw an {@link IllegalStateException}.</p>
+ *
+ * @see ImageReader
+ */
+public abstract class Image implements AutoCloseable {
+    /**
+     * Whether this image is still usable; checked by
+     * {@link #throwISEIfImageIsInvalid} and expected to be cleared once the
+     * image is closed.
+     *
+     * @hide
+     */
+    protected boolean mIsImageValid = false;
+
+    /**
+     * Protected constructor: only framework subclasses create Image instances.
+     *
+     * @hide
+     */
+    protected Image() {
+    }
+
+ /**
+ * Throw IllegalStateException if the image is invalid (already closed).
+ *
+ * @hide
+ */
+    protected void throwISEIfImageIsInvalid() {
+        // Guard clause: a valid image needs no further checks.
+        if (mIsImageValid) {
+            return;
+        }
+        throw new IllegalStateException("Image is already closed");
+    }
+ /**
+ * Get the format for this image. This format determines the number of
+ * ByteBuffers needed to represent the image, and the general layout of the
+ * pixel data in each in ByteBuffer.
+ *
+ * <p>
+ * The format is one of the values from
+ * {@link android.graphics.ImageFormat ImageFormat}. The mapping between the
+ * formats and the planes is as follows:
+ * </p>
+ *
+ * <table>
+ * <tr>
+ * <th>Format</th>
+ * <th>Plane count</th>
+ * <th>Layout details</th>
+ * </tr>
+ * <tr>
+ * <td>{@link android.graphics.ImageFormat#JPEG JPEG}</td>
+ * <td>1</td>
+ * <td>Compressed data, so row and pixel strides are 0. To uncompress, use
+ * {@link android.graphics.BitmapFactory#decodeByteArray BitmapFactory#decodeByteArray}.
+ * </td>
+ * </tr>
+ * <tr>
+ * <td>{@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888}</td>
+ * <td>3</td>
+ * <td>A luminance plane followed by the Cb and Cr chroma planes.
+ * The chroma planes have half the width and height of the luminance
+ * plane (4:2:0 subsampling). Each pixel sample in each plane has 8 bits.
+ * Each plane has its own row stride and pixel stride.</td>
+ * </tr>
+ * <tr>
+ * <td>{@link android.graphics.ImageFormat#YUV_422_888 YUV_422_888}</td>
+ * <td>3</td>
+ * <td>A luminance plane followed by the Cb and Cr chroma planes.
+ * The chroma planes have half the width and the full height of the luminance
+ * plane (4:2:2 subsampling). Each pixel sample in each plane has 8 bits.
+ * Each plane has its own row stride and pixel stride.</td>
+ * </tr>
+ * <tr>
+ * <td>{@link android.graphics.ImageFormat#YUV_444_888 YUV_444_888}</td>
+ * <td>3</td>
+ * <td>A luminance plane followed by the Cb and Cr chroma planes.
+ * The chroma planes have the same width and height as that of the luminance
+ * plane (4:4:4 subsampling). Each pixel sample in each plane has 8 bits.
+ * Each plane has its own row stride and pixel stride.</td>
+ * </tr>
+ * <tr>
+ * <td>{@link android.graphics.ImageFormat#FLEX_RGB_888 FLEX_RGB_888}</td>
+ * <td>3</td>
+ * <td>A R (red) plane followed by the G (green) and B (blue) planes.
+ * All planes have the same widths and heights.
+ * Each pixel sample in each plane has 8 bits.
+ * Each plane has its own row stride and pixel stride.</td>
+ * </tr>
+ * <tr>
+ * <td>{@link android.graphics.ImageFormat#FLEX_RGBA_8888 FLEX_RGBA_8888}</td>
+ * <td>4</td>
+ * <td>A R (red) plane followed by the G (green), B (blue), and
+ * A (alpha) planes. All planes have the same widths and heights.
+ * Each pixel sample in each plane has 8 bits.
+ * Each plane has its own row stride and pixel stride.</td>
+ * </tr>
+ * <tr>
+ * <td>{@link android.graphics.ImageFormat#RAW_SENSOR RAW_SENSOR}</td>
+ * <td>1</td>
+ * <td>A single plane of raw sensor image data, with 16 bits per color
+ * sample. The details of the layout need to be queried from the source of
+ * the raw sensor data, such as
+ * {@link android.hardware.camera2.CameraDevice CameraDevice}.
+ * </td>
+ * </tr>
+ * <tr>
+ * <td>{@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE}</td>
+ * <td>1</td>
+ * <td>A single plane of raw sensor image data of private layout.
+ * The details of the layout is implementation specific. Row stride and
+ * pixel stride are undefined for this format. Calling {@link Plane#getRowStride()}
+ * or {@link Plane#getPixelStride()} on RAW_PRIVATE image will cause
+ * an UnsupportedOperationException to be thrown.
+ * </td>
+ * </tr>
+ * </table>
+ *
+ * @see android.graphics.ImageFormat
+ */
+ public abstract int getFormat();
+
+ /**
+ * The width of the image in pixels. For formats where some color channels
+ * are subsampled, this is the width of the largest-resolution plane.
+ */
+ public abstract int getWidth();
+
+ /**
+ * The height of the image in pixels. For formats where some color channels
+ * are subsampled, this is the height of the largest-resolution plane.
+ */
+ public abstract int getHeight();
+
+ /**
+ * Get the timestamp associated with this frame.
+ * <p>
+ * The timestamp is measured in nanoseconds, and is normally monotonically
+ * increasing. The timestamps for the images from different sources may have
+ * different timebases therefore may not be comparable. The specific meaning and
+ * timebase of the timestamp depend on the source providing images. See
+ * {@link android.hardware.Camera Camera},
+ * {@link android.hardware.camera2.CameraDevice CameraDevice},
+ * {@link MediaPlayer} and {@link MediaCodec} for more details.
+ * </p>
+ */
+ public abstract long getTimestamp();
+
+ /**
+ * Set the timestamp associated with this frame.
+ * <p>
+ * The timestamp is measured in nanoseconds, and is normally monotonically
+ * increasing. The timestamps for the images from different sources may have
+ * different timebases therefore may not be comparable. The specific meaning and
+ * timebase of the timestamp depend on the source providing images. See
+ * {@link android.hardware.Camera Camera},
+ * {@link android.hardware.camera2.CameraDevice CameraDevice},
+ * {@link MediaPlayer} and {@link MediaCodec} for more details.
+ * </p>
+ * <p>
+ * For images dequeued from {@link ImageWriter} via
+ * {@link ImageWriter#dequeueInputImage()}, it's up to the application to
+ * set the timestamps correctly before sending them back to the
+ * {@link ImageWriter}, or the timestamp will be generated automatically when
+ * {@link ImageWriter#queueInputImage queueInputImage()} is called.
+ * </p>
+ *
+ * @param timestamp The timestamp to be set for this image.
+ */
+    public void setTimestamp(long timestamp) {
+        // Default implementation only validates that the image is still open
+        // and otherwise ignores the value; subclasses that support writable
+        // timestamps override this. (A redundant trailing `return;` in a void
+        // method was removed.)
+        throwISEIfImageIsInvalid();
+    }
+
+ private Rect mCropRect;
+
+ /**
+ * Get the crop rectangle associated with this frame.
+ * <p>
+ * The crop rectangle specifies the region of valid pixels in the image,
+ * using coordinates in the largest-resolution plane.
+ */
+    public Rect getCropRect() {
+        throwISEIfImageIsInvalid();
+
+        // No crop set means the full image bounds; otherwise hand back a
+        // defensive copy so callers cannot mutate our stored rectangle.
+        return (mCropRect == null)
+                ? new Rect(0, 0, getWidth(), getHeight())
+                : new Rect(mCropRect);
+    }
+
+ /**
+ * Set the crop rectangle associated with this frame.
+ * <p>
+ * The crop rectangle specifies the region of valid pixels in the image,
+ * using coordinates in the largest-resolution plane.
+ */
+ public void setCropRect(Rect cropRect) {
+ throwISEIfImageIsInvalid();
+
+ if (cropRect != null) {
+ cropRect = new Rect(cropRect); // make a copy
+ if (!cropRect.intersect(0, 0, getWidth(), getHeight())) {
+ cropRect.setEmpty();
+ }
+ }
+ mCropRect = cropRect;
+ }
+
+ /**
+ * Get the array of pixel planes for this Image. The number of planes is
+ * determined by the format of the Image. The application will get an empty
+ * array if the image format is {@link android.graphics.ImageFormat#PRIVATE
+ * PRIVATE}, because the image pixel data is not directly accessible. The
+ * application can check the image format by calling
+ * {@link Image#getFormat()}.
+ */
+ public abstract Plane[] getPlanes();
+
+ /**
+ * Free up this frame for reuse.
+ * <p>
+ * After calling this method, calling any methods on this {@code Image} will
+ * result in an {@link IllegalStateException}, and attempting to read from
+ * or write to {@link ByteBuffer ByteBuffers} returned by an earlier
+ * {@link Plane#getBuffer} call will have undefined behavior. If the image
+ * was obtained from {@link ImageWriter} via
+ * {@link ImageWriter#dequeueInputImage()}, after calling this method, any
+ * image data filled by the application will be lost and the image will be
+ * returned to {@link ImageWriter} for reuse. Images given to
+ * {@link ImageWriter#queueInputImage queueInputImage()} are automatically
+ * closed.
+ * </p>
+ */
+ @Override
+ public abstract void close();
+
+    /**
+     * <p>
+     * Check if the image can be attached to a new owner (e.g. {@link ImageWriter}).
+     * </p>
+     * <p>
+     * This is a package private method that is only used internally.
+     * </p>
+     *
+     * @return true if the image is attachable to a new owner, false if the image is still attached
+     *         to its current owner, or the image is a stand-alone image and is not attachable to
+     *         a new owner.
+     */
+    boolean isAttachable() {
+        throwISEIfImageIsInvalid();
+
+        // Base images are never attachable; owning subclasses override this.
+        return false;
+    }
+
+    /**
+     * <p>
+     * Get the owner of the {@link Image}.
+     * </p>
+     * <p>
+     * The owner of an {@link Image} could be {@link ImageReader}, {@link ImageWriter},
+     * {@link MediaCodec} etc. This method returns the owner that produces this image, or null
+     * if the image is stand-alone image or the owner is unknown.
+     * </p>
+     * <p>
+     * This is a package private method that is only used internally.
+     * </p>
+     *
+     * @return The owner of the Image.
+     */
+    Object getOwner() {
+        throwISEIfImageIsInvalid();
+
+        // No owner by default; reader/writer-backed subclasses override this.
+        return null;
+    }
+
+    /**
+     * Get native context (buffer pointer) associated with this image.
+     * <p>
+     * This is a package private method that is only used internally. It can be
+     * used to get the native buffer pointer and passed to native, which may be
+     * passed to {@link ImageWriter#attachAndQueueInputImage} to avoid a reverse
+     * JNI call.
+     * </p>
+     *
+     * @return native context associated with this Image.
+     */
+    long getNativeContext() {
+        throwISEIfImageIsInvalid();
+
+        // 0 means "no native buffer"; natively-backed subclasses override this.
+        return 0;
+    }
+
+    /**
+     * <p>A single color plane of image data.</p>
+     *
+     * <p>The number and meaning of the planes in an Image are determined by the
+     * format of the Image.</p>
+     *
+     * <p>Once the Image has been closed, any access to the plane's
+     * ByteBuffer will fail.</p>
+     *
+     * @see #getFormat
+     */
+    public static abstract class Plane {
+        /**
+         * Protected constructor: only framework subclasses create Plane instances.
+         *
+         * @hide
+         */
+        protected Plane() {
+        }
+
+        /**
+         * <p>The row stride for this color plane, in bytes.</p>
+         *
+         * <p>This is the distance between the start of two consecutive rows of
+         * pixels in the image. Note that row stride is undefined for some formats
+         * such as
+         * {@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE},
+         * and calling getRowStride on images of these formats will
+         * cause an UnsupportedOperationException to be thrown.
+         * For formats where row stride is well defined, the row stride
+         * is always greater than 0.</p>
+         */
+        public abstract int getRowStride();
+        /**
+         * <p>The distance between adjacent pixel samples, in bytes.</p>
+         *
+         * <p>This is the distance between two consecutive pixel values in a row
+         * of pixels. It may be larger than the size of a single pixel to
+         * account for interleaved image data or padded formats.
+         * Note that pixel stride is undefined for some formats such as
+         * {@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE},
+         * and calling getPixelStride on images of these formats will
+         * cause an UnsupportedOperationException to be thrown.
+         * For formats where pixel stride is well defined, the pixel stride
+         * is always greater than 0.</p>
+         */
+        public abstract int getPixelStride();
+        /**
+         * <p>Get a direct {@link java.nio.ByteBuffer ByteBuffer}
+         * containing the frame data.</p>
+         *
+         * <p>In particular, the buffer returned will always have
+         * {@link java.nio.ByteBuffer#isDirect isDirect} return {@code true}, so
+         * the underlying data could be mapped as a pointer in JNI without doing
+         * any copies with {@code GetDirectBufferAddress}.</p>
+         *
+         * <p>For raw formats, each plane is only guaranteed to contain data
+         * up to the last pixel in the last row. In other words, the stride
+         * after the last row may not be mapped into the buffer. This is a
+         * necessary requirement for any interleaved format.</p>
+         *
+         * @return the byte buffer containing the image data for this plane.
+         */
+        public abstract ByteBuffer getBuffer();
+    }
+
+}
diff --git a/android/media/ImageReader.java b/android/media/ImageReader.java
new file mode 100644
index 00000000..c78c99f7
--- /dev/null
+++ b/android/media/ImageReader.java
@@ -0,0 +1,1049 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.graphics.ImageFormat;
+import android.graphics.PixelFormat;
+import android.hardware.HardwareBuffer;
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Message;
+import android.util.Log;
+import android.view.Surface;
+
+import dalvik.system.VMRuntime;
+
+import java.lang.ref.WeakReference;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.NioUtils;
+import java.util.List;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+/**
+ * <p>The ImageReader class allows direct application access to image data
+ * rendered into a {@link android.view.Surface}</p>
+ *
+ * <p>Several Android media API classes accept Surface objects as targets to
+ * render to, including {@link MediaPlayer}, {@link MediaCodec},
+ * {@link android.hardware.camera2.CameraDevice}, {@link ImageWriter} and
+ * {@link android.renderscript.Allocation RenderScript Allocations}. The image
+ * sizes and formats that can be used with each source vary, and should be
+ * checked in the documentation for the specific API.</p>
+ *
+ * <p>The image data is encapsulated in {@link Image} objects, and multiple such
+ * objects can be accessed at the same time, up to the number specified by the
+ * {@code maxImages} constructor parameter. New images sent to an ImageReader
+ * through its {@link Surface} are queued until accessed through the {@link #acquireLatestImage}
+ * or {@link #acquireNextImage} call. Due to memory limits, an image source will
+ * eventually stall or drop Images in trying to render to the Surface if the
+ * ImageReader does not obtain and release Images at a rate equal to the
+ * production rate.</p>
+ */
+public class ImageReader implements AutoCloseable {
+
+    /**
+     * Returned by nativeImageSetup when acquiring the image was successful.
+     */
+    private static final int ACQUIRE_SUCCESS = 0;
+    /**
+     * Returned by nativeImageSetup when we couldn't acquire the buffer,
+     * because there were no buffers available to acquire.
+     */
+    private static final int ACQUIRE_NO_BUFS = 1;
+    /**
+     * Returned by nativeImageSetup when we couldn't acquire the buffer
+     * because the consumer has already acquired {@code maxImages} and cannot
+     * acquire more than that.
+     */
+    private static final int ACQUIRE_MAX_IMAGES = 2;
+
+    /**
+     * Invalid consumer buffer usage flag. This usage flag will be ignored
+     * if the {@code ImageReader} instance is constructed with this value.
+     */
+    private static final long BUFFER_USAGE_UNKNOWN = 0;
+
+ /**
+ * <p>
+ * Create a new reader for images of the desired size and format.
+ * </p>
+ * <p>
+ * The {@code maxImages} parameter determines the maximum number of
+ * {@link Image} objects that can be acquired from the
+ * {@code ImageReader} simultaneously. Requesting more buffers will use up
+ * more memory, so it is important to use only the minimum number necessary
+ * for the use case.
+ * </p>
+ * <p>
+ * The valid sizes and formats depend on the source of the image data.
+ * </p>
+ * <p>
+ * If the {@code format} is {@link ImageFormat#PRIVATE PRIVATE}, the created
+ * {@link ImageReader} will produce images that are not directly accessible
+ * by the application. The application can still acquire images from this
+ * {@link ImageReader}, and send them to the
+ * {@link android.hardware.camera2.CameraDevice camera} for reprocessing via
+ * {@link ImageWriter} interface. However, the {@link Image#getPlanes()
+ * getPlanes()} will return an empty array for {@link ImageFormat#PRIVATE
+ * PRIVATE} format images. The application can check an existing reader's
+ * format by calling {@link #getImageFormat()}.
+ * </p>
+ * <p>
+ * {@link ImageFormat#PRIVATE PRIVATE} format {@link ImageReader
+ * ImageReaders} are more efficient to use when application access to image
+ * data is not necessary, compared to ImageReaders using other format such
+ * as {@link ImageFormat#YUV_420_888 YUV_420_888}.
+ * </p>
+ *
+ * @param width The default width in pixels of the Images that this reader
+ * will produce.
+ * @param height The default height in pixels of the Images that this reader
+ * will produce.
+ * @param format The format of the Image that this reader will produce. This
+ * must be one of the {@link android.graphics.ImageFormat} or
+ * {@link android.graphics.PixelFormat} constants. Note that not
+ * all formats are supported, like ImageFormat.NV21.
+ * @param maxImages The maximum number of images the user will want to
+ * access simultaneously. This should be as small as possible to
+ * limit memory use. Once maxImages Images are obtained by the
+ * user, one of them has to be released before a new Image will
+ * become available for access through
+ * {@link #acquireLatestImage()} or {@link #acquireNextImage()}.
+ * Must be greater than 0.
+ * @see Image
+ */
+    public static ImageReader newInstance(int width, int height, int format, int maxImages) {
+        // BUFFER_USAGE_UNKNOWN: no caller-specified usage flag (see the
+        // constant's documentation — it is ignored by the constructed reader).
+        return new ImageReader(width, height, format, maxImages, BUFFER_USAGE_UNKNOWN);
+    }
+
+ /**
+ * <p>
+ * Create a new reader for images of the desired size, format and consumer usage flag.
+ * </p>
+ * <p>
+ * The {@code maxImages} parameter determines the maximum number of {@link Image} objects that
+ * can be acquired from the {@code ImageReader} simultaneously. Requesting more buffers will
+ * use up more memory, so it is important to use only the minimum number necessary for the use
+ * case.
+ * </p>
+ * <p>
+ * The valid sizes and formats depend on the source of the image data.
+ * </p>
+ * <p>
+ * The format and usage flag combination describes how the buffer will be used by
+ * consumer end-points. For example, if the application intends to send the images to
+ * {@link android.media.MediaCodec} or {@link android.media.MediaRecorder} for hardware video
+ * encoding, the format and usage flag combination needs to be
+ * {@link ImageFormat#PRIVATE PRIVATE} and {@link HardwareBuffer#USAGE0_VIDEO_ENCODE}. When an
+ * {@link ImageReader} object is created with a valid size and such format/usage flag
+ * combination, the application can send the {@link Image images} to an {@link ImageWriter} that
+ * is created with the input {@link android.view.Surface} provided by the
+ * {@link android.media.MediaCodec} or {@link android.media.MediaRecorder}.
+ * </p>
+ * <p>
+ * If the {@code format} is {@link ImageFormat#PRIVATE PRIVATE}, the created {@link ImageReader}
+ * will produce images that are not directly accessible by the application. The application can
+ * still acquire images from this {@link ImageReader}, and send them to the
+ * {@link android.hardware.camera2.CameraDevice camera} for reprocessing, or to the
+ * {@link android.media.MediaCodec} / {@link android.media.MediaRecorder} for hardware video
+ * encoding via {@link ImageWriter} interface. However, the {@link Image#getPlanes()
+ * getPlanes()} will return an empty array for {@link ImageFormat#PRIVATE PRIVATE} format
+ * images. The application can check an existing reader's format by calling
+ * {@link #getImageFormat()}.
+ * </p>
+ * <p>
+ * {@link ImageFormat#PRIVATE PRIVATE} format {@link ImageReader ImageReaders} are more
+ * efficient to use when application access to image data is not necessary, compared to
+ * ImageReaders using other format such as {@link ImageFormat#YUV_420_888 YUV_420_888}.
+ * </p>
+ * <p>
+ * Note that not all format and usage flag combination is supported by the
+ * {@link ImageReader}. Below are the supported combinations by the {@link ImageReader}
+ * (assuming the consumer end-points support such image consumption, e.g., hardware video
+ * encoding).
+ * <table>
+ * <tr>
+ * <th>Format</th>
+ * <th>Compatible usage flags</th>
+ * </tr>
+ * <tr>
+ * <td>non-{@link android.graphics.ImageFormat#PRIVATE PRIVATE} formats defined by
+ * {@link android.graphics.ImageFormat ImageFormat} or
+ * {@link android.graphics.PixelFormat PixelFormat}</td>
+ * <td>{@link HardwareBuffer#USAGE0_CPU_READ} or
+ * {@link HardwareBuffer#USAGE0_CPU_READ_OFTEN}</td>
+ * </tr>
+ * <tr>
+ * <td>{@link android.graphics.ImageFormat#PRIVATE}</td>
+ * <td>{@link HardwareBuffer#USAGE0_VIDEO_ENCODE} or
+ * {@link HardwareBuffer#USAGE0_GPU_SAMPLED_IMAGE}, or combined</td>
+ * </tr>
+ * </table>
+ * Using other combinations may result in {@link IllegalArgumentException}.
+ * </p>
+ * @param width The default width in pixels of the Images that this reader will produce.
+ * @param height The default height in pixels of the Images that this reader will produce.
+ * @param format The format of the Image that this reader will produce. This must be one of the
+ * {@link android.graphics.ImageFormat} or {@link android.graphics.PixelFormat}
+ * constants. Note that not all formats are supported, like ImageFormat.NV21.
+ * @param maxImages The maximum number of images the user will want to access simultaneously.
+ * This should be as small as possible to limit memory use. Once maxImages Images are
+ * obtained by the user, one of them has to be released before a new Image will
+ * become available for access through {@link #acquireLatestImage()} or
+ * {@link #acquireNextImage()}. Must be greater than 0.
+ * @param usage The intended usage of the images produced by this ImageReader. It needs
+ * to be one of the Usage0 defined by {@link HardwareBuffer}, or an
+ * {@link IllegalArgumentException} will be thrown.
+ * @see Image
+ * @see HardwareBuffer
+ * @hide
+ */
+    public static ImageReader newInstance(int width, int height, int format, int maxImages,
+            long usage) {
+        // Reject format/usage pairs the reader cannot service before allocating.
+        boolean allowed = isFormatUsageCombinationAllowed(format, usage);
+        if (!allowed) {
+            throw new IllegalArgumentException("Format usage combination is not supported:"
+                    + " format = " + format + ", usage = " + usage);
+        }
+        return new ImageReader(width, height, format, maxImages, usage);
+    }
+
+ /**
+ * @hide
+ */
+    protected ImageReader(int width, int height, int format, int maxImages, long usage) {
+        // Validate every argument before touching any state, so a failed
+        // construction assigns no fields (the original assigned first and
+        // validated afterwards).
+        if (width < 1 || height < 1) {
+            throw new IllegalArgumentException(
+                "The image dimensions must be positive");
+        }
+        if (maxImages < 1) {
+            throw new IllegalArgumentException(
+                "Maximum outstanding image count must be at least 1");
+        }
+        if (format == ImageFormat.NV21) {
+            throw new IllegalArgumentException(
+                "NV21 format is not supported");
+        }
+
+        mWidth = width;
+        mHeight = height;
+        mFormat = format;
+        mMaxImages = maxImages;
+
+        mNumPlanes = ImageUtils.getNumPlanesForFormat(mFormat);
+
+        nativeInit(new WeakReference<>(this), width, height, format, maxImages, usage);
+
+        mSurface = nativeGetSurface();
+
+        mIsReaderValid = true;
+        // Estimate the native buffer allocation size and register it so it gets accounted for
+        // during GC. Note that this doesn't include the buffers required by the buffer queue
+        // itself and the buffers requested by the producer.
+        // Only include memory for 1 buffer, since actually accounting for the memory used is
+        // complex, and 1 buffer is enough for the VM to treat the ImageReader as being of some
+        // size.
+        mEstimatedNativeAllocBytes = ImageUtils.getEstimatedNativeAllocBytes(
+                width, height, format, /*buffer count*/ 1);
+        VMRuntime.getRuntime().registerNativeAllocation(mEstimatedNativeAllocBytes);
+    }
+
+    /**
+     * The default width of {@link Image Images}, in pixels.
+     *
+     * <p>The width may be overridden by the producer sending buffers to this
+     * ImageReader's Surface. If so, the actual width of the images can be
+     * found using {@link Image#getWidth}.</p>
+     *
+     * @return the expected width of an Image
+     */
+    public int getWidth() {
+        return mWidth;
+    }
+
+    /**
+     * The default height of {@link Image Images}, in pixels.
+     *
+     * <p>The height may be overridden by the producer sending buffers to this
+     * ImageReader's Surface. If so, the actual height of the images can be
+     * found using {@link Image#getHeight}.</p>
+     *
+     * @return the expected height of an Image
+     */
+    public int getHeight() {
+        return mHeight;
+    }
+
+    /**
+     * The default {@link ImageFormat image format} of {@link Image Images}.
+     *
+     * <p>Some color formats may be overridden by the producer sending buffers to
+     * this ImageReader's Surface if the default color format allows. ImageReader
+     * guarantees that all {@link Image Images} acquired from ImageReader
+     * (for example, with {@link #acquireNextImage}) will have a "compatible"
+     * format to what was specified in {@link #newInstance}.
+     * As of now, each format is only compatible to itself.
+     * The actual format of the images can be found using {@link Image#getFormat}.</p>
+     *
+     * @return the expected format of an Image
+     *
+     * @see ImageFormat
+     */
+    public int getImageFormat() {
+        return mFormat;
+    }
+
+    /**
+     * Maximum number of images that can be acquired from the ImageReader at any time (for example,
+     * with {@link #acquireNextImage}).
+     *
+     * <p>An image is considered acquired after it's returned by a function from ImageReader, and
+     * until the Image is {@link Image#close closed} to release the image back to the ImageReader.
+     * </p>
+     *
+     * <p>Attempting to acquire more than {@code maxImages} concurrently will result in the
+     * acquire function throwing a {@link IllegalStateException}. Furthermore,
+     * while the max number of images have been acquired by the ImageReader user, the producer
+     * enqueueing additional images may stall until at least one image has been released. </p>
+     *
+     * @return Maximum number of images for this ImageReader.
+     *
+     * @see Image#close
+     */
+    public int getMaxImages() {
+        return mMaxImages;
+    }
+
+    /**
+     * <p>Get a {@link Surface} that can be used to produce {@link Image Images} for this
+     * {@code ImageReader}.</p>
+     *
+     * <p>Until valid image data is rendered into this {@link Surface}, the
+     * {@link #acquireNextImage} method will return {@code null}. Only one source
+     * can be producing data into this Surface at the same time, although the
+     * same {@link Surface} can be reused with a different API once the first source is
+     * disconnected from the {@link Surface}.</p>
+     *
+     * <p>Please note that holding on to the Surface object returned by this method is not enough
+     * to keep its parent ImageReader from being reclaimed. In that sense, a Surface acts like a
+     * {@link java.lang.ref.WeakReference weak reference} to the ImageReader that provides it.</p>
+     *
+     * @return A {@link Surface} to use for a drawing target for various APIs.
+     */
+    public Surface getSurface() {
+        return mSurface;
+    }
+
+ /**
+ * <p>
+ * Acquire the latest {@link Image} from the ImageReader's queue, dropping older
+ * {@link Image images}. Returns {@code null} if no new image is available.
+ * </p>
+ * <p>
+ * This operation will acquire all the images possible from the ImageReader,
+ * but {@link #close} all images that aren't the latest. This function is
+ * recommended to use over {@link #acquireNextImage} for most use-cases, as it's
+ * more suited for real-time processing.
+ * </p>
+ * <p>
+ * Note that {@link #getMaxImages maxImages} should be at least 2 for
+ * {@link #acquireLatestImage} to be any different than {@link #acquireNextImage} -
+ * discarding all-but-the-newest {@link Image} requires temporarily acquiring two
+ * {@link Image Images} at once. Or more generally, calling {@link #acquireLatestImage}
+ * with less than two images of margin, that is
+ * {@code (maxImages - currentAcquiredImages < 2)} will not discard as expected.
+ * </p>
+ * <p>
+ * This operation will fail by throwing an {@link IllegalStateException} if
+ * {@code maxImages} have been acquired with {@link #acquireLatestImage} or
+ * {@link #acquireNextImage}. In particular a sequence of {@link #acquireLatestImage}
+ * calls greater than {@link #getMaxImages} without calling {@link Image#close} in-between
+ * will exhaust the underlying queue. At such a time, {@link IllegalStateException}
+ * will be thrown until more images are
+ * released with {@link Image#close}.
+ * </p>
+ *
+ * @return latest frame of image data, or {@code null} if no image data is available.
+ * @throws IllegalStateException if too many images are currently acquired
+ */
+    public Image acquireLatestImage() {
+        Image current = acquireNextImage();
+        if (current == null) {
+            return null;
+        }
+        try {
+            while (true) {
+                Image newer = acquireNextImageNoThrowISE();
+                if (newer == null) {
+                    // Nothing fresher queued: hand ownership of the image we
+                    // hold to the caller (null-out so finally won't close it).
+                    Image latest = current;
+                    current = null;
+                    return latest;
+                }
+                // A newer frame is available: drop the one we were holding.
+                current.close();
+                current = newer;
+            }
+        } finally {
+            // Close whatever we still own if we exit abnormally.
+            if (current != null) {
+                current.close();
+            }
+        }
+    }
+
+ /**
+ * Don't throw IllegalStateException if there are too many images acquired.
+ *
+ * @return Image if acquiring succeeded, or null otherwise.
+ *
+ * @hide
+ */
+ public Image acquireNextImageNoThrowISE() {
+ SurfaceImage si = new SurfaceImage(mFormat);
+ return acquireNextSurfaceImage(si) == ACQUIRE_SUCCESS ? si : null;
+ }
+
+ /**
+ * Attempts to acquire the next image from the underlying native implementation.
+ *
+ * <p>
+ * Note that unexpected failures will throw at the JNI level.
+ * </p>
+ *
+ * @param si A blank SurfaceImage.
+ * @return One of the {@code ACQUIRE_*} codes that determine success or failure.
+ *
+ * @see #ACQUIRE_MAX_IMAGES
+ * @see #ACQUIRE_NO_BUFS
+ * @see #ACQUIRE_SUCCESS
+ */
+    private int acquireNextSurfaceImage(SurfaceImage si) {
+        synchronized (mCloseLock) {
+            // A null image will eventually be returned if ImageReader is already closed.
+            int status = ACQUIRE_NO_BUFS;
+            if (mIsReaderValid) {
+                status = nativeImageSetup(si);
+            }
+
+            switch (status) {
+                case ACQUIRE_SUCCESS:
+                    si.mIsImageValid = true;
+                    // Intentional fall-through: SUCCESS shares the break below
+                    // with the two benign failure codes.
+                case ACQUIRE_NO_BUFS:
+                case ACQUIRE_MAX_IMAGES:
+                    break;
+                default:
+                    throw new AssertionError("Unknown nativeImageSetup return code " + status);
+            }
+
+            // Only keep track the successfully acquired image, as the native buffer is only mapped
+            // for such case.
+            if (status == ACQUIRE_SUCCESS) {
+                mAcquiredImages.add(si);
+            }
+            return status;
+        }
+    }
+
+ /**
+ * <p>
+ * Acquire the next Image from the ImageReader's queue. Returns {@code null} if
+ * no new image is available.
+ * </p>
+ *
+ * <p><i>Warning:</i> Consider using {@link #acquireLatestImage()} instead, as it will
+ * automatically release older images, and allow slower-running processing routines to catch
+ * up to the newest frame. Usage of {@link #acquireNextImage} is recommended for
+ * batch/background processing. Incorrectly using this function can cause images to appear
+ * with an ever-increasing delay, followed by a complete stall where no new images seem to
+ * appear.
+ * </p>
+ *
+ * <p>
+ * This operation will fail by throwing an {@link IllegalStateException} if
+ * {@code maxImages} have been acquired with {@link #acquireNextImage} or
+ * {@link #acquireLatestImage}. In particular a sequence of {@link #acquireNextImage} or
+ * {@link #acquireLatestImage} calls greater than {@link #getMaxImages maxImages} without
+ * calling {@link Image#close} in-between will exhaust the underlying queue. At such a time,
+ * {@link IllegalStateException} will be thrown until more images are released with
+ * {@link Image#close}.
+ * </p>
+ *
+ * @return a new frame of image data, or {@code null} if no image data is available.
+ * @throws IllegalStateException if {@code maxImages} images are currently acquired
+ * @see #acquireLatestImage
+ */
+ public Image acquireNextImage() {
+ // Initialize with reader format, but can be overwritten by native if the image
+ // format is different from the reader format.
+ SurfaceImage si = new SurfaceImage(mFormat);
+ int status = acquireNextSurfaceImage(si);
+
+ switch (status) {
+ case ACQUIRE_SUCCESS:
+ return si;
+ case ACQUIRE_NO_BUFS:
+ return null;
+ case ACQUIRE_MAX_IMAGES:
+ throw new IllegalStateException(
+ String.format(
+ "maxImages (%d) has already been acquired, " +
+ "call #close before acquiring more.", mMaxImages));
+ default:
+ throw new AssertionError("Unknown nativeImageSetup return code " + status);
+ }
+ }
+
+ /**
+ * <p>Return the frame to the ImageReader for reuse.</p>
+ */
+ private void releaseImage(Image i) {
+ if (! (i instanceof SurfaceImage) ) {
+ throw new IllegalArgumentException(
+ "This image was not produced by an ImageReader");
+ }
+ SurfaceImage si = (SurfaceImage) i;
+ if (si.mIsImageValid == false) {
+ return;
+ }
+
+ if (si.getReader() != this || !mAcquiredImages.contains(i)) {
+ throw new IllegalArgumentException(
+ "This image was not produced by this ImageReader");
+ }
+
+ si.clearSurfacePlanes();
+ nativeReleaseImage(i);
+ si.mIsImageValid = false;
+ mAcquiredImages.remove(i);
+ }
+
+ /**
+ * Register a listener to be invoked when a new image becomes available
+ * from the ImageReader.
+ *
+ * @param listener
+ * The listener that will be run.
+ * @param handler
+ * The handler on which the listener should be invoked, or null
+ * if the listener should be invoked on the calling thread's looper.
+ * @throws IllegalArgumentException
+ * If no handler specified and the calling thread has no looper.
+ */
+ public void setOnImageAvailableListener(OnImageAvailableListener listener, Handler handler) {
+ synchronized (mListenerLock) {
+ if (listener != null) {
+ Looper looper = handler != null ? handler.getLooper() : Looper.myLooper();
+ if (looper == null) {
+ throw new IllegalArgumentException(
+ "handler is null but the current thread is not a looper");
+ }
+ if (mListenerHandler == null || mListenerHandler.getLooper() != looper) {
+ mListenerHandler = new ListenerHandler(looper);
+ }
+ mListener = listener;
+ } else {
+ mListener = null;
+ mListenerHandler = null;
+ }
+ }
+ }
+
    /**
     * Callback interface for being notified that a new image is available.
     *
     * <p>
     * {@link #onImageAvailable} is invoked once per image, that is, the callback fires for
     * every new frame that becomes available from the ImageReader.
     * </p>
     */
    public interface OnImageAvailableListener {
        /**
         * Callback that is called when a new image is available from ImageReader.
         *
         * @param reader the ImageReader the callback is associated with.
         * @see ImageReader
         * @see Image
         */
        void onImageAvailable(ImageReader reader);
    }
+
    /**
     * Free up all the resources associated with this ImageReader.
     *
     * <p>
     * After calling this method, this ImageReader can not be used. Calling
     * any methods on this ImageReader and Images previously provided by
     * {@link #acquireNextImage} or {@link #acquireLatestImage}
     * will result in an {@link IllegalStateException}, and attempting to read from
     * {@link ByteBuffer ByteBuffers} returned by an earlier
     * {@link Image.Plane#getBuffer Plane#getBuffer} call will
     * have undefined behavior.
     * </p>
     */
    @Override
    public void close() {
        // Unregister the listener first so no further availability callbacks are dispatched.
        setOnImageAvailableListener(null, null);
        if (mSurface != null) mSurface.release();

        /*
         * Close all outstanding acquired images before closing the ImageReader. It is a good
         * practice to close all the images as soon as it is not used to reduce system instantaneous
         * memory pressure. CopyOnWrite list will use a copy of current list content. For the images
         * being closed by other thread (e.g., GC thread), doubling the close call is harmless. For
         * the image being acquired by other threads, mCloseLock is used to synchronize close and
         * acquire operations.
         */
        synchronized (mCloseLock) {
            // Invalidate before releasing anything so concurrent acquires see a closed reader.
            mIsReaderValid = false;
            for (Image image : mAcquiredImages) {
                image.close();
            }
            mAcquiredImages.clear();

            nativeClose();

            if (mEstimatedNativeAllocBytes > 0) {
                // Balance the earlier native-allocation registration so the GC stops
                // accounting for the reader's native buffers.
                VMRuntime.getRuntime().registerNativeFree(mEstimatedNativeAllocBytes);
                mEstimatedNativeAllocBytes = 0;
            }
        }
    }
+
    /**
     * Discard any free buffers owned by this ImageReader.
     *
     * <p>
     * Generally, the ImageReader caches buffers for reuse once they have been
     * allocated, for best performance. However, sometimes it may be important to
     * release all the cached, unused buffers to save on memory.
     * </p>
     * <p>
     * Calling this method will discard all free cached buffers. This does not include any buffers
     * associated with Images acquired from the ImageReader, any filled buffers waiting to be
     * acquired, and any buffers currently in use by the source rendering buffers into the
     * ImageReader's Surface.
     * <p>
     * The ImageReader continues to be usable after this call, but may need to reallocate buffers
     * when more buffers are needed for rendering.
     * </p>
     * @hide
     */
    public void discardFreeBuffers() {
        synchronized (mCloseLock) {
            // NOTE(review): unlike acquireNextSurfaceImage, there is no mIsReaderValid guard
            // here — presumably the native call is safe after nativeClose(); confirm before
            // relying on calling this on a closed reader.
            nativeDiscardFreeBuffers();
        }
    }
+
    // Finalizer safety net: releases native resources if the owner never called close().
    // close() is idempotent, so an explicit close followed by finalization is harmless.
    @Override
    protected void finalize() throws Throwable {
        try {
            close();
        } finally {
            super.finalize();
        }
    }
+
    /**
     * <p>
     * Remove the ownership of this image from the ImageReader.
     * </p>
     * <p>
     * After this call, the ImageReader no longer owns this image, and the image
     * ownership can be transferred to another entity like {@link ImageWriter}
     * via {@link ImageWriter#queueInputImage}. It's up to the new owner to
     * release the resources held by this image. For example, if the ownership
     * of this image is transferred to an {@link ImageWriter}, the image will be
     * freed by the ImageWriter after the image data consumption is done.
     * </p>
     * <p>
     * This method can be used to achieve zero buffer copy for use cases like
     * {@link android.hardware.camera2.CameraDevice Camera2 API} PRIVATE and YUV
     * reprocessing, where the application can select an output image from
     * {@link ImageReader} and transfer this image directly to
     * {@link ImageWriter}, where this image can be consumed by camera directly.
     * For PRIVATE reprocessing, this is the only way to send input buffers to
     * the {@link android.hardware.camera2.CameraDevice camera} for
     * reprocessing.
     * </p>
     * <p>
     * This is a package private method that is only used internally.
     * </p>
     *
     * @param image The image to be detached from this ImageReader.
     * @throws IllegalArgumentException If the image is null, or was not produced by
     *             this ImageReader.
     * @throws IllegalStateException If the ImageReader or image have been
     *             closed, or the image has already been detached.
     */
    void detachImage(Image image) {
        if (image == null) {
            throw new IllegalArgumentException("input image must not be null");
        }
        if (!isImageOwnedbyMe(image)) {
            throw new IllegalArgumentException("Trying to detach an image that is not owned by"
                    + " this ImageReader");
        }

        SurfaceImage si = (SurfaceImage) image;
        si.throwISEIfImageIsInvalid();

        if (si.isAttachable()) {
            throw new IllegalStateException("Image was already detached from this ImageReader");
        }

        nativeDetachImage(image);
        // Drop the Java-side plane wrappers; the native buffer now belongs to the new owner.
        si.clearSurfacePlanes();
        si.mPlanes = null;
        si.setDetached(true);
    }
+
+ private boolean isImageOwnedbyMe(Image image) {
+ if (!(image instanceof SurfaceImage)) {
+ return false;
+ }
+ SurfaceImage si = (SurfaceImage) image;
+ return si.getReader() == this;
+ }
+
+ private static boolean isFormatUsageCombinationAllowed(int format, long usage) {
+ if (!ImageFormat.isPublicFormat(format) && !PixelFormat.isPublicFormat(format)) {
+ return false;
+ }
+
+ // Valid usage needs to be provided.
+ if (usage == BUFFER_USAGE_UNKNOWN) {
+ return false;
+ }
+
+ if (format == ImageFormat.PRIVATE) {
+ // Usage need to be either USAGE0_GPU_SAMPLED_IMAGE or USAGE0_VIDEO_ENCODE or combined.
+ boolean isAllowed = (usage == HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE);
+ isAllowed = isAllowed || (usage == HardwareBuffer.USAGE_VIDEO_ENCODE);
+ isAllowed = isAllowed || (usage ==
+ (HardwareBuffer.USAGE_VIDEO_ENCODE | HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE));
+ return isAllowed;
+ } else {
+ // Usage need to make the buffer CPU readable for explicit format.
+ return ((usage == HardwareBuffer.USAGE_CPU_READ_RARELY) ||
+ (usage == HardwareBuffer.USAGE_CPU_READ_OFTEN));
+ }
+ }
+
    /**
     * Called from Native code when an Event happens.
     *
     * This may be called from an arbitrary Binder thread, so access to the ImageReader must be
     * synchronized appropriately.
     */
    private static void postEventFromNative(Object selfRef) {
        @SuppressWarnings("unchecked")
        WeakReference<ImageReader> weakSelf = (WeakReference<ImageReader>)selfRef;
        // Native holds only a weak reference; the reader may already have been collected.
        final ImageReader ir = weakSelf.get();
        if (ir == null) {
            return;
        }

        // Snapshot the handler under the listener lock, then post outside the lock so
        // the message dispatch never runs with mListenerLock held.
        final Handler handler;
        synchronized (ir.mListenerLock) {
            handler = ir.mListenerHandler;
        }
        if (handler != null) {
            handler.sendEmptyMessage(0);
        }
    }
+
    // Immutable configuration captured at construction time.
    private final int mWidth;
    private final int mHeight;
    private final int mFormat;
    private final int mMaxImages;
    private final int mNumPlanes;
    private final Surface mSurface;
    // Rough native footprint registered with the VM; zeroed again in close().
    private int mEstimatedNativeAllocBytes;

    // mListenerLock guards mListener/mListenerHandler; mCloseLock guards mIsReaderValid
    // and serializes acquire/close/discard against each other.
    private final Object mListenerLock = new Object();
    private final Object mCloseLock = new Object();
    private boolean mIsReaderValid = false;
    private OnImageAvailableListener mListener;
    private ListenerHandler mListenerHandler;
    // Keep track of the successfully acquired Images. This need to be thread safe as the images
    // could be closed by different threads (e.g., application thread and GC thread).
    private List<Image> mAcquiredImages = new CopyOnWriteArrayList<>();

    /**
     * This field is used by native code, do not access or modify.
     */
    private long mNativeContext;
+
+ /**
+ * This custom handler runs asynchronously so callbacks don't get queued behind UI messages.
+ */
+ private final class ListenerHandler extends Handler {
+ public ListenerHandler(Looper looper) {
+ super(looper, null, true /*async*/);
+ }
+
+ @Override
+ public void handleMessage(Message msg) {
+ OnImageAvailableListener listener;
+ synchronized (mListenerLock) {
+ listener = mListener;
+ }
+
+ // It's dangerous to fire onImageAvailable() callback when the ImageReader is being
+ // closed, as application could acquire next image in the onImageAvailable() callback.
+ boolean isReaderValid = false;
+ synchronized (mCloseLock) {
+ isReaderValid = mIsReaderValid;
+ }
+ if (listener != null && isReaderValid) {
+ listener.onImageAvailable(ImageReader.this);
+ }
+ }
+ }
+
    /**
     * Concrete {@link android.media.Image} implementation backed by a buffer of this
     * ImageReader. Instances are created blank and populated by native code during
     * nativeImageSetup(); plane data is materialized lazily via nativeCreatePlanes().
     */
    private class SurfaceImage extends android.media.Image {
        public SurfaceImage(int format) {
            mFormat = format;
        }

        @Override
        public void close() {
            // Delegate to the owning reader so its tracking state stays consistent.
            ImageReader.this.releaseImage(this);
        }

        public ImageReader getReader() {
            return ImageReader.this;
        }

        @Override
        public int getFormat() {
            throwISEIfImageIsInvalid();
            int readerFormat = ImageReader.this.getImageFormat();
            // Assume opaque reader always produce opaque images.
            mFormat = (readerFormat == ImageFormat.PRIVATE) ? readerFormat :
                nativeGetFormat(readerFormat);
            return mFormat;
        }

        @Override
        public int getWidth() {
            throwISEIfImageIsInvalid();
            int width;
            switch(getFormat()) {
                case ImageFormat.JPEG:
                case ImageFormat.DEPTH_POINT_CLOUD:
                case ImageFormat.RAW_PRIVATE:
                    // Blob-like/opaque formats report the reader's configured size
                    // rather than a per-buffer size from native.
                    width = ImageReader.this.getWidth();
                    break;
                default:
                    width = nativeGetWidth();
            }
            return width;
        }

        @Override
        public int getHeight() {
            throwISEIfImageIsInvalid();
            int height;
            switch(getFormat()) {
                case ImageFormat.JPEG:
                case ImageFormat.DEPTH_POINT_CLOUD:
                case ImageFormat.RAW_PRIVATE:
                    // See getWidth(): these formats use the reader's configured size.
                    height = ImageReader.this.getHeight();
                    break;
                default:
                    height = nativeGetHeight();
            }
            return height;
        }

        @Override
        public long getTimestamp() {
            throwISEIfImageIsInvalid();
            return mTimestamp;
        }

        @Override
        public void setTimestamp(long timestampNs) {
            throwISEIfImageIsInvalid();
            mTimestamp = timestampNs;
        }

        @Override
        public Plane[] getPlanes() {
            throwISEIfImageIsInvalid();

            // Lazily create the plane wrappers on first access; they are cached until
            // clearSurfacePlanes() tears them down.
            if (mPlanes == null) {
                mPlanes = nativeCreatePlanes(ImageReader.this.mNumPlanes, ImageReader.this.mFormat);
            }
            // Shallow copy is fine.
            return mPlanes.clone();
        }

        @Override
        protected final void finalize() throws Throwable {
            // Safety net: releases the buffer if the application never closed the image.
            try {
                close();
            } finally {
                super.finalize();
            }
        }

        @Override
        boolean isAttachable() {
            throwISEIfImageIsInvalid();
            // A detached image can be attached to another owner (e.g. an ImageWriter).
            return mIsDetached.get();
        }

        @Override
        ImageReader getOwner() {
            throwISEIfImageIsInvalid();
            return ImageReader.this;
        }

        @Override
        long getNativeContext() {
            throwISEIfImageIsInvalid();
            return mNativeBuffer;
        }

        private void setDetached(boolean detached) {
            throwISEIfImageIsInvalid();
            mIsDetached.getAndSet(detached);
        }

        private void clearSurfacePlanes() {
            // Image#getPlanes may not be called before the image is closed.
            if (mIsImageValid && mPlanes != null) {
                for (int i = 0; i < mPlanes.length; i++) {
                    if (mPlanes[i] != null) {
                        mPlanes[i].clearBuffer();
                        mPlanes[i] = null;
                    }
                }
            }
        }

        private class SurfacePlane extends android.media.Image.Plane {
            // SurfacePlane instance is created by native code when SurfaceImage#getPlanes() is
            // called
            private SurfacePlane(int rowStride, int pixelStride, ByteBuffer buffer) {
                mRowStride = rowStride;
                mPixelStride = pixelStride;
                mBuffer = buffer;
                /**
                 * Set the byteBuffer order according to host endianness (native
                 * order), otherwise, the byteBuffer order defaults to
                 * ByteOrder.BIG_ENDIAN.
                 */
                mBuffer.order(ByteOrder.nativeOrder());
            }

            @Override
            public ByteBuffer getBuffer() {
                throwISEIfImageIsInvalid();
                return mBuffer;
            }

            @Override
            public int getPixelStride() {
                SurfaceImage.this.throwISEIfImageIsInvalid();
                // RAW_PRIVATE has an opaque layout; stride queries are meaningless.
                if (ImageReader.this.mFormat == ImageFormat.RAW_PRIVATE) {
                    throw new UnsupportedOperationException(
                            "getPixelStride is not supported for RAW_PRIVATE plane");
                }
                return mPixelStride;
            }

            @Override
            public int getRowStride() {
                SurfaceImage.this.throwISEIfImageIsInvalid();
                // RAW_PRIVATE has an opaque layout; stride queries are meaningless.
                if (ImageReader.this.mFormat == ImageFormat.RAW_PRIVATE) {
                    throw new UnsupportedOperationException(
                            "getRowStride is not supported for RAW_PRIVATE plane");
                }
                return mRowStride;
            }

            private void clearBuffer() {
                // Need null check first, as the getBuffer() may not be called before an image
                // is closed.
                if (mBuffer == null) {
                    return;
                }

                // Free the direct buffer eagerly rather than waiting for GC, since it maps
                // native memory.
                if (mBuffer.isDirect()) {
                    NioUtils.freeDirectBuffer(mBuffer);
                }
                mBuffer = null;
            }

            final private int mPixelStride;
            final private int mRowStride;

            private ByteBuffer mBuffer;
        }

        /**
         * This field is used to keep track of native object and used by native code only.
         * Don't modify.
         */
        private long mNativeBuffer;

        /**
         * This field is set by native code during nativeImageSetup().
         */
        private long mTimestamp;

        // Lazily-created plane wrappers; see getPlanes()/clearSurfacePlanes().
        private SurfacePlane[] mPlanes;
        private int mFormat = ImageFormat.UNKNOWN;
        // If this image is detached from the ImageReader.
        private AtomicBoolean mIsDetached = new AtomicBoolean(false);

        private synchronized native SurfacePlane[] nativeCreatePlanes(int numPlanes,
                int readerFormat);
        private synchronized native int nativeGetWidth();
        private synchronized native int nativeGetHeight();
        private synchronized native int nativeGetFormat(int readerFormat);
    }
+
    // JNI entry points; declared synchronized to serialize access to the native context.
    private synchronized native void nativeInit(Object weakSelf, int w, int h,
                                                    int fmt, int maxImgs, long consumerUsage);
    private synchronized native void nativeClose();
    private synchronized native void nativeReleaseImage(Image i);
    private synchronized native Surface nativeGetSurface();
    private synchronized native int nativeDetachImage(Image i);
    private synchronized native void nativeDiscardFreeBuffers();

    /**
     * @return A return code {@code ACQUIRE_*}
     *
     * @see #ACQUIRE_SUCCESS
     * @see #ACQUIRE_NO_BUFS
     * @see #ACQUIRE_MAX_IMAGES
     */
    private synchronized native int nativeImageSetup(Image i);

    /**
     * We use a class initializer to allow the native code to cache some
     * field offsets.
     */
    private static native void nativeClassInit();
    static {
        System.loadLibrary("media_jni");
        nativeClassInit();
    }
+}
diff --git a/android/media/ImageUtils.java b/android/media/ImageUtils.java
new file mode 100644
index 00000000..2a0e04eb
--- /dev/null
+++ b/android/media/ImageUtils.java
@@ -0,0 +1,276 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.graphics.ImageFormat;
+import android.graphics.PixelFormat;
+import android.media.Image.Plane;
+import android.util.Size;
+
+import libcore.io.Memory;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Package private utility class for hosting commonly used Image related methods.
+ */
+class ImageUtils {
+
+ /**
+ * Only a subset of the formats defined in
+ * {@link android.graphics.ImageFormat ImageFormat} and
+ * {@link android.graphics.PixelFormat PixelFormat} are supported by
+ * ImageReader. When reading RGB data from a surface, the formats defined in
+ * {@link android.graphics.PixelFormat PixelFormat} can be used; when
+ * reading YUV, JPEG or raw sensor data (for example, from the camera or video
+ * decoder), formats from {@link android.graphics.ImageFormat ImageFormat}
+ * are used.
+ */
+ public static int getNumPlanesForFormat(int format) {
+ switch (format) {
+ case ImageFormat.YV12:
+ case ImageFormat.YUV_420_888:
+ case ImageFormat.NV21:
+ return 3;
+ case ImageFormat.NV16:
+ return 2;
+ case PixelFormat.RGB_565:
+ case PixelFormat.RGBA_8888:
+ case PixelFormat.RGBX_8888:
+ case PixelFormat.RGB_888:
+ case ImageFormat.JPEG:
+ case ImageFormat.YUY2:
+ case ImageFormat.Y8:
+ case ImageFormat.Y16:
+ case ImageFormat.RAW_SENSOR:
+ case ImageFormat.RAW_PRIVATE:
+ case ImageFormat.RAW10:
+ case ImageFormat.RAW12:
+ case ImageFormat.DEPTH16:
+ case ImageFormat.DEPTH_POINT_CLOUD:
+ case ImageFormat.RAW_DEPTH:
+ return 1;
+ case ImageFormat.PRIVATE:
+ return 0;
+ default:
+ throw new UnsupportedOperationException(
+ String.format("Invalid format specified %d", format));
+ }
+ }
+
+ /**
+ * <p>
+ * Copy source image data to destination Image.
+ * </p>
+ * <p>
+ * Only support the copy between two non-{@link ImageFormat#PRIVATE PRIVATE} format
+ * images with same properties (format, size, etc.). The data from the
+ * source image will be copied to the byteBuffers from the destination Image
+ * starting from position zero, and the destination image will be rewound to
+ * zero after copy is done.
+ * </p>
+ *
+ * @param src The source image to be copied from.
+ * @param dst The destination image to be copied to.
+ * @throws IllegalArgumentException If the source and destination images
+ * have different format, or one of the images is not copyable.
+ */
+ public static void imageCopy(Image src, Image dst) {
+ if (src == null || dst == null) {
+ throw new IllegalArgumentException("Images should be non-null");
+ }
+ if (src.getFormat() != dst.getFormat()) {
+ throw new IllegalArgumentException("Src and dst images should have the same format");
+ }
+ if (src.getFormat() == ImageFormat.PRIVATE ||
+ dst.getFormat() == ImageFormat.PRIVATE) {
+ throw new IllegalArgumentException("PRIVATE format images are not copyable");
+ }
+ if (src.getFormat() == ImageFormat.RAW_PRIVATE) {
+ throw new IllegalArgumentException(
+ "Copy of RAW_OPAQUE format has not been implemented");
+ }
+ if (src.getFormat() == ImageFormat.RAW_DEPTH) {
+ throw new IllegalArgumentException(
+ "Copy of RAW_DEPTH format has not been implemented");
+ }
+ if (!(dst.getOwner() instanceof ImageWriter)) {
+ throw new IllegalArgumentException("Destination image is not from ImageWriter. Only"
+ + " the images from ImageWriter are writable");
+ }
+ Size srcSize = new Size(src.getWidth(), src.getHeight());
+ Size dstSize = new Size(dst.getWidth(), dst.getHeight());
+ if (!srcSize.equals(dstSize)) {
+ throw new IllegalArgumentException("source image size " + srcSize + " is different"
+ + " with " + "destination image size " + dstSize);
+ }
+
+ Plane[] srcPlanes = src.getPlanes();
+ Plane[] dstPlanes = dst.getPlanes();
+ ByteBuffer srcBuffer = null;
+ ByteBuffer dstBuffer = null;
+ for (int i = 0; i < srcPlanes.length; i++) {
+ int srcRowStride = srcPlanes[i].getRowStride();
+ int dstRowStride = dstPlanes[i].getRowStride();
+ srcBuffer = srcPlanes[i].getBuffer();
+ dstBuffer = dstPlanes[i].getBuffer();
+ if (!(srcBuffer.isDirect() && dstBuffer.isDirect())) {
+ throw new IllegalArgumentException("Source and destination ByteBuffers must be"
+ + " direct byteBuffer!");
+ }
+ if (srcPlanes[i].getPixelStride() != dstPlanes[i].getPixelStride()) {
+ throw new IllegalArgumentException("Source plane image pixel stride " +
+ srcPlanes[i].getPixelStride() +
+ " must be same as destination image pixel stride " +
+ dstPlanes[i].getPixelStride());
+ }
+
+ int srcPos = srcBuffer.position();
+ srcBuffer.rewind();
+ dstBuffer.rewind();
+ if (srcRowStride == dstRowStride) {
+ // Fast path, just copy the content if the byteBuffer all together.
+ dstBuffer.put(srcBuffer);
+ } else {
+ // Source and destination images may have different alignment requirements,
+ // therefore may have different strides. Copy row by row for such case.
+ int srcOffset = srcBuffer.position();
+ int dstOffset = dstBuffer.position();
+ Size effectivePlaneSize = getEffectivePlaneSizeForImage(src, i);
+ int srcByteCount = effectivePlaneSize.getWidth() * srcPlanes[i].getPixelStride();
+ for (int row = 0; row < effectivePlaneSize.getHeight(); row++) {
+ if (row == effectivePlaneSize.getHeight() - 1) {
+ // Special case for NV21 backed YUV420_888: need handle the last row
+ // carefully to avoid memory corruption. Check if we have enough bytes to
+ // copy.
+ int remainingBytes = srcBuffer.remaining() - srcOffset;
+ if (srcByteCount > remainingBytes) {
+ srcByteCount = remainingBytes;
+ }
+ }
+ directByteBufferCopy(srcBuffer, srcOffset, dstBuffer, dstOffset, srcByteCount);
+ srcOffset += srcRowStride;
+ dstOffset += dstRowStride;
+ }
+ }
+
+ srcBuffer.position(srcPos);
+ dstBuffer.rewind();
+ }
+ }
+
+ /**
+ * Return the estimated native allocation size in bytes based on width, height, format,
+ * and number of images.
+ *
+ * <p>This is a very rough estimation and should only be used for native allocation
+ * registration in VM so it can be accounted for during GC.</p>
+ *
+ * @param width The width of the images.
+ * @param height The height of the images.
+ * @param format The format of the images.
+ * @param numImages The number of the images.
+ */
+ public static int getEstimatedNativeAllocBytes(int width, int height, int format,
+ int numImages) {
+ double estimatedBytePerPixel;
+ switch (format) {
+ // 10x compression from RGB_888
+ case ImageFormat.JPEG:
+ case ImageFormat.DEPTH_POINT_CLOUD:
+ estimatedBytePerPixel = 0.3;
+ break;
+ case ImageFormat.Y8:
+ estimatedBytePerPixel = 1.0;
+ break;
+ case ImageFormat.RAW10:
+ estimatedBytePerPixel = 1.25;
+ break;
+ case ImageFormat.YV12:
+ case ImageFormat.YUV_420_888:
+ case ImageFormat.NV21:
+ case ImageFormat.RAW12:
+ case ImageFormat.PRIVATE: // A rough estimate because the real size is unknown.
+ estimatedBytePerPixel = 1.5;
+ break;
+ case ImageFormat.NV16:
+ case PixelFormat.RGB_565:
+ case ImageFormat.YUY2:
+ case ImageFormat.Y16:
+ case ImageFormat.RAW_DEPTH:
+ case ImageFormat.RAW_SENSOR:
+ case ImageFormat.RAW_PRIVATE: // round estimate, real size is unknown
+ case ImageFormat.DEPTH16:
+ estimatedBytePerPixel = 2.0;
+ break;
+ case PixelFormat.RGB_888:
+ estimatedBytePerPixel = 3.0;
+ break;
+ case PixelFormat.RGBA_8888:
+ case PixelFormat.RGBX_8888:
+ estimatedBytePerPixel = 4.0;
+ break;
+ default:
+ throw new UnsupportedOperationException(
+ String.format("Invalid format specified %d", format));
+ }
+
+ return (int)(width * height * estimatedBytePerPixel * numImages);
+ }
+
+ private static Size getEffectivePlaneSizeForImage(Image image, int planeIdx) {
+ switch (image.getFormat()) {
+ case ImageFormat.YV12:
+ case ImageFormat.YUV_420_888:
+ case ImageFormat.NV21:
+ if (planeIdx == 0) {
+ return new Size(image.getWidth(), image.getHeight());
+ } else {
+ return new Size(image.getWidth() / 2, image.getHeight() / 2);
+ }
+ case ImageFormat.NV16:
+ if (planeIdx == 0) {
+ return new Size(image.getWidth(), image.getHeight());
+ } else {
+ return new Size(image.getWidth(), image.getHeight() / 2);
+ }
+ case PixelFormat.RGB_565:
+ case PixelFormat.RGBA_8888:
+ case PixelFormat.RGBX_8888:
+ case PixelFormat.RGB_888:
+ case ImageFormat.JPEG:
+ case ImageFormat.YUY2:
+ case ImageFormat.Y8:
+ case ImageFormat.Y16:
+ case ImageFormat.RAW_SENSOR:
+ case ImageFormat.RAW10:
+ case ImageFormat.RAW12:
+ case ImageFormat.RAW_DEPTH:
+ return new Size(image.getWidth(), image.getHeight());
+ case ImageFormat.PRIVATE:
+ return new Size(0, 0);
+ default:
+ throw new UnsupportedOperationException(
+ String.format("Invalid image format %d", image.getFormat()));
+ }
+ }
+
+ private static void directByteBufferCopy(ByteBuffer srcBuffer, int srcOffset,
+ ByteBuffer dstBuffer, int dstOffset, int srcByteCount) {
+ Memory.memmove(dstBuffer, dstOffset, srcBuffer, srcOffset, srcByteCount);
+ }
+}
diff --git a/android/media/ImageWriter.java b/android/media/ImageWriter.java
new file mode 100644
index 00000000..2b7309f1
--- /dev/null
+++ b/android/media/ImageWriter.java
@@ -0,0 +1,877 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.graphics.ImageFormat;
+import android.graphics.PixelFormat;
+import android.graphics.Rect;
+import android.hardware.camera2.utils.SurfaceUtils;
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Message;
+import android.util.Size;
+import android.view.Surface;
+
+import dalvik.system.VMRuntime;
+
+import java.lang.ref.WeakReference;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.NioUtils;
+import java.util.List;
+import java.util.concurrent.CopyOnWriteArrayList;
+
+/**
+ * <p>
+ * The ImageWriter class allows an application to produce Image data into a
+ * {@link android.view.Surface}, and have it be consumed by another component
+ * like {@link android.hardware.camera2.CameraDevice CameraDevice}.
+ * </p>
+ * <p>
+ * Several Android API classes can provide input {@link android.view.Surface
+ * Surface} objects for ImageWriter to produce data into, including
+ * {@link MediaCodec MediaCodec} (encoder),
+ * {@link android.hardware.camera2.CameraCaptureSession CameraCaptureSession}
+ * (reprocessing input), {@link ImageReader}, etc.
+ * </p>
+ * <p>
+ * The input Image data is encapsulated in {@link Image} objects. To produce
+ * Image data into a destination {@link android.view.Surface Surface}, the
+ * application can get an input Image via {@link #dequeueInputImage} then write
+ * Image data into it. Multiple such {@link Image} objects can be dequeued at
+ * the same time and queued back in any order, up to the number specified by the
+ * {@code maxImages} constructor parameter.
+ * </p>
+ * <p>
+ * If the application already has an Image from {@link ImageReader}, the
+ * application can directly queue this Image into ImageWriter (via
+ * {@link #queueInputImage}), potentially with zero buffer copies. For the
+ * {@link ImageFormat#PRIVATE PRIVATE} format Images produced by
+ * {@link ImageReader}, this is the only way to send Image data to ImageWriter,
+ * as the Image data aren't accessible by the application.
+ * </p>
+ * Once new input Images are queued into an ImageWriter, it's up to the
+ * downstream components (e.g. {@link ImageReader} or
+ * {@link android.hardware.camera2.CameraDevice}) to consume the Images. If the
+ * downstream components cannot consume the Images at least as fast as the
+ * ImageWriter production rate, the {@link #dequeueInputImage} call will
+ * eventually block and the application will have to drop input frames.
+ * </p>
+ * <p>
+ * If the consumer component that provided the input {@link android.view.Surface Surface}
+ * abandons the {@link android.view.Surface Surface}, {@link #queueInputImage queueing}
+ * or {@link #dequeueInputImage dequeueing} an {@link Image} will throw an
+ * {@link IllegalStateException}.
+ * </p>
+ */
+public class ImageWriter implements AutoCloseable {
+ private final Object mListenerLock = new Object();
+ private OnImageReleasedListener mListener;
+ private ListenerHandler mListenerHandler;
+ private long mNativeContext;
+
+ // Field below is used by native code, do not access or modify.
+ private int mWriterFormat;
+
+ private final int mMaxImages;
+    // Keep track of the currently dequeued Images. This needs to be thread-safe as the images
+    // could be closed by different threads (e.g., application thread and GC thread).
+ private List<Image> mDequeuedImages = new CopyOnWriteArrayList<>();
+ private int mEstimatedNativeAllocBytes;
+
+ /**
+ * <p>
+ * Create a new ImageWriter.
+ * </p>
+ * <p>
+ * The {@code maxImages} parameter determines the maximum number of
+     * {@link Image} objects that can be dequeued from the
+ * {@code ImageWriter} simultaneously. Requesting more buffers will use up
+ * more memory, so it is important to use only the minimum number necessary.
+ * </p>
+ * <p>
+ * The input Image size and format depend on the Surface that is provided by
+ * the downstream consumer end-point.
+ * </p>
+ *
+ * @param surface The destination Surface this writer produces Image data
+ * into.
+ * @param maxImages The maximum number of Images the user will want to
+ * access simultaneously for producing Image data. This should be
+ * as small as possible to limit memory use. Once maxImages
+ * Images are dequeued by the user, one of them has to be queued
+ * back before a new Image can be dequeued for access via
+ * {@link #dequeueInputImage()}.
+ * @return a new ImageWriter instance.
+ */
+    public static ImageWriter newInstance(Surface surface, int maxImages) {
+        // UNKNOWN tells the constructor to derive the format from the Surface itself.
+        return new ImageWriter(surface, maxImages, ImageFormat.UNKNOWN);
+    }
+
+ /**
+ * <p>
+ * Create a new ImageWriter with given number of max Images and format.
+ * </p>
+ * <p>
+ * The {@code maxImages} parameter determines the maximum number of
+     * {@link Image} objects that can be dequeued from the
+ * {@code ImageWriter} simultaneously. Requesting more buffers will use up
+ * more memory, so it is important to use only the minimum number necessary.
+ * </p>
+ * <p>
+ * The format specifies the image format of this ImageWriter. The format
+ * from the {@code surface} will be overridden with this format. For example,
+ * if the surface is obtained from a {@link android.graphics.SurfaceTexture}, the default
+ * format may be {@link PixelFormat#RGBA_8888}. If the application creates an ImageWriter
+ * with this surface and {@link ImageFormat#PRIVATE}, this ImageWriter will be able to operate
+ * with {@link ImageFormat#PRIVATE} Images.
+ * </p>
+ * <p>
+ * Note that the consumer end-point may or may not be able to support Images with different
+ * format, for such case, the application should only use this method if the consumer is able
+ * to consume such images.
+ * </p>
+ * <p>
+ * The input Image size depends on the Surface that is provided by
+ * the downstream consumer end-point.
+ * </p>
+ *
+ * @param surface The destination Surface this writer produces Image data
+ * into.
+ * @param maxImages The maximum number of Images the user will want to
+ * access simultaneously for producing Image data. This should be
+ * as small as possible to limit memory use. Once maxImages
+ * Images are dequeued by the user, one of them has to be queued
+ * back before a new Image can be dequeued for access via
+ * {@link #dequeueInputImage()}.
+ * @param format The format of this ImageWriter. It can be any valid format specified by
+ * {@link ImageFormat} or {@link PixelFormat}.
+ *
+ * @return a new ImageWriter instance.
+ * @hide
+ */
+    public static ImageWriter newInstance(Surface surface, int maxImages, int format) {
+        // Reject formats that are neither public ImageFormat nor public PixelFormat values;
+        // surface/maxImages validation is handled by the constructor.
+        if (!ImageFormat.isPublicFormat(format) && !PixelFormat.isPublicFormat(format)) {
+            throw new IllegalArgumentException("Invalid format is specified: " + format);
+        }
+        return new ImageWriter(surface, maxImages, format);
+    }
+
+    /**
+     * Construct an ImageWriter producing into {@code surface}.
+     *
+     * @param surface destination Surface; must be non-null.
+     * @param maxImages maximum number of simultaneously dequeued Images; must be >= 1.
+     * @param format image format, or {@link ImageFormat#UNKNOWN} to adopt the Surface's format.
+     * @throws IllegalArgumentException if surface is null or maxImages < 1.
+     * @hide
+     */
+    protected ImageWriter(Surface surface, int maxImages, int format) {
+        if (surface == null || maxImages < 1) {
+            throw new IllegalArgumentException("Illegal input argument: surface " + surface
+                    + ", maxImages: " + maxImages);
+        }
+
+        mMaxImages = maxImages;
+
+        if (format == ImageFormat.UNKNOWN) {
+            format = SurfaceUtils.getSurfaceFormat(surface);
+        }
+        // Note that the underlying BufferQueue is working in synchronous mode
+        // to avoid dropping any buffers.
+        mNativeContext = nativeInit(new WeakReference<>(this), surface, maxImages, format);
+
+        // Estimate the native buffer allocation size and register it so it gets accounted for
+        // during GC. Note that this doesn't include the buffers required by the buffer queue
+        // itself and the buffers requested by the producer.
+        // Only include memory for 1 buffer, since actually accounting for the memory used is
+        // complex, and 1 buffer is enough for the VM to treat the ImageWriter as being of some
+        // size.
+        Size surfSize = SurfaceUtils.getSurfaceSize(surface);
+        mEstimatedNativeAllocBytes =
+                ImageUtils.getEstimatedNativeAllocBytes(surfSize.getWidth(),surfSize.getHeight(),
+                        format, /*buffer count*/ 1);
+        VMRuntime.getRuntime().registerNativeAllocation(mEstimatedNativeAllocBytes);
+    }
+
+ /**
+ * <p>
+ * Maximum number of Images that can be dequeued from the ImageWriter
+ * simultaneously (for example, with {@link #dequeueInputImage()}).
+ * </p>
+ * <p>
+ * An Image is considered dequeued after it's returned by
+ * {@link #dequeueInputImage()} from ImageWriter, and until the Image is
+ * sent back to ImageWriter via {@link #queueInputImage}, or
+ * {@link Image#close()}.
+ * </p>
+ * <p>
+ * Attempting to dequeue more than {@code maxImages} concurrently will
+ * result in the {@link #dequeueInputImage()} function throwing an
+ * {@link IllegalStateException}.
+ * </p>
+ *
+ * @return Maximum number of Images that can be dequeued from this
+ * ImageWriter.
+ * @see #dequeueInputImage
+ * @see #queueInputImage
+ * @see Image#close
+ */
+    public int getMaxImages() {
+        // Fixed at construction; never changes for the lifetime of this writer.
+        return mMaxImages;
+    }
+
+ /**
+ * <p>
+ * Dequeue the next available input Image for the application to produce
+ * data into.
+ * </p>
+ * <p>
+ * This method requests a new input Image from ImageWriter. The application
+ * owns this Image after this call. Once the application fills the Image
+ * data, it is expected to return this Image back to ImageWriter for
+ * downstream consumer components (e.g.
+ * {@link android.hardware.camera2.CameraDevice}) to consume. The Image can
+ * be returned to ImageWriter via {@link #queueInputImage} or
+ * {@link Image#close()}.
+ * </p>
+ * <p>
+ * This call will block if all available input images have been queued by
+ * the application and the downstream consumer has not yet consumed any.
+ * When an Image is consumed by the downstream consumer and released, an
+ * {@link OnImageReleasedListener#onImageReleased} callback will be fired,
+ * which indicates that there is one input Image available. For non-
+ * {@link ImageFormat#PRIVATE PRIVATE} formats (
+ * {@link ImageWriter#getFormat()} != {@link ImageFormat#PRIVATE}), it is
+ * recommended to dequeue the next Image only after this callback is fired,
+ * in the steady state.
+ * </p>
+ * <p>
+ * If the format of ImageWriter is {@link ImageFormat#PRIVATE PRIVATE} (
+ * {@link ImageWriter#getFormat()} == {@link ImageFormat#PRIVATE}), the
+ * image buffer is inaccessible to the application, and calling this method
+ * will result in an {@link IllegalStateException}. Instead, the application
+ * should acquire images from some other component (e.g. an
+ * {@link ImageReader}), and queue them directly to this ImageWriter via the
+ * {@link ImageWriter#queueInputImage queueInputImage()} method.
+ * </p>
+ *
+ * @return The next available input Image from this ImageWriter.
+ * @throws IllegalStateException if {@code maxImages} Images are currently
+ * dequeued, or the ImageWriter format is
+ * {@link ImageFormat#PRIVATE PRIVATE}, or the input
+ * {@link android.view.Surface Surface} has been abandoned by the
+ * consumer component that provided the {@link android.view.Surface Surface}.
+ * @see #queueInputImage
+ * @see Image#close
+ */
+    public Image dequeueInputImage() {
+        // PRIVATE-format buffers are opaque to the application; they can only be queued
+        // via queueInputImage() from another owner, never dequeued for direct writing.
+        if (mWriterFormat == ImageFormat.PRIVATE) {
+            throw new IllegalStateException(
+                "PRIVATE format ImageWriter doesn't support this operation since the images are"
+                    + " inaccessible to the application!");
+        }
+
+        if (mDequeuedImages.size() >= mMaxImages) {
+            throw new IllegalStateException("Already dequeued max number of Images " + mMaxImages);
+        }
+        WriterSurfaceImage newImage = new WriterSurfaceImage(this);
+        // May block until the downstream consumer frees a buffer (see class javadoc).
+        nativeDequeueInputImage(mNativeContext, newImage);
+        mDequeuedImages.add(newImage);
+        // Mark valid only after the native dequeue succeeded.
+        newImage.mIsImageValid = true;
+        return newImage;
+    }
+
+ /**
+ * <p>
+ * Queue an input {@link Image} back to ImageWriter for the downstream
+ * consumer to access.
+ * </p>
+ * <p>
+ * The input {@link Image} could be from ImageReader (acquired via
+ * {@link ImageReader#acquireNextImage} or
+ * {@link ImageReader#acquireLatestImage}), or from this ImageWriter
+ * (acquired via {@link #dequeueInputImage}). In the former case, the Image
+ * data will be moved to this ImageWriter. Note that the Image properties
+ * (size, format, strides, etc.) must be the same as the properties of the
+ * images dequeued from this ImageWriter, or this method will throw an
+ * {@link IllegalArgumentException}. In the latter case, the application has
+ * filled the input image with data. This method then passes the filled
+ * buffer to the downstream consumer. In both cases, it's up to the caller
+ * to ensure that the Image timestamp (in nanoseconds) is correctly set, as
+ * the downstream component may want to use it to indicate the Image data
+ * capture time.
+ * </p>
+ * <p>
+ * After this method is called and the downstream consumer consumes and
+ * releases the Image, an {@link OnImageReleasedListener#onImageReleased}
+ * callback will fire. The application can use this callback to avoid
+ * sending Images faster than the downstream consumer processing rate in
+ * steady state.
+ * </p>
+ * <p>
+ * Passing in an Image from some other component (e.g. an
+ * {@link ImageReader}) requires a free input Image from this ImageWriter as
+ * the destination. In this case, this call will block, as
+ * {@link #dequeueInputImage} does, if there are no free Images available.
+ * To avoid blocking, the application should ensure that there is at least
+ * one free Image available in this ImageWriter before calling this method.
+ * </p>
+ * <p>
+ * After this call, the input Image is no longer valid for further access,
+ * as if the Image is {@link Image#close closed}. Attempting to access the
+ * {@link ByteBuffer ByteBuffers} returned by an earlier
+ * {@link Image.Plane#getBuffer Plane#getBuffer} call will result in an
+ * {@link IllegalStateException}.
+ * </p>
+ *
+ * @param image The Image to be queued back to ImageWriter for future
+ * consumption.
+ * @throws IllegalStateException if the image was already queued previously,
+ * or the image was aborted previously, or the input
+ * {@link android.view.Surface Surface} has been abandoned by the
+ * consumer component that provided the
+ * {@link android.view.Surface Surface}.
+ * @see #dequeueInputImage()
+ */
+    public void queueInputImage(Image image) {
+        if (image == null) {
+            throw new IllegalArgumentException("image shouldn't be null");
+        }
+        boolean ownedByMe = isImageOwnedByMe(image);
+        if (ownedByMe && !(((WriterSurfaceImage) image).mIsImageValid)) {
+            throw new IllegalStateException("Image from ImageWriter is invalid");
+        }
+
+        // For images from other components, need to detach first, then attach.
+        if (!ownedByMe) {
+            if (!(image.getOwner() instanceof ImageReader)) {
+                throw new IllegalArgumentException("Only images from ImageReader can be queued to"
+                        + " ImageWriter, other image source is not supported yet!");
+            }
+
+            ImageReader prevOwner = (ImageReader) image.getOwner();
+
+            prevOwner.detachImage(image);
+            attachAndQueueInputImage(image);
+            // This clears the native reference held by the original owner.
+            // When this Image is detached later by this ImageWriter, the
+            // native memory won't be leaked.
+            image.close();
+            return;
+        }
+
+        Rect crop = image.getCropRect();
+        nativeQueueInputImage(mNativeContext, image, image.getTimestamp(), crop.left, crop.top,
+                crop.right, crop.bottom);
+
+        /**
+         * Only remove and cleanup the Images that are owned by this
+         * ImageWriter. Images detached from other owners are only temporarily
+         * owned by this ImageWriter and will be detached immediately after they
+         * are released by downstream consumers, so there is no need to keep
+         * track of them in mDequeuedImages.
+         */
+        // NOTE(review): ownedByMe is always true at this point (the !ownedByMe path
+        // returned above); the guard is kept defensively.
+        if (ownedByMe) {
+            mDequeuedImages.remove(image);
+            // Do not call close here, as close is essentially cancel image.
+            WriterSurfaceImage wi = (WriterSurfaceImage) image;
+            wi.clearSurfacePlanes();
+            wi.mIsImageValid = false;
+        }
+    }
+
+ /**
+ * Get the ImageWriter format.
+ * <p>
+ * This format may be different than the Image format returned by
+ * {@link Image#getFormat()}. However, if the ImageWriter format is
+ * {@link ImageFormat#PRIVATE PRIVATE}, calling {@link #dequeueInputImage()}
+ * will result in an {@link IllegalStateException}.
+ * </p>
+ *
+ * @return The ImageWriter format.
+ */
+    public int getFormat() {
+        // mWriterFormat is flagged above as native-accessed — presumably written during
+        // nativeInit(); confirm before relying on it elsewhere.
+        return mWriterFormat;
+    }
+
+ /**
+     * ImageWriter callback interface, used to asynchronously notify the
+ * application of various ImageWriter events.
+ */
+ public interface OnImageReleasedListener {
+ /**
+ * <p>
+ * Callback that is called when an input Image is released back to
+ * ImageWriter after the data consumption.
+ * </p>
+ * <p>
+ * The client can use this callback to be notified that an input Image
+ * has been consumed and released by the downstream consumer. More
+ * specifically, this callback will be fired for below cases:
+ * <li>The application dequeues an input Image via the
+ * {@link ImageWriter#dequeueInputImage dequeueInputImage()} method,
+ * uses it, and then queues it back to this ImageWriter via the
+ * {@link ImageWriter#queueInputImage queueInputImage()} method. After
+ * the downstream consumer uses and releases this image to this
+ * ImageWriter, this callback will be fired. This image will be
+ * available to be dequeued after this callback.</li>
+ * <li>The application obtains an Image from some other component (e.g.
+ * an {@link ImageReader}), uses it, and then queues it to this
+ * ImageWriter via {@link ImageWriter#queueInputImage queueInputImage()}.
+ * After the downstream consumer uses and releases this image to this
+ * ImageWriter, this callback will be fired.</li>
+ * </p>
+ *
+ * @param writer the ImageWriter the callback is associated with.
+ * @see ImageWriter
+ * @see Image
+ */
+ void onImageReleased(ImageWriter writer);
+ }
+
+ /**
+ * Register a listener to be invoked when an input Image is returned to the
+ * ImageWriter.
+ *
+ * @param listener The listener that will be run.
+ * @param handler The handler on which the listener should be invoked, or
+ * null if the listener should be invoked on the calling thread's
+ * looper.
+ * @throws IllegalArgumentException If no handler specified and the calling
+ * thread has no looper.
+ */
+    public void setOnImageReleasedListener(OnImageReleasedListener listener, Handler handler) {
+        synchronized (mListenerLock) {
+            if (listener != null) {
+                Looper looper = handler != null ? handler.getLooper() : Looper.myLooper();
+                if (looper == null) {
+                    throw new IllegalArgumentException(
+                            "handler is null but the current thread is not a looper");
+                }
+                // Reuse the existing handler when it already dispatches on the requested looper.
+                if (mListenerHandler == null || mListenerHandler.getLooper() != looper) {
+                    mListenerHandler = new ListenerHandler(looper);
+                }
+                mListener = listener;
+            } else {
+                // Unregister: drop both the listener and its dispatch handler.
+                mListener = null;
+                mListenerHandler = null;
+            }
+        }
+    }
+
+ /**
+ * Free up all the resources associated with this ImageWriter.
+ * <p>
+ * After calling this method, this ImageWriter cannot be used. Calling any
+ * methods on this ImageWriter and Images previously provided by
+ * {@link #dequeueInputImage()} will result in an
+ * {@link IllegalStateException}, and attempting to write into
+ * {@link ByteBuffer ByteBuffers} returned by an earlier
+ * {@link Image.Plane#getBuffer Plane#getBuffer} call will have undefined
+ * behavior.
+ * </p>
+ */
+    @Override
+    public void close() {
+        // Stop listener callbacks first so none fire during teardown.
+        setOnImageReleasedListener(null, null);
+        // Cancel every still-dequeued image back to the writer (close == abort for them).
+        for (Image image : mDequeuedImages) {
+            image.close();
+        }
+        mDequeuedImages.clear();
+        nativeClose(mNativeContext);
+        mNativeContext = 0;
+
+        if (mEstimatedNativeAllocBytes > 0) {
+            // Balance the registerNativeAllocation() made in the constructor.
+            VMRuntime.getRuntime().registerNativeFree(mEstimatedNativeAllocBytes);
+            mEstimatedNativeAllocBytes = 0;
+        }
+    }
+
+    @Override
+    protected void finalize() throws Throwable {
+        try {
+            // Safety net: release native resources if the app never called close().
+            close();
+        } finally {
+            super.finalize();
+        }
+    }
+
+ /**
+ * <p>
+ * Attach and queue input Image to this ImageWriter.
+ * </p>
+ * <p>
+ * When the format of an Image is {@link ImageFormat#PRIVATE PRIVATE}, or
+ * the source Image is so large that copying its data is too expensive, this
+ * method can be used to migrate the source Image into ImageWriter without a
+ * data copy, and then queue it to this ImageWriter. The source Image must
+ * be detached from its previous owner already, or this call will throw an
+ * {@link IllegalStateException}.
+ * </p>
+ * <p>
+ * After this call, the ImageWriter takes ownership of this Image. This
+ * ownership will automatically be removed from this writer after the
+ * consumer releases this Image, that is, after
+ * {@link OnImageReleasedListener#onImageReleased}. The caller is responsible for
+ * closing this Image through {@link Image#close()} to free up the resources
+ * held by this Image.
+ * </p>
+ *
+ * @param image The source Image to be attached and queued into this
+ * ImageWriter for downstream consumer to use.
+ * @throws IllegalStateException if the Image is not detached from its
+ * previous owner, or the Image is already attached to this
+ * ImageWriter, or the source Image is invalid.
+ */
+    private void attachAndQueueInputImage(Image image) {
+        if (image == null) {
+            throw new IllegalArgumentException("image shouldn't be null");
+        }
+        if (isImageOwnedByMe(image)) {
+            // Fixed message grammar: was "owned ImageWriter already".
+            throw new IllegalArgumentException(
+                    "Can not attach an image that is owned by ImageWriter already");
+        }
+        /**
+         * Throw ISE if the image is not attachable, which means that it is
+         * either owned by other entity now, or completely non-attachable (some
+         * stand-alone images are not backed by native gralloc buffer, thus not
+         * attachable).
+         */
+        if (!image.isAttachable()) {
+            // Concatenation previously produced a double space ("image  is").
+            throw new IllegalStateException("Image was not detached from last owner, or image"
+                    + " is not detachable");
+        }
+
+        // TODO: what if attach failed, throw RTE or detach a slot then attach?
+        // need do some cleanup to make sure no orphaned
+        // buffer caused leak.
+        Rect crop = image.getCropRect();
+        nativeAttachAndQueueImage(mNativeContext, image.getNativeContext(), image.getFormat(),
+                image.getTimestamp(), crop.left, crop.top, crop.right, crop.bottom);
+    }
+
+ /**
+ * This custom handler runs asynchronously so callbacks don't get queued
+ * behind UI messages.
+ */
+    private final class ListenerHandler extends Handler {
+        public ListenerHandler(Looper looper) {
+            // Async handler so callbacks aren't queued behind sync-barrier (UI) messages.
+            super(looper, null, true /* async */);
+        }
+
+        @Override
+        public void handleMessage(Message msg) {
+            // Snapshot the listener under the lock, then invoke it outside the lock
+            // so client code never runs while mListenerLock is held.
+            OnImageReleasedListener listener;
+            synchronized (mListenerLock) {
+                listener = mListener;
+            }
+            if (listener != null) {
+                listener.onImageReleased(ImageWriter.this);
+            }
+        }
+    }
+
+ /**
+ * Called from Native code when an Event happens. This may be called from an
+ * arbitrary Binder thread, so access to the ImageWriter must be
+ * synchronized appropriately.
+ */
+    private static void postEventFromNative(Object selfRef) {
+        @SuppressWarnings("unchecked")
+        WeakReference<ImageWriter> weakSelf = (WeakReference<ImageWriter>) selfRef;
+        final ImageWriter iw = weakSelf.get();
+        if (iw == null) {
+            // The writer was already garbage collected; nothing to notify.
+            return;
+        }
+
+        final Handler handler;
+        synchronized (iw.mListenerLock) {
+            handler = iw.mListenerHandler;
+        }
+        if (handler != null) {
+            // Message payload is unused; an empty message just triggers handleMessage().
+            handler.sendEmptyMessage(0);
+        }
+    }
+
+ /**
+ * <p>
+ * Abort the Images that were dequeued from this ImageWriter, and return
+ * them to this writer for reuse.
+ * </p>
+ * <p>
+ * This method is used for the cases where the application dequeued the
+ * Image, may have filled the data, but does not want the downstream
+ * component to consume it. The Image will be returned to this ImageWriter
+ * for reuse after this call, and the ImageWriter will immediately have an
+ * Image available to be dequeued. This aborted Image will be invisible to
+ * the downstream consumer, as if nothing happened.
+ * </p>
+ *
+ * @param image The Image to be aborted.
+ * @see #dequeueInputImage()
+ * @see Image#close()
+ */
+    private void abortImage(Image image) {
+        if (image == null) {
+            throw new IllegalArgumentException("image shouldn't be null");
+        }
+
+        if (!mDequeuedImages.contains(image)) {
+            throw new IllegalStateException("It is illegal to abort some image that is not"
+                    + " dequeued yet");
+        }
+
+        WriterSurfaceImage wi = (WriterSurfaceImage) image;
+        if (!wi.mIsImageValid) {
+            // Already queued or aborted; nothing to do.
+            return;
+        }
+
+        /**
+         * We only need to abort Images that are owned and dequeued by ImageWriter.
+         * For attached Images, no need to abort, as there are only two cases:
+         * attached + queued successfully, and attach failed. Neither of the
+         * cases need abort.
+         */
+        cancelImage(mNativeContext, image);
+        mDequeuedImages.remove(image);
+        wi.clearSurfacePlanes();
+        wi.mIsImageValid = false;
+    }
+
+    private boolean isImageOwnedByMe(Image image) {
+        // An Image belongs to this writer iff it is one of our WriterSurfaceImages
+        // whose recorded owner is this exact instance. (getOwner() still performs
+        // the same validity check as before.)
+        return (image instanceof WriterSurfaceImage)
+                && ((WriterSurfaceImage) image).getOwner() == this;
+    }
+
+    /**
+     * Concrete {@link Image} implementation backed by a buffer dequeued from this writer.
+     * Width/height/format are lazily queried from native code and cached; mNativeBuffer is
+     * presumably populated by native code during dequeue (field is native-accessed) — confirm.
+     */
+    private static class WriterSurfaceImage extends android.media.Image {
+        private ImageWriter mOwner;
+        // This field is used by native code, do not access or modify.
+        private long mNativeBuffer;
+        private int mNativeFenceFd = -1;
+        private SurfacePlane[] mPlanes;
+        private int mHeight = -1;
+        private int mWidth = -1;
+        private int mFormat = -1;
+        // When this default timestamp is used, timestamp for the input Image
+        // will be generated automatically when queueInputBuffer is called.
+        private final long DEFAULT_TIMESTAMP = Long.MIN_VALUE;
+        private long mTimestamp = DEFAULT_TIMESTAMP;
+
+        public WriterSurfaceImage(ImageWriter writer) {
+            mOwner = writer;
+        }
+
+        @Override
+        public int getFormat() {
+            throwISEIfImageIsInvalid();
+
+            // Lazily query from native code and cache.
+            if (mFormat == -1) {
+                mFormat = nativeGetFormat();
+            }
+            return mFormat;
+        }
+
+        @Override
+        public int getWidth() {
+            throwISEIfImageIsInvalid();
+
+            // Lazily query from native code and cache.
+            if (mWidth == -1) {
+                mWidth = nativeGetWidth();
+            }
+
+            return mWidth;
+        }
+
+        @Override
+        public int getHeight() {
+            throwISEIfImageIsInvalid();
+
+            // Lazily query from native code and cache.
+            if (mHeight == -1) {
+                mHeight = nativeGetHeight();
+            }
+
+            return mHeight;
+        }
+
+        @Override
+        public long getTimestamp() {
+            throwISEIfImageIsInvalid();
+
+            return mTimestamp;
+        }
+
+        @Override
+        public void setTimestamp(long timestamp) {
+            throwISEIfImageIsInvalid();
+
+            mTimestamp = timestamp;
+        }
+
+        @Override
+        public Plane[] getPlanes() {
+            throwISEIfImageIsInvalid();
+
+            if (mPlanes == null) {
+                int numPlanes = ImageUtils.getNumPlanesForFormat(getFormat());
+                mPlanes = nativeCreatePlanes(numPlanes, getOwner().getFormat());
+            }
+
+            // Return a copy so callers cannot replace our cached plane entries.
+            return mPlanes.clone();
+        }
+
+        @Override
+        boolean isAttachable() {
+            throwISEIfImageIsInvalid();
+            // Don't allow Image to be detached from ImageWriter for now, as no
+            // detach API is exposed.
+            return false;
+        }
+
+        @Override
+        ImageWriter getOwner() {
+            throwISEIfImageIsInvalid();
+
+            return mOwner;
+        }
+
+        @Override
+        long getNativeContext() {
+            throwISEIfImageIsInvalid();
+
+            return mNativeBuffer;
+        }
+
+        @Override
+        public void close() {
+            // Closing a still-valid dequeued image cancels it back to the writer.
+            if (mIsImageValid) {
+                getOwner().abortImage(this);
+            }
+        }
+
+        @Override
+        protected final void finalize() throws Throwable {
+            try {
+                // Safety net in case the application never closed this image.
+                close();
+            } finally {
+                super.finalize();
+            }
+        }
+
+        private void clearSurfacePlanes() {
+            // Free the direct buffers backing each plane and drop the references.
+            if (mIsImageValid && mPlanes != null) {
+                for (int i = 0; i < mPlanes.length; i++) {
+                    if (mPlanes[i] != null) {
+                        mPlanes[i].clearBuffer();
+                        mPlanes[i] = null;
+                    }
+                }
+            }
+        }
+
+        private class SurfacePlane extends android.media.Image.Plane {
+            private ByteBuffer mBuffer;
+            final private int mPixelStride;
+            final private int mRowStride;
+
+            // SurfacePlane instance is created by native code when WriterSurfaceImage#getPlanes()
+            // is called
+            private SurfacePlane(int rowStride, int pixelStride, ByteBuffer buffer) {
+                mRowStride = rowStride;
+                mPixelStride = pixelStride;
+                mBuffer = buffer;
+                /**
+                 * Set the byteBuffer order according to host endianness (native
+                 * order), otherwise, the byteBuffer order defaults to
+                 * ByteOrder.BIG_ENDIAN.
+                 */
+                mBuffer.order(ByteOrder.nativeOrder());
+            }
+
+            @Override
+            public int getRowStride() {
+                throwISEIfImageIsInvalid();
+                return mRowStride;
+            }
+
+            @Override
+            public int getPixelStride() {
+                throwISEIfImageIsInvalid();
+                return mPixelStride;
+            }
+
+            @Override
+            public ByteBuffer getBuffer() {
+                throwISEIfImageIsInvalid();
+                return mBuffer;
+            }
+
+            private void clearBuffer() {
+                // Need null check first, as the getBuffer() may not be called
+                // before an Image is closed.
+                if (mBuffer == null) {
+                    return;
+                }
+
+                if (mBuffer.isDirect()) {
+                    NioUtils.freeDirectBuffer(mBuffer);
+                }
+                mBuffer = null;
+            }
+
+        }
+
+        // Create the SurfacePlane object and fill the information
+        private synchronized native SurfacePlane[] nativeCreatePlanes(int numPlanes, int writerFmt);
+
+        private synchronized native int nativeGetWidth();
+
+        private synchronized native int nativeGetHeight();
+
+        private synchronized native int nativeGetFormat();
+    }
+
+ // Native implemented ImageWriter methods.
+ private synchronized native long nativeInit(Object weakSelf, Surface surface, int maxImgs,
+ int format);
+
+ private synchronized native void nativeClose(long nativeCtx);
+
+ private synchronized native void nativeDequeueInputImage(long nativeCtx, Image wi);
+
+ private synchronized native void nativeQueueInputImage(long nativeCtx, Image image,
+ long timestampNs, int left, int top, int right, int bottom);
+
+ private synchronized native int nativeAttachAndQueueImage(long nativeCtx,
+ long imageNativeBuffer, int imageFormat, long timestampNs, int left,
+ int top, int right, int bottom);
+
+ private synchronized native void cancelImage(long nativeCtx, Image image);
+
+ /**
+ * We use a class initializer to allow the native code to cache some field
+ * offsets.
+ */
+ private static native void nativeClassInit();
+
+ static {
+ System.loadLibrary("media_jni");
+ nativeClassInit();
+ }
+}
diff --git a/android/media/JetPlayer.java b/android/media/JetPlayer.java
new file mode 100644
index 00000000..7735e785
--- /dev/null
+++ b/android/media/JetPlayer.java
@@ -0,0 +1,590 @@
+/*
+ * Copyright (C) 2008 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+
+import java.io.FileDescriptor;
+import java.lang.ref.WeakReference;
+import java.lang.CloneNotSupportedException;
+
+import android.content.res.AssetFileDescriptor;
+import android.os.Looper;
+import android.os.Handler;
+import android.os.Message;
+import android.util.AndroidRuntimeException;
+import android.util.Log;
+
+/**
+ * JetPlayer provides access to JET content playback and control.
+ *
+ * <p>Please refer to the JET Creator User Manual for a presentation of the JET interactive
 * music concept and how to use the JetCreator tool to create content to be played by JetPlayer.
+ *
+ * <p>Use of the JetPlayer class is based around the playback of a number of JET segments
+ * sequentially added to a playback FIFO queue. The rendering of the MIDI content stored in each
+ * segment can be dynamically affected by two mechanisms:
+ * <ul>
+ * <li>tracks in a segment can be muted or unmuted at any moment, individually or through
+ * a mask (to change the mute state of multiple tracks at once)</li>
+ * <li>parts of tracks in a segment can be played at predefined points in the segment, in order
+ * to maintain synchronization with the other tracks in the segment. This is achieved through
+ * the notion of "clips", which can be triggered at any time, but that will play only at the
+ * right time, as authored in the corresponding JET file.</li>
+ * </ul>
+ * As a result of the rendering and playback of the JET segments, the user of the JetPlayer instance
+ * can receive notifications from the JET engine relative to:
+ * <ul>
+ * <li>the playback state,</li>
+ * <li>the number of segments left to play in the queue,</li>
+ * <li>application controller events (CC80-83) to mark points in the MIDI segments.</li>
+ * </ul>
+ * Use {@link #getJetPlayer()} to construct a JetPlayer instance. JetPlayer is a singleton class.
+ * </p>
+ *
+ * <div class="special reference">
+ * <h3>Developer Guides</h3>
+ * <p>For more information about how to use JetPlayer, read the
+ * <a href="{@docRoot}guide/topics/media/jetplayer.html">JetPlayer</a> developer guide.</p></div>
+ */
+public class JetPlayer
+{
+ //--------------------------------------------
+ // Constants
+ //------------------------
+ /**
+ * The maximum number of simultaneous tracks. Use {@link #getMaxTracks()} to
+ * access this value.
+ */
+ private static int MAXTRACKS = 32;
+
    // to keep in sync with the JetPlayer class constants
    // defined in frameworks/base/include/media/JetPlayer.h
    private static final int JET_EVENT = 1;
    private static final int JET_USERID_UPDATE = 2;
    private static final int JET_NUMQUEUEDSEGMENT_UPDATE = 3;
    private static final int JET_PAUSE_UPDATE = 4;

    // to keep in sync with external/sonivox/arm-wt-22k/lib_src/jet_data.h
    // Encoding of event information on 32 bits
    // Field layout of the packed event int, from LSB to MSB:
    //   value (7 bits) | controller (7 bits) | channel (4 bits) |
    //   track (6 bits) | segment (8 bits)
    private static final int JET_EVENT_VAL_MASK = 0x0000007f; // mask for value
    private static final int JET_EVENT_CTRL_MASK = 0x00003f80; // mask for controller
    private static final int JET_EVENT_CHAN_MASK = 0x0003c000; // mask for channel
    private static final int JET_EVENT_TRACK_MASK = 0x00fc0000; // mask for track number
    private static final int JET_EVENT_SEG_MASK = 0xff000000; // mask for segment ID
    private static final int JET_EVENT_CTRL_SHIFT = 7; // shift to get controller number to bit 0
    private static final int JET_EVENT_CHAN_SHIFT = 14; // shift to get MIDI channel to bit 0
    private static final int JET_EVENT_TRACK_SHIFT = 18; // shift to get track ID to bit 0
    private static final int JET_EVENT_SEG_SHIFT = 24; // shift to get segment ID to bit 0

    // to keep in sync with values used in external/sonivox/arm-wt-22k/Android.mk
    // Jet rendering audio parameters
    private static final int JET_OUTPUT_RATE = 22050; // _SAMPLE_RATE_22050 in Android.mk
    private static final int JET_OUTPUT_CHANNEL_CONFIG =
            AudioFormat.CHANNEL_OUT_STEREO; // NUM_OUTPUT_CHANNELS=2 in Android.mk


    //--------------------------------------------
    // Member variables
    //------------------------
    /**
     * Handler for jet events and status updates coming from the native code
     */
    private NativeEventHandler mEventHandler = null;

    /**
     * Looper associated with the thread that creates the AudioTrack instance
     */
    private Looper mInitializationLooper = null;

    /**
     * Lock to protect the event listener updates against event notifications
     */
    private final Object mEventListenerLock = new Object();

    // Written by setEventListener() and read by NativeEventHandler, both under
    // mEventListenerLock.
    private OnJetEventListener mJetEventListener = null;

    // Lazily created by getJetPlayer(); cleared by release().
    // NOTE(review): accessed without synchronization — confirm callers are
    // single-threaded.
    private static JetPlayer singletonRef;


    //--------------------------------
    // Used exclusively by native code
    //--------------------
    /**
     * Accessed by native methods: provides access to C++ JetPlayer object
     */
    @SuppressWarnings("unused")
    private long mNativePlayerInJavaObj;
+
+
+ //--------------------------------------------
+ // Constructor, finalize
+ //------------------------
    /**
     * Factory method for the JetPlayer class.
     * @return the singleton JetPlayer instance
     */
    public static JetPlayer getJetPlayer() {
        // Lazy initialization. NOTE(review): not synchronized — two threads
        // racing here could each observe null and create an instance; confirm
        // this is only ever called from one thread.
        if (singletonRef == null) {
            singletonRef = new JetPlayer();
        }
        return singletonRef;
    }
+
    /**
     * Cloning a JetPlayer instance is not supported. Calling clone() will generate an exception.
     *
     * @throws CloneNotSupportedException always, to preserve the singleton.
     */
    public Object clone() throws CloneNotSupportedException {
        // JetPlayer is a singleton class,
        // so you can't clone a JetPlayer instance
        throw new CloneNotSupportedException();
    }
+
+
    /**
     * Private constructor: callers obtain the shared instance through
     * {@link #getJetPlayer()}. Initializes the native JET engine for 16-bit
     * PCM stereo output at JET_OUTPUT_RATE.
     */
    private JetPlayer() {

        // remember which looper is associated with the JetPlayer instantiation;
        // it is used for event delivery when no Handler is supplied to
        // setEventListener(OnJetEventListener, Handler)
        if ((mInitializationLooper = Looper.myLooper()) == null) {
            mInitializationLooper = Looper.getMainLooper();
        }

        int buffSizeInBytes = AudioTrack.getMinBufferSize(JET_OUTPUT_RATE,
                JET_OUTPUT_CHANNEL_CONFIG, AudioFormat.ENCODING_PCM_16BIT);

        // Only set up the native player when a valid buffer size is available.
        // NOTE(review): if this branch is skipped, native_setup() never runs —
        // confirm how later native_* calls behave in that case.
        if ((buffSizeInBytes != AudioTrack.ERROR)
                && (buffSizeInBytes != AudioTrack.ERROR_BAD_VALUE)) {

            native_setup(new WeakReference<JetPlayer>(this),
                    JetPlayer.getMaxTracks(),
                    // bytes to frame conversion:
                    // 1200 == minimum buffer size in frames on generation 1 hardware
                    Math.max(1200, buffSizeInBytes /
                        (AudioFormat.getBytesPerSample(AudioFormat.ENCODING_PCM_16BIT) *
                        2 /*channels*/)));
        }
    }
+
+
    /** Releases the native player when this instance is garbage collected. */
    protected void finalize() {
        native_finalize();
    }
+
+
    /**
     * Stops the current JET playback, and releases all associated native resources.
     * The object can no longer be used and the reference should be set to null
     * after a call to release().
     */
    public void release() {
        native_release();
        // Drop the singleton so a later getJetPlayer() creates a fresh instance.
        singletonRef = null;
    }
+
+
+ //--------------------------------------------
+ // Getters
+ //------------------------
    /**
     * Returns the maximum number of simultaneous MIDI tracks supported by JetPlayer
     * @return the track count; this is also the exact length required for the
     *         boolean arrays passed to {@link #setMuteArray(boolean[], boolean)}
     *         and {@link #queueJetSegmentMuteArray(int, int, int, int, boolean[], byte)}.
     */
    public static int getMaxTracks() {
        return JetPlayer.MAXTRACKS;
    }
+
+
+ //--------------------------------------------
+ // Jet functionality
+ //------------------------
    /**
     * Loads a .jet file from a given path.
     * @param path the path to the .jet file, for instance "/sdcard/mygame/music.jet".
     * @return true if loading the .jet file was successful, false if loading failed.
     */
    public boolean loadJetFile(String path) {
        // Thin wrapper: all file validation happens in the native loader.
        return native_loadJetFromFile(path);
    }
+
+
+ /**
+ * Loads a .jet file from an asset file descriptor.
+ * @param afd the asset file descriptor.
+ * @return true if loading the .jet file was successful, false if loading failed.
+ */
+ public boolean loadJetFile(AssetFileDescriptor afd) {
+ long len = afd.getLength();
+ if (len < 0) {
+ throw new AndroidRuntimeException("no length for fd");
+ }
+ return native_loadJetFromFileD(
+ afd.getFileDescriptor(), afd.getStartOffset(), len);
+ }
+
    /**
     * Closes the resource containing the JET content.
     * @return true if successfully closed, false otherwise.
     */
    public boolean closeJetFile() {
        // Counterpart to loadJetFile(); delegates to the native engine.
        return native_closeJetFile();
    }
+
+
    /**
     * Starts playing the JET segment queue.
     * @return true if rendering and playback is successfully started, false otherwise.
     */
    public boolean play() {
        // Delegates to the native JET engine.
        return native_playJet();
    }
+
+
    /**
     * Pauses the playback of the JET segment queue.
     * @return true if rendering and playback is successfully paused, false otherwise.
     */
    public boolean pause() {
        // Delegates to the native JET engine.
        return native_pauseJet();
    }
+
+
    /**
     * Queues the specified segment in the JET queue.
     * @param segmentNum the identifier of the segment.
     * @param libNum the index of the sound bank associated with the segment. Use -1 to indicate
     * that no sound bank (DLS file) is associated with this segment, in which case JET will use
     * the General MIDI library.
     * @param repeatCount the number of times the segment will be repeated. 0 means the segment will
     * only play once. -1 means the segment will repeat indefinitely.
     * @param transpose the amount of pitch transposition. Set to 0 for normal playback.
     * Range is -12 to +12.
     * @param muteFlags a bitmask to specify which MIDI tracks will be muted during playback. Bit 0
     * affects track 0, bit 1 affects track 1 etc.
     * @param userID a value specified by the application that uniquely identifies the segment.
     * This value is received in the
     * {@link OnJetEventListener#onJetUserIdUpdate(JetPlayer, int, int)} event listener method.
     * Normally, the application will keep a byte value that is incremented each time a new
     * segment is queued up. This can be used to look up any special characteristics of that
     * track including trigger clips and mute flags.
     * @return true if the segment was successfully queued, false if the queue is full or if the
     * parameters are invalid.
     */
    public boolean queueJetSegment(int segmentNum, int libNum, int repeatCount,
            int transpose, int muteFlags, byte userID) {
        // Parameter validation is performed by the native engine; invalid
        // values are reported through the false return value.
        return native_queueJetSegment(segmentNum, libNum, repeatCount,
                transpose, muteFlags, userID);
    }
+
+
    /**
     * Queues the specified segment in the JET queue.
     * @param segmentNum the identifier of the segment.
     * @param libNum the index of the soundbank associated with the segment. Use -1 to indicate that
     * no sound bank (DLS file) is associated with this segment, in which case JET will use
     * the General MIDI library.
     * @param repeatCount the number of times the segment will be repeated. 0 means the segment will
     * only play once. -1 means the segment will repeat indefinitely.
     * @param transpose the amount of pitch transposition. Set to 0 for normal playback.
     * Range is -12 to +12.
     * @param muteArray an array of booleans to specify which MIDI tracks will be muted during
     * playback. The value at index 0 affects track 0, value at index 1 affects track 1 etc.
     * The length of the array must be {@link #getMaxTracks()} for the call to succeed.
     * @param userID a value specified by the application that uniquely identifies the segment.
     * This value is received in the
     * {@link OnJetEventListener#onJetUserIdUpdate(JetPlayer, int, int)} event listener method.
     * Normally, the application will keep a byte value that is incremented each time a new
     * segment is queued up. This can be used to look up any special characteristics of that
     * track including trigger clips and mute flags.
     * @return true if the segment was successfully queued, false if the queue is full or if the
     * parameters are invalid.
     */
    public boolean queueJetSegmentMuteArray(int segmentNum, int libNum, int repeatCount,
            int transpose, boolean[] muteArray, byte userID) {
        // Per the contract above, the mute array must cover every track.
        if (muteArray.length != JetPlayer.getMaxTracks()) {
            return false;
        }
        return native_queueJetSegmentMuteArray(segmentNum, libNum, repeatCount,
                transpose, muteArray, userID);
    }
+
+
    /**
     * Modifies the mute flags.
     * @param muteFlags a bitmask to specify which MIDI tracks are muted. Bit 0 affects track 0,
     * bit 1 affects track 1 etc.
     * @param sync if false, the new mute flags will be applied as soon as possible by the JET
     * render and playback engine. If true, the mute flags will be updated at the start of the
     * next segment. If the segment is repeated, the flags will take effect the next time
     * segment is repeated.
     * @return true if the mute flags were successfully updated, false otherwise.
     */
    public boolean setMuteFlags(int muteFlags, boolean sync) {
        // Delegates to the native JET engine.
        return native_setMuteFlags(muteFlags, sync);
    }
+
+
+ /**
+ * Modifies the mute flags for the current active segment.
+ * @param muteArray an array of booleans to specify which MIDI tracks are muted. The value at
+ * index 0 affects track 0, value at index 1 affects track 1 etc.
+ * The length of the array must be {@link #getMaxTracks()} for the call to succeed.
+ * @param sync if false, the new mute flags will be applied as soon as possible by the JET
+ * render and playback engine. If true, the mute flags will be updated at the start of the
+ * next segment. If the segment is repeated, the flags will take effect the next time
+ * segment is repeated.
+ * @return true if the mute flags were successfully updated, false otherwise.
+ */
+ public boolean setMuteArray(boolean[] muteArray, boolean sync) {
+ if(muteArray.length != JetPlayer.getMaxTracks())
+ return false;
+ return native_setMuteArray(muteArray, sync);
+ }
+
+
    /**
     * Mutes or unmutes a single track.
     * @param trackId the index of the track to mute.
     * @param muteFlag set to true to mute, false to unmute.
     * @param sync if false, the new mute flags will be applied as soon as possible by the JET
     * render and playback engine. If true, the mute flag will be updated at the start of the
     * next segment. If the segment is repeated, the flag will take effect the next time
     * segment is repeated.
     * @return true if the mute flag was successfully updated, false otherwise.
     */
    public boolean setMuteFlag(int trackId, boolean muteFlag, boolean sync) {
        // Delegates to the native JET engine.
        return native_setMuteFlag(trackId, muteFlag, sync);
    }
+
+
    /**
     * Schedules the playback of a clip.
     * This will automatically update the mute flags in sync with the JET Clip Marker (controller
     * 103). The parameter clipID must be in the range of 0-63. After the call to triggerClip, when
     * JET next encounters a controller event 103 with bits 0-5 of the value equal to clipID and
     * bit 6 set to 1, it will automatically unmute the track containing the controller event.
     * When JET encounters the complementary controller event 103 with bits 0-5 of the value equal
     * to clipID and bit 6 set to 0, it will mute the track again.
     * @param clipId the identifier of the clip to trigger; valid range is 0-63 (see above).
     * @return true if the clip was successfully triggered, false otherwise.
     */
    public boolean triggerClip(int clipId) {
        // Delegates to the native JET engine.
        return native_triggerClip(clipId);
    }
+
+
    /**
     * Empties the segment queue, and clears all clips that are scheduled for playback.
     * @return true if the queue was successfully cleared, false otherwise.
     */
    public boolean clearQueue() {
        // Delegates to the native JET engine.
        return native_clearQueue();
    }
+
+
+ //---------------------------------------------------------
+ // Internal class to handle events posted from native code
+ //------------------------
+ private class NativeEventHandler extends Handler
+ {
+ private JetPlayer mJet;
+
+ public NativeEventHandler(JetPlayer jet, Looper looper) {
+ super(looper);
+ mJet = jet;
+ }
+
+ @Override
+ public void handleMessage(Message msg) {
+ OnJetEventListener listener = null;
+ synchronized (mEventListenerLock) {
+ listener = mJet.mJetEventListener;
+ }
+ switch(msg.what) {
+ case JET_EVENT:
+ if (listener != null) {
+ // call the appropriate listener after decoding the event parameters
+ // encoded in msg.arg1
+ mJetEventListener.onJetEvent(
+ mJet,
+ (short)((msg.arg1 & JET_EVENT_SEG_MASK) >> JET_EVENT_SEG_SHIFT),
+ (byte) ((msg.arg1 & JET_EVENT_TRACK_MASK) >> JET_EVENT_TRACK_SHIFT),
+ // JETCreator channel numbers start at 1, but the index starts at 0
+ // in the .jet files
+ (byte)(((msg.arg1 & JET_EVENT_CHAN_MASK) >> JET_EVENT_CHAN_SHIFT) + 1),
+ (byte) ((msg.arg1 & JET_EVENT_CTRL_MASK) >> JET_EVENT_CTRL_SHIFT),
+ (byte) (msg.arg1 & JET_EVENT_VAL_MASK) );
+ }
+ return;
+ case JET_USERID_UPDATE:
+ if (listener != null) {
+ listener.onJetUserIdUpdate(mJet, msg.arg1, msg.arg2);
+ }
+ return;
+ case JET_NUMQUEUEDSEGMENT_UPDATE:
+ if (listener != null) {
+ listener.onJetNumQueuedSegmentUpdate(mJet, msg.arg1);
+ }
+ return;
+ case JET_PAUSE_UPDATE:
+ if (listener != null)
+ listener.onJetPauseUpdate(mJet, msg.arg1);
+ return;
+
+ default:
+ loge("Unknown message type " + msg.what);
+ return;
+ }
+ }
+ }
+
+
    //--------------------------------------------
    // Jet event listener
    //------------------------
    /**
     * Sets the listener JetPlayer notifies when a JET event is generated by the rendering and
     * playback engine.
     * Notifications will be received in the same thread as the one in which the JetPlayer
     * instance was created.
     * @param listener the listener to register, or null to clear the current one.
     */
    public void setEventListener(OnJetEventListener listener) {
        // Null handler: events are delivered on the initialization looper.
        setEventListener(listener, null);
    }
+
+ /**
+ * Sets the listener JetPlayer notifies when a JET event is generated by the rendering and
+ * playback engine.
+ * Use this method to receive JET events in the Handler associated with another
+ * thread than the one in which you created the JetPlayer instance.
+ * @param listener
+ * @param handler the Handler that will receive the event notification messages.
+ */
+ public void setEventListener(OnJetEventListener listener, Handler handler) {
+ synchronized(mEventListenerLock) {
+
+ mJetEventListener = listener;
+
+ if (listener != null) {
+ if (handler != null) {
+ mEventHandler = new NativeEventHandler(this, handler.getLooper());
+ } else {
+ // no given handler, use the looper the AudioTrack was created in
+ mEventHandler = new NativeEventHandler(this, mInitializationLooper);
+ }
+ } else {
+ mEventHandler = null;
+ }
+
+ }
+ }
+
+
    /**
     * Handles the notification when the JET engine generates an event.
     * Implementations are registered through JetPlayer.setEventListener(); all
     * callbacks run on the looper selected at registration time.
     */
    public interface OnJetEventListener {
        /**
         * Callback for when the JET engine generates a new event.
         *
         * @param player the JET player the event is coming from
         * @param segment 8 bit unsigned value
         * @param track 6 bit unsigned value
         * @param channel 4 bit unsigned value
         * @param controller 7 bit unsigned value
         * @param value 7 bit unsigned value
         */
        void onJetEvent(JetPlayer player,
                short segment, byte track, byte channel, byte controller, byte value);
        /**
         * Callback for when JET's currently playing segment's userID is updated.
         *
         * @param player the JET player the status update is coming from
         * @param userId the ID of the currently playing segment
         * @param repeatCount the repetition count for the segment (0 means it plays once)
         */
        void onJetUserIdUpdate(JetPlayer player, int userId, int repeatCount);

        /**
         * Callback for when JET's number of queued segments is updated.
         *
         * @param player the JET player the status update is coming from
         * @param nbSegments the number of segments in the JET queue
         */
        void onJetNumQueuedSegmentUpdate(JetPlayer player, int nbSegments);

        /**
         * Callback for when JET pause state is updated.
         *
         * @param player the JET player the status update is coming from
         * @param paused indicates whether JET is paused (1) or not (0)
         */
        void onJetPauseUpdate(JetPlayer player, int paused);
    }
+
+
    //--------------------------------------------
    // Native methods
    //------------------------
    // Thin JNI bindings onto the C++ JetPlayer engine (see
    // frameworks/base/include/media/JetPlayer.h); each boolean return value
    // reports native-side success or failure of the call.
    private native final boolean native_setup(Object Jet_this,
            int maxTracks, int trackBufferSize);
    private native final void native_finalize();
    private native final void native_release();
    private native final boolean native_loadJetFromFile(String pathToJetFile);
    private native final boolean native_loadJetFromFileD(FileDescriptor fd, long offset, long len);
    private native final boolean native_closeJetFile();
    private native final boolean native_playJet();
    private native final boolean native_pauseJet();
    private native final boolean native_queueJetSegment(int segmentNum, int libNum,
            int repeatCount, int transpose, int muteFlags, byte userID);
    private native final boolean native_queueJetSegmentMuteArray(int segmentNum, int libNum,
            int repeatCount, int transpose, boolean[] muteArray, byte userID);
    private native final boolean native_setMuteFlags(int muteFlags, boolean sync);
    private native final boolean native_setMuteArray(boolean[]muteArray, boolean sync);
    private native final boolean native_setMuteFlag(int trackId, boolean muteFlag, boolean sync);
    private native final boolean native_triggerClip(int clipId);
    private native final boolean native_clearQueue();
+
+ //---------------------------------------------------------
+ // Called exclusively by native code
+ //--------------------
+ @SuppressWarnings("unused")
+ private static void postEventFromNative(Object jetplayer_ref,
+ int what, int arg1, int arg2) {
+ //logd("Event posted from the native side: event="+ what + " args="+ arg1+" "+arg2);
+ JetPlayer jet = (JetPlayer)((WeakReference)jetplayer_ref).get();
+
+ if ((jet != null) && (jet.mEventHandler != null)) {
+ Message m =
+ jet.mEventHandler.obtainMessage(what, arg1, arg2, null);
+ jet.mEventHandler.sendMessage(m);
+ }
+
+ }
+
+
    //---------------------------------------------------------
    // Utils
    //--------------------
    private final static String TAG = "JetPlayer-J";

    /** Debug log helper; only referenced from commented-out tracing in this class. */
    private static void logd(String msg) {
        Log.d(TAG, "[ android.media.JetPlayer ] " + msg);
    }

    /** Error log helper. */
    private static void loge(String msg) {
        Log.e(TAG, "[ android.media.JetPlayer ] " + msg);
    }
+
+}
diff --git a/android/media/MediaActionSound.java b/android/media/MediaActionSound.java
new file mode 100644
index 00000000..983ca754
--- /dev/null
+++ b/android/media/MediaActionSound.java
@@ -0,0 +1,289 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.AudioManager;
+import android.media.SoundPool;
+import android.util.Log;
+
+/**
+ * <p>A class for producing sounds that match those produced by various actions
+ * taken by the media and camera APIs. </p>
+ *
+ * <p>This class is recommended for use with the {@link android.hardware.camera2} API, since the
+ * camera2 API does not play any sounds on its own for any capture or video recording actions.</p>
+ *
+ * <p>With the older {@link android.hardware.Camera} API, use this class to play an appropriate
+ * camera operation sound when implementing a custom still or video recording mechanism (through the
+ * Camera preview callbacks with
+ * {@link android.hardware.Camera#setPreviewCallback Camera.setPreviewCallback}, or through GPU
+ * processing with {@link android.hardware.Camera#setPreviewTexture Camera.setPreviewTexture}, for
+ * example), or when implementing some other camera-like function in your application.</p>
+ *
+ * <p>There is no need to play sounds when using
+ * {@link android.hardware.Camera#takePicture Camera.takePicture} or
+ * {@link android.media.MediaRecorder} for still images or video, respectively,
+ * as the Android framework will play the appropriate sounds when needed for
+ * these calls.</p>
+ *
+ */
+public class MediaActionSound {
    private static final int NUM_MEDIA_SOUND_STREAMS = 1;

    // Null after release(); all other methods assume a live pool.
    private SoundPool mSoundPool;
    private SoundState[] mSounds;

    // Order must match the SHUTTER_CLICK..STOP_VIDEO_RECORDING constants below,
    // which are used as indices into this array.
    private static final String[] SOUND_FILES = {
        "/system/media/audio/ui/camera_click.ogg",
        "/system/media/audio/ui/camera_focus.ogg",
        "/system/media/audio/ui/VideoRecord.ogg",
        "/system/media/audio/ui/VideoStop.ogg"
    };

    private static final String TAG = "MediaActionSound";
    /**
     * The sound used by
     * {@link android.hardware.Camera#takePicture Camera.takePicture} to
     * indicate still image capture.
     * @see #play
     */
    public static final int SHUTTER_CLICK = 0;

    /**
     * A sound to indicate that focusing has completed. Because deciding
     * when this occurs is application-dependent, this sound is not used by
     * any methods in the media or camera APIs.
     * @see #play
     */
    public static final int FOCUS_COMPLETE = 1;

    /**
     * The sound used by
     * {@link android.media.MediaRecorder#start MediaRecorder.start()} to
     * indicate the start of video recording.
     * @see #play
     */
    public static final int START_VIDEO_RECORDING = 2;

    /**
     * The sound used by
     * {@link android.media.MediaRecorder#stop MediaRecorder.stop()} to
     * indicate the end of video recording.
     * @see #play
     */
    public static final int STOP_VIDEO_RECORDING = 3;

    /**
     * States for SoundState.
     * STATE_NOT_LOADED : sample not loaded
     * STATE_LOADING : sample being loaded: waiting for load completion callback
     * STATE_LOADING_PLAY_REQUESTED : sample being loaded and playback request received
     * STATE_LOADED : sample loaded, ready for playback
     */
    private static final int STATE_NOT_LOADED = 0;
    private static final int STATE_LOADING = 1;
    private static final int STATE_LOADING_PLAY_REQUESTED = 2;
    private static final int STATE_LOADED = 3;
+
    /**
     * Per-sample state: which sound this is (index into SOUND_FILES), its
     * SoundPool sample id, and its STATE_* value. Each instance is used as its
     * own lock by load(), play() and the load-complete listener.
     */
    private class SoundState {
        public final int name;   // index into SOUND_FILES
        public int id;           // SoundPool sample id
        public int state;        // one of the STATE_* constants

        public SoundState(int name) {
            this.name = name;
            id = 0; // 0 is an invalid sample ID.
            state = STATE_NOT_LOADED;
        }
    }
    /**
     * Construct a new MediaActionSound instance. Only a single instance is
     * needed for playing any platform media action sound; you do not need a
     * separate instance for each sound type.
     */
    public MediaActionSound() {
        // A single-stream SoundPool with enforced-audibility sonification
        // attributes for camera/recorder feedback sounds.
        mSoundPool = new SoundPool.Builder()
                .setMaxStreams(NUM_MEDIA_SOUND_STREAMS)
                .setAudioAttributes(new AudioAttributes.Builder()
                    .setUsage(AudioAttributes.USAGE_ASSISTANCE_SONIFICATION)
                    .setFlags(AudioAttributes.FLAG_AUDIBILITY_ENFORCED)
                    .setContentType(AudioAttributes.CONTENT_TYPE_SONIFICATION)
                    .build())
                .build();
        mSoundPool.setOnLoadCompleteListener(mLoadCompleteListener);
        // One SoundState per entry in SOUND_FILES, all starting unloaded.
        mSounds = new SoundState[SOUND_FILES.length];
        for (int i = 0; i < mSounds.length; i++) {
            mSounds[i] = new SoundState(i);
        }
    }
+
+ private int loadSound(SoundState sound) {
+ int id = mSoundPool.load(SOUND_FILES[sound.name], 1);
+ if (id > 0) {
+ sound.state = STATE_LOADING;
+ sound.id = id;
+ }
+ return id;
+ }
+
+ /**
+ * Preload a predefined platform sound to minimize latency when the sound is
+ * played later by {@link #play}.
+ * @param soundName The type of sound to preload, selected from
+ * SHUTTER_CLICK, FOCUS_COMPLETE, START_VIDEO_RECORDING, or
+ * STOP_VIDEO_RECORDING.
+ * @see #play
+ * @see #SHUTTER_CLICK
+ * @see #FOCUS_COMPLETE
+ * @see #START_VIDEO_RECORDING
+ * @see #STOP_VIDEO_RECORDING
+ */
+ public void load(int soundName) {
+ if (soundName < 0 || soundName >= SOUND_FILES.length) {
+ throw new RuntimeException("Unknown sound requested: " + soundName);
+ }
+ SoundState sound = mSounds[soundName];
+ synchronized (sound) {
+ switch (sound.state) {
+ case STATE_NOT_LOADED:
+ if (loadSound(sound) <= 0) {
+ Log.e(TAG, "load() error loading sound: " + soundName);
+ }
+ break;
+ default:
+ Log.e(TAG, "load() called in wrong state: " + sound + " for sound: "+ soundName);
+ break;
+ }
+ }
+ }
+
+ /**
+ * <p>Play one of the predefined platform sounds for media actions.</p>
+ *
+ * <p>Use this method to play a platform-specific sound for various media
+ * actions. The sound playback is done asynchronously, with the same
+ * behavior and content as the sounds played by
+ * {@link android.hardware.Camera#takePicture Camera.takePicture},
+ * {@link android.media.MediaRecorder#start MediaRecorder.start}, and
+ * {@link android.media.MediaRecorder#stop MediaRecorder.stop}.</p>
+ *
+ * <p>With the {@link android.hardware.camera2 camera2} API, this method can be used to play
+ * standard camera operation sounds with the appropriate system behavior for such sounds.</p>
+
+ * <p>With the older {@link android.hardware.Camera} API, using this method makes it easy to
+ * match the default device sounds when recording or capturing data through the preview
+ * callbacks, or when implementing custom camera-like features in your application.</p>
+ *
+ * <p>If the sound has not been loaded by {@link #load} before calling play,
+ * play will load the sound at the cost of some additional latency before
+ * sound playback begins. </p>
+ *
+ * @param soundName The type of sound to play, selected from
+ * SHUTTER_CLICK, FOCUS_COMPLETE, START_VIDEO_RECORDING, or
+ * STOP_VIDEO_RECORDING.
+ * @see android.hardware.Camera#takePicture
+ * @see android.media.MediaRecorder
+ * @see #SHUTTER_CLICK
+ * @see #FOCUS_COMPLETE
+ * @see #START_VIDEO_RECORDING
+ * @see #STOP_VIDEO_RECORDING
+ */
+ public void play(int soundName) {
+ if (soundName < 0 || soundName >= SOUND_FILES.length) {
+ throw new RuntimeException("Unknown sound requested: " + soundName);
+ }
+ SoundState sound = mSounds[soundName];
+ synchronized (sound) {
+ switch (sound.state) {
+ case STATE_NOT_LOADED:
+ loadSound(sound);
+ if (loadSound(sound) <= 0) {
+ Log.e(TAG, "play() error loading sound: " + soundName);
+ break;
+ }
+ // FALL THROUGH
+
+ case STATE_LOADING:
+ sound.state = STATE_LOADING_PLAY_REQUESTED;
+ break;
+ case STATE_LOADED:
+ mSoundPool.play(sound.id, 1.0f, 1.0f, 0, 0, 1.0f);
+ break;
+ default:
+ Log.e(TAG, "play() called in wrong state: " + sound.state + " for sound: "+ soundName);
+ break;
+ }
+ }
+ }
+
    // Completion callback from SoundPool: moves the matching SoundState to
    // STATE_LOADED and, if play() was requested while the sample was loading,
    // starts playback (outside the lock).
    private SoundPool.OnLoadCompleteListener mLoadCompleteListener =
            new SoundPool.OnLoadCompleteListener() {
        public void onLoadComplete(SoundPool soundPool,
                int sampleId, int status) {
            for (SoundState sound : mSounds) {
                if (sound.id != sampleId) {
                    continue;
                }
                int playSoundId = 0;
                synchronized (sound) {
                    if (status != 0) {
                        // Load failed: reset so a later load()/play() can retry.
                        sound.state = STATE_NOT_LOADED;
                        sound.id = 0;
                        Log.e(TAG, "OnLoadCompleteListener() error: " + status +
                                " loading sound: "+ sound.name);
                        return;
                    }
                    switch (sound.state) {
                        case STATE_LOADING:
                            sound.state = STATE_LOADED;
                            break;
                        case STATE_LOADING_PLAY_REQUESTED:
                            // Record the id now; the actual play() happens after
                            // the lock is released.
                            playSoundId = sound.id;
                            sound.state = STATE_LOADED;
                            break;
                        default:
                            Log.e(TAG, "OnLoadCompleteListener() called in wrong state: "
                                    + sound.state + " for sound: "+ sound.name);
                            break;
                    }
                }
                if (playSoundId != 0) {
                    soundPool.play(playSoundId, 1.0f, 1.0f, 0, 0, 1.0f);
                }
                break;
            }
        }
    };
+
+ /**
+ * Free up all audio resources used by this MediaActionSound instance. Do
+ * not call any other methods on a MediaActionSound instance after calling
+ * release().
+ */
+ public void release() {
+ if (mSoundPool != null) {
+ for (SoundState sound : mSounds) {
+ synchronized (sound) {
+ sound.state = STATE_NOT_LOADED;
+ sound.id = 0;
+ }
+ }
+ mSoundPool.release();
+ mSoundPool = null;
+ }
+ }
+}
diff --git a/android/media/MediaCas.java b/android/media/MediaCas.java
new file mode 100644
index 00000000..12352e7f
--- /dev/null
+++ b/android/media/MediaCas.java
@@ -0,0 +1,606 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.hardware.cas.V1_0.*;
+import android.media.MediaCasException.*;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.IHwBinder;
+import android.os.Looper;
+import android.os.Message;
+import android.os.Process;
+import android.os.RemoteException;
+import android.util.Log;
+import android.util.Singleton;
+
+import java.util.ArrayList;
+
+/**
+ * MediaCas can be used to obtain keys for descrambling protected media streams, in
+ * conjunction with {@link android.media.MediaDescrambler}. The MediaCas APIs are
+ * designed to support conditional access such as those in the ISO/IEC13818-1.
+ * The CA system is identified by a 16-bit integer CA_system_id. The scrambling
+ * algorithms are usually proprietary and implemented by vendor-specific CA plugins
+ * installed on the device.
+ * <p>
+ * The app is responsible for constructing a MediaCas object for the CA system it
+ * intends to use. The app can query if a certain CA system is supported using static
+ * method {@link #isSystemIdSupported}. It can also obtain the entire list of supported
+ * CA systems using static method {@link #enumeratePlugins}.
+ * <p>
+ * Once the MediaCas object is constructed, the app should properly provision it by
+ * using method {@link #provision} and/or {@link #processEmm}. The EMMs (Entitlement
+ * management messages) can be distributed out-of-band, or in-band with the stream.
+ * <p>
+ * To descramble elementary streams, the app first calls {@link #openSession} to
+ * generate a {@link Session} object that will uniquely identify a session. A session
+ * provides a context for subsequent key updates and descrambling activities. The ECMs
+ * (Entitlement control messages) are sent to the session via method
+ * {@link Session#processEcm}.
+ * <p>
+ * The app next constructs a MediaDescrambler object, and initializes it with the
+ * session using {@link MediaDescrambler#setMediaCasSession}. This ties the
+ * descrambler to the session, and the descrambler can then be used to descramble
+ * content secured with the session's key, either during extraction, or during decoding
+ * with {@link android.media.MediaCodec}.
+ * <p>
+ * If the app handles sample extraction using its own extractor, it can use
+ * MediaDescrambler to descramble samples into clear buffers (if the session's license
+ * doesn't require secure decoders), or descramble a small amount of data to retrieve
+ * information necessary for the downstream pipeline to process the sample (if the
+ * session's license requires secure decoders).
+ * <p>
+ * If the session requires a secure decoder, a MediaDescrambler needs to be provided to
+ * MediaCodec to descramble samples queued by {@link MediaCodec#queueSecureInputBuffer}
+ * into protected buffers. The app should use {@link MediaCodec#configure(MediaFormat,
+ * android.view.Surface, int, MediaDescrambler)} instead of the normal {@link
+ * MediaCodec#configure(MediaFormat, android.view.Surface, MediaCrypto, int)} method
+ * to configure MediaCodec.
+ * <p>
+ * <h3>Using Android's MediaExtractor</h3>
+ * <p>
+ * If the app uses {@link MediaExtractor}, it can delegate the CAS session
+ * management to MediaExtractor by calling {@link MediaExtractor#setMediaCas}.
+ * MediaExtractor will take over and call {@link #openSession}, {@link #processEmm}
+ * and/or {@link Session#processEcm}, etc.. if necessary.
+ * <p>
+ * When using {@link MediaExtractor}, the app would still need a MediaDescrambler
+ * to use with {@link MediaCodec} if the licensing requires a secure decoder. The
+ * session associated with the descrambler of a track can be retrieved by calling
+ * {@link MediaExtractor#getCasInfo}, and used to initialize a MediaDescrambler
+ * object for MediaCodec.
+ * <p>
+ * <h3>Listeners</h3>
+ * <p>The app may register a listener to receive events from the CA system using
+ * method {@link #setEventListener}. The exact format of the event is scheme-specific
+ * and is not specified by this API.
+ */
+public final class MediaCas implements AutoCloseable {
+    private static final String TAG = "MediaCas";
+    // Handle to the remote CAS plugin; null once close() has run or after a
+    // binder failure invalidates this instance.
+    private ICas mICas;
+    // Listener state configured via setEventListener(); any of these may be
+    // null when no listener is installed.
+    private EventListener mListener;
+    private HandlerThread mHandlerThread;
+    private EventHandler mEventHandler;
+
+    // Process-wide, lazily-created handle to the HIDL MediaCas service.
+    private static final Singleton<IMediaCasService> gDefault =
+            new Singleton<IMediaCasService>() {
+        @Override
+        protected IMediaCasService create() {
+            try {
+                return IMediaCasService.getService();
+            } catch (RemoteException e) {}
+            // Service unavailable; callers must tolerate a null return.
+            return null;
+        }
+    };
+
+    // Returns the shared CAS service handle, or null if unavailable.
+    static IMediaCasService getService() {
+        return gDefault.get();
+    }
+
+    // Throws IllegalStateException when this instance is no longer backed by
+    // a live plugin (closed, or invalidated by an earlier binder error).
+    private void validateInternalStates() {
+        if (mICas == null) {
+            throw new IllegalStateException();
+        }
+    }
+
+    // Invalidates this instance after a RemoteException from the plugin and
+    // surfaces the failure to the caller as an IllegalStateException.
+    private void cleanupAndRethrowIllegalState() {
+        mICas = null;
+        throw new IllegalStateException();
+    }
+
+    // Delivers plugin events to mListener on the looper chosen in
+    // setEventListener().
+    private class EventHandler extends Handler
+    {
+        private static final int MSG_CAS_EVENT = 0;
+
+        public EventHandler(Looper looper) {
+            super(looper);
+        }
+
+        @Override
+        public void handleMessage(Message msg) {
+            if (msg.what == MSG_CAS_EVENT) {
+                mListener.onEvent(MediaCas.this, msg.arg1, msg.arg2,
+                        toBytes((ArrayList<Byte>) msg.obj));
+            }
+        }
+    }
+
+    // Binder callback registered with the plugin at construction time;
+    // reposts each event to mEventHandler so the app's listener runs on its
+    // requested looper rather than on a binder thread.
+    private final ICasListener.Stub mBinder = new ICasListener.Stub() {
+        @Override
+        public void onEvent(int event, int arg, @Nullable ArrayList<Byte> data)
+                throws RemoteException {
+            mEventHandler.sendMessage(mEventHandler.obtainMessage(
+                    EventHandler.MSG_CAS_EVENT, event, arg, data));
+        }
+    };
+
+    /**
+     * Describe a CAS plugin with its CA_system_ID and string name.
+     *
+     * Returned as results of {@link #enumeratePlugins}.
+     *
+     */
+    public static class PluginDescriptor {
+        private final int mCASystemId;
+        private final String mName;
+
+        // Private placeholder constructor; apps obtain descriptors only from
+        // enumeratePlugins().
+        private PluginDescriptor() {
+            mCASystemId = 0xffff;
+            mName = null;
+        }
+
+        PluginDescriptor(@NonNull HidlCasPluginDescriptor descriptor) {
+            mCASystemId = descriptor.caSystemId;
+            mName = descriptor.name;
+        }
+
+        /** Returns the 16-bit CA_system_id of this plugin. */
+        public int getSystemId() {
+            return mCASystemId;
+        }
+
+        /** Returns the display name reported by the plugin. */
+        @NonNull
+        public String getName() {
+            return mName;
+        }
+
+        @Override
+        public String toString() {
+            return "PluginDescriptor {" + mCASystemId + ", " + mName + "}";
+        }
+    }
+
+    // Copies a byte[] window into the ArrayList<Byte> form required by the
+    // HIDL interface.
+    private ArrayList<Byte> toByteArray(@NonNull byte[] data, int offset, int length) {
+        ArrayList<Byte> byteArray = new ArrayList<Byte>(length);
+        for (int i = 0; i < length; i++) {
+            byteArray.add(Byte.valueOf(data[offset + i]));
+        }
+        return byteArray;
+    }
+
+    // Null-tolerant variant: a null input maps to an empty list.
+    private ArrayList<Byte> toByteArray(@Nullable byte[] data) {
+        if (data == null) {
+            return new ArrayList<Byte>();
+        }
+        return toByteArray(data, 0, data.length);
+    }
+
+    // Inverse conversion for data received over HIDL.
+    // NOTE(review): the parameter is annotated @NonNull yet the body guards
+    // against null (EventHandler may deliver a null message payload) — the
+    // annotation looks inaccurate; confirm before relying on it.
+    private byte[] toBytes(@NonNull ArrayList<Byte> byteArray) {
+        byte[] data = null;
+        if (byteArray != null) {
+            data = new byte[byteArray.size()];
+            for (int i = 0; i < data.length; i++) {
+                data[i] = byteArray.get(i);
+            }
+        }
+        return data;
+    }
+
+    /**
+     * Class for an open session with the CA system.
+     */
+    public final class Session implements AutoCloseable {
+        // HIDL identifier of this session, assigned by openSession().
+        final ArrayList<Byte> mSessionId;
+
+        Session(@NonNull ArrayList<Byte> sessionId) {
+            mSessionId = sessionId;
+        }
+
+        /**
+         * Set the private data for a session.
+         *
+         * @param data byte array of the private data.
+         *
+         * @throws IllegalStateException if the MediaCas instance is not valid.
+         * @throws MediaCasException for CAS-specific errors.
+         * @throws MediaCasStateException for CAS-specific state exceptions.
+         */
+        public void setPrivateData(@NonNull byte[] data)
+                throws MediaCasException {
+            validateInternalStates();
+
+            try {
+                MediaCasException.throwExceptionIfNeeded(
+                        mICas.setSessionPrivateData(mSessionId, toByteArray(data, 0, data.length)));
+            } catch (RemoteException e) {
+                cleanupAndRethrowIllegalState();
+            }
+        }
+
+        /**
+         * Send a received ECM packet to the specified session of the CA system.
+         *
+         * @param data byte array of the ECM data.
+         * @param offset position within data where the ECM data begins.
+         * @param length length of the data (starting from offset).
+         *
+         * @throws IllegalStateException if the MediaCas instance is not valid.
+         * @throws MediaCasException for CAS-specific errors.
+         * @throws MediaCasStateException for CAS-specific state exceptions.
+         */
+        public void processEcm(@NonNull byte[] data, int offset, int length)
+                throws MediaCasException {
+            validateInternalStates();
+
+            try {
+                MediaCasException.throwExceptionIfNeeded(
+                        mICas.processEcm(mSessionId, toByteArray(data, offset, length)));
+            } catch (RemoteException e) {
+                cleanupAndRethrowIllegalState();
+            }
+        }
+
+        /**
+         * Send a received ECM packet to the specified session of the CA system.
+         * This is similar to {@link Session#processEcm(byte[], int, int)}
+         * except that the entire byte array is sent.
+         *
+         * @param data byte array of the ECM data.
+         *
+         * @throws IllegalStateException if the MediaCas instance is not valid.
+         * @throws MediaCasException for CAS-specific errors.
+         * @throws MediaCasStateException for CAS-specific state exceptions.
+         */
+        public void processEcm(@NonNull byte[] data) throws MediaCasException {
+            processEcm(data, 0, data.length);
+        }
+
+        /**
+         * Close the session.
+         *
+         * @throws IllegalStateException if the MediaCas instance is not valid.
+         * @throws MediaCasStateException for CAS-specific state exceptions.
+         */
+        @Override
+        public void close() {
+            validateInternalStates();
+
+            try {
+                MediaCasStateException.throwExceptionIfNeeded(
+                        mICas.closeSession(mSessionId));
+            } catch (RemoteException e) {
+                cleanupAndRethrowIllegalState();
+            }
+        }
+    }
+
+    // Wraps a HIDL session id in a Session object; returns null for a
+    // null/empty id (the plugin failed to open a session).
+    Session createFromSessionId(@NonNull ArrayList<Byte> sessionId) {
+        if (sessionId == null || sessionId.size() == 0) {
+            return null;
+        }
+        return new Session(sessionId);
+    }
+
+    /**
+     * Query if a certain CA system is supported on this device.
+     *
+     * @param CA_system_id the id of the CA system.
+     *
+     * @return Whether the specified CA system is supported on this device.
+     */
+    public static boolean isSystemIdSupported(int CA_system_id) {
+        IMediaCasService service = getService();
+
+        if (service != null) {
+            try {
+                return service.isSystemIdSupported(CA_system_id);
+            } catch (RemoteException e) {
+            }
+        }
+        // Service missing or unreachable: report unsupported.
+        return false;
+    }
+
+    /**
+     * List all available CA plugins on the device.
+     *
+     * @return an array of descriptors for the available CA plugins.
+     */
+    public static PluginDescriptor[] enumeratePlugins() {
+        IMediaCasService service = getService();
+
+        if (service != null) {
+            try {
+                ArrayList<HidlCasPluginDescriptor> descriptors =
+                        service.enumeratePlugins();
+                // Returns null (not an empty array) when no plugins exist.
+                if (descriptors.size() == 0) {
+                    return null;
+                }
+                PluginDescriptor[] results = new PluginDescriptor[descriptors.size()];
+                for (int i = 0; i < results.length; i++) {
+                    results[i] = new PluginDescriptor(descriptors.get(i));
+                }
+                return results;
+            } catch (RemoteException e) {
+            }
+        }
+        return null;
+    }
+
+    /**
+     * Instantiate a CA system of the specified system id.
+     *
+     * @param CA_system_id The system id of the CA system.
+     *
+     * @throws UnsupportedCasException if the device does not support the
+     * specified CA system.
+     */
+    public MediaCas(int CA_system_id) throws UnsupportedCasException {
+        try {
+            // getService() may return null (NullPointerException here) and
+            // createPlugin may fail remotely; every failure path leaves mICas
+            // null and is folded into UnsupportedCasException below.
+            mICas = getService().createPlugin(CA_system_id, mBinder);
+        } catch(Exception e) {
+            Log.e(TAG, "Failed to create plugin: " + e);
+            mICas = null;
+        } finally {
+            if (mICas == null) {
+                throw new UnsupportedCasException(
+                        "Unsupported CA_system_id " + CA_system_id);
+            }
+        }
+    }
+
+    // Exposes the plugin's binder so framework classes (e.g. descrambler /
+    // extractor plumbing — package-private on purpose) can reference this
+    // CAS instance.
+    IHwBinder getBinder() {
+        validateInternalStates();
+
+        return mICas.asBinder();
+    }
+
+    /**
+     * An interface registered by the caller to {@link #setEventListener}
+     * to receives scheme-specific notifications from a MediaCas instance.
+     */
+    public interface EventListener {
+        /**
+         * Notify the listener of a scheme-specific event from the CA system.
+         *
+         * @param MediaCas the MediaCas object to receive this event.
+         * @param event an integer whose meaning is scheme-specific.
+         * @param arg an integer whose meaning is scheme-specific.
+         * @param data a byte array of data whose format and meaning are
+         * scheme-specific.
+         */
+        void onEvent(MediaCas MediaCas, int event, int arg, @Nullable byte[] data);
+    }
+
+    /**
+     * Set an event listener to receive notifications from the MediaCas instance.
+     *
+     * @param listener the event listener to be set.
+     * @param handler the handler whose looper the event listener will be called on.
+     * If handler is null, we'll try to use current thread's looper, or the main
+     * looper. If neither are available, an internal thread will be created instead.
+     */
+    public void setEventListener(
+            @Nullable EventListener listener, @Nullable Handler handler) {
+        mListener = listener;
+
+        if (mListener == null) {
+            mEventHandler = null;
+            return;
+        }
+
+        // Prefer the supplied handler's looper, then the calling thread's,
+        // then the main looper; only create a private worker thread when
+        // none of those exist.
+        Looper looper = (handler != null) ? handler.getLooper() : null;
+        if (looper == null
+                && (looper = Looper.myLooper()) == null
+                && (looper = Looper.getMainLooper()) == null) {
+            if (mHandlerThread == null || !mHandlerThread.isAlive()) {
+                mHandlerThread = new HandlerThread("MediaCasEventThread",
+                        Process.THREAD_PRIORITY_FOREGROUND);
+                mHandlerThread.start();
+            }
+            looper = mHandlerThread.getLooper();
+        }
+        mEventHandler = new EventHandler(looper);
+    }
+
+    /**
+     * Send the private data for the CA system.
+     *
+     * @param data byte array of the private data.
+     *
+     * @throws IllegalStateException if the MediaCas instance is not valid.
+     * @throws MediaCasException for CAS-specific errors.
+     * @throws MediaCasStateException for CAS-specific state exceptions.
+     */
+    public void setPrivateData(@NonNull byte[] data) throws MediaCasException {
+        validateInternalStates();
+
+        try {
+            MediaCasException.throwExceptionIfNeeded(
+                    mICas.setPrivateData(toByteArray(data, 0, data.length)));
+        } catch (RemoteException e) {
+            cleanupAndRethrowIllegalState();
+        }
+    }
+
+    // One-shot HIDL callback that captures openSession()'s status code and
+    // the resulting Session (null if the plugin returned no id).
+    private class OpenSessionCallback implements ICas.openSessionCallback {
+        public Session mSession;
+        public int mStatus;
+        @Override
+        public void onValues(int status, ArrayList<Byte> sessionId) {
+            mStatus = status;
+            mSession = createFromSessionId(sessionId);
+        }
+    }
+
+    /**
+     * Open a session to descramble one or more streams scrambled by the
+     * conditional access system.
+     *
+     * @return session the newly opened session.
+     *
+     * @throws IllegalStateException if the MediaCas instance is not valid.
+     * @throws MediaCasException for CAS-specific errors.
+     * @throws MediaCasStateException for CAS-specific state exceptions.
+     */
+    public Session openSession() throws MediaCasException {
+        validateInternalStates();
+
+        try {
+            OpenSessionCallback cb = new OpenSessionCallback();
+            mICas.openSession(cb);
+            MediaCasException.throwExceptionIfNeeded(cb.mStatus);
+            return cb.mSession;
+        } catch (RemoteException e) {
+            cleanupAndRethrowIllegalState();
+        }
+        // Unreachable: cleanupAndRethrowIllegalState() always throws.
+        return null;
+    }
+
+    /**
+     * Send a received EMM packet to the CA system.
+     *
+     * @param data byte array of the EMM data.
+     * @param offset position within data where the EMM data begins.
+     * @param length length of the data (starting from offset).
+     *
+     * @throws IllegalStateException if the MediaCas instance is not valid.
+     * @throws MediaCasException for CAS-specific errors.
+     * @throws MediaCasStateException for CAS-specific state exceptions.
+     */
+    public void processEmm(@NonNull byte[] data, int offset, int length)
+            throws MediaCasException {
+        validateInternalStates();
+
+        try {
+            MediaCasException.throwExceptionIfNeeded(
+                    mICas.processEmm(toByteArray(data, offset, length)));
+        } catch (RemoteException e) {
+            cleanupAndRethrowIllegalState();
+        }
+    }
+
+    /**
+     * Send a received EMM packet to the CA system. This is similar to
+     * {@link #processEmm(byte[], int, int)} except that the entire byte
+     * array is sent.
+     *
+     * @param data byte array of the EMM data.
+     *
+     * @throws IllegalStateException if the MediaCas instance is not valid.
+     * @throws MediaCasException for CAS-specific errors.
+     * @throws MediaCasStateException for CAS-specific state exceptions.
+     */
+    public void processEmm(@NonNull byte[] data) throws MediaCasException {
+        processEmm(data, 0, data.length);
+    }
+
+    /**
+     * Send an event to a CA system. The format of the event is scheme-specific
+     * and is opaque to the framework.
+     *
+     * @param event an integer denoting a scheme-specific event to be sent.
+     * @param arg a scheme-specific integer argument for the event.
+     * @param data a byte array containing scheme-specific data for the event.
+     *
+     * @throws IllegalStateException if the MediaCas instance is not valid.
+     * @throws MediaCasException for CAS-specific errors.
+     * @throws MediaCasStateException for CAS-specific state exceptions.
+     */
+    public void sendEvent(int event, int arg, @Nullable byte[] data)
+            throws MediaCasException {
+        validateInternalStates();
+
+        try {
+            MediaCasException.throwExceptionIfNeeded(
+                    mICas.sendEvent(event, arg, toByteArray(data)));
+        } catch (RemoteException e) {
+            cleanupAndRethrowIllegalState();
+        }
+    }
+
+    /**
+     * Initiate a provisioning operation for a CA system.
+     *
+     * @param provisionString string containing information needed for the
+     * provisioning operation, the format of which is scheme and implementation
+     * specific.
+     *
+     * @throws IllegalStateException if the MediaCas instance is not valid.
+     * @throws MediaCasException for CAS-specific errors.
+     * @throws MediaCasStateException for CAS-specific state exceptions.
+     */
+    public void provision(@NonNull String provisionString) throws MediaCasException {
+        validateInternalStates();
+
+        try {
+            MediaCasException.throwExceptionIfNeeded(
+                    mICas.provision(provisionString));
+        } catch (RemoteException e) {
+            cleanupAndRethrowIllegalState();
+        }
+    }
+
+    /**
+     * Notify the CA system to refresh entitlement keys.
+     *
+     * @param refreshType the type of the refreshment.
+     * @param refreshData private data associated with the refreshment.
+     *
+     * @throws IllegalStateException if the MediaCas instance is not valid.
+     * @throws MediaCasException for CAS-specific errors.
+     * @throws MediaCasStateException for CAS-specific state exceptions.
+     */
+    public void refreshEntitlements(int refreshType, @Nullable byte[] refreshData)
+            throws MediaCasException {
+        validateInternalStates();
+
+        try {
+            MediaCasException.throwExceptionIfNeeded(
+                    mICas.refreshEntitlements(refreshType, toByteArray(refreshData)));
+        } catch (RemoteException e) {
+            cleanupAndRethrowIllegalState();
+        }
+    }
+
+    @Override
+    public void close() {
+        if (mICas != null) {
+            try {
+                mICas.release();
+            } catch (RemoteException e) {
+            } finally {
+                // Always drop the reference so this instance reads as closed.
+                mICas = null;
+            }
+        }
+    }
+
+    // Safety net: release native plugin resources even if the app never
+    // called close().
+    @Override
+    protected void finalize() {
+        close();
+    }
+}
diff --git a/android/media/MediaCasException.java b/android/media/MediaCasException.java
new file mode 100644
index 00000000..35fb104b
--- /dev/null
+++ b/android/media/MediaCasException.java
@@ -0,0 +1,88 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.hardware.cas.V1_0.Status;
+
+/**
+ * Base class for MediaCas exceptions
+ */
+public class MediaCasException extends Exception {
+ private MediaCasException(String detailMessage) {
+ super(detailMessage);
+ }
+
+ static void throwExceptionIfNeeded(int error) throws MediaCasException {
+ if (error == Status.OK) {
+ return;
+ }
+
+ if (error == Status.ERROR_CAS_NOT_PROVISIONED) {
+ throw new NotProvisionedException(null);
+ } else if (error == Status.ERROR_CAS_RESOURCE_BUSY) {
+ throw new ResourceBusyException(null);
+ } else if (error == Status.ERROR_CAS_DEVICE_REVOKED) {
+ throw new DeniedByServerException(null);
+ } else {
+ MediaCasStateException.throwExceptionIfNeeded(error);
+ }
+ }
+
+ /**
+ * Exception thrown when an attempt is made to construct a MediaCas object
+ * using a CA_system_id that is not supported by the device
+ */
+ public static final class UnsupportedCasException extends MediaCasException {
+ /** @hide */
+ public UnsupportedCasException(String detailMessage) {
+ super(detailMessage);
+ }
+ }
+
+ /**
+ * Exception thrown when an operation on a MediaCas object is attempted
+ * before it's provisioned successfully.
+ */
+ public static final class NotProvisionedException extends MediaCasException {
+ /** @hide */
+ public NotProvisionedException(String detailMessage) {
+ super(detailMessage);
+ }
+ }
+
+ /**
+ * Exception thrown when the provisioning server or key server denies a
+ * license for a device.
+ */
+ public static final class DeniedByServerException extends MediaCasException {
+ /** @hide */
+ public DeniedByServerException(String detailMessage) {
+ super(detailMessage);
+ }
+ }
+
+ /**
+ * Exception thrown when an operation on a MediaCas object is attempted
+ * and hardware resources are not available, due to being in use.
+ */
+ public static final class ResourceBusyException extends MediaCasException {
+ /** @hide */
+ public ResourceBusyException(String detailMessage) {
+ super(detailMessage);
+ }
+ }
+}
diff --git a/android/media/MediaCasStateException.java b/android/media/MediaCasStateException.java
new file mode 100644
index 00000000..26c57923
--- /dev/null
+++ b/android/media/MediaCasStateException.java
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+
+import android.hardware.cas.V1_0.Status;
+
+/**
+ * Base class for MediaCas runtime exceptions
+ */
+public class MediaCasStateException extends IllegalStateException {
+ private final int mErrorCode;
+ private final String mDiagnosticInfo;
+
+ private MediaCasStateException(int err, @Nullable String msg, @Nullable String diagnosticInfo) {
+ super(msg);
+ mErrorCode = err;
+ mDiagnosticInfo = diagnosticInfo;
+ }
+
+ static void throwExceptionIfNeeded(int err) {
+ throwExceptionIfNeeded(err, null /* msg */);
+ }
+
+ static void throwExceptionIfNeeded(int err, @Nullable String msg) {
+ if (err == Status.OK) {
+ return;
+ }
+ if (err == Status.BAD_VALUE) {
+ throw new IllegalArgumentException();
+ }
+
+ String diagnosticInfo = "";
+ switch (err) {
+ case Status.ERROR_CAS_UNKNOWN:
+ diagnosticInfo = "General CAS error";
+ break;
+ case Status.ERROR_CAS_NO_LICENSE:
+ diagnosticInfo = "No license";
+ break;
+ case Status.ERROR_CAS_LICENSE_EXPIRED:
+ diagnosticInfo = "License expired";
+ break;
+ case Status.ERROR_CAS_SESSION_NOT_OPENED:
+ diagnosticInfo = "Session not opened";
+ break;
+ case Status.ERROR_CAS_CANNOT_HANDLE:
+ diagnosticInfo = "Unsupported scheme or data format";
+ break;
+ case Status.ERROR_CAS_INVALID_STATE:
+ diagnosticInfo = "Invalid CAS state";
+ break;
+ case Status.ERROR_CAS_INSUFFICIENT_OUTPUT_PROTECTION:
+ diagnosticInfo = "Insufficient output protection";
+ break;
+ case Status.ERROR_CAS_TAMPER_DETECTED:
+ diagnosticInfo = "Tamper detected";
+ break;
+ case Status.ERROR_CAS_DECRYPT_UNIT_NOT_INITIALIZED:
+ diagnosticInfo = "Not initialized";
+ break;
+ case Status.ERROR_CAS_DECRYPT:
+ diagnosticInfo = "Decrypt error";
+ break;
+ default:
+ diagnosticInfo = "Unknown CAS state exception";
+ break;
+ }
+ throw new MediaCasStateException(err, msg,
+ String.format("%s (err=%d)", diagnosticInfo, err));
+ }
+
+ /**
+ * Retrieve the associated error code
+ *
+ * @hide
+ */
+ public int getErrorCode() {
+ return mErrorCode;
+ }
+
+ /**
+ * Retrieve a developer-readable diagnostic information string
+ * associated with the exception. Do not show this to end-users,
+ * since this string will not be localized or generally comprehensible
+ * to end-users.
+ */
+ @NonNull
+ public String getDiagnosticInfo() {
+ return mDiagnosticInfo;
+ }
+}
diff --git a/android/media/MediaCodec.java b/android/media/MediaCodec.java
new file mode 100644
index 00000000..3d5f6bc9
--- /dev/null
+++ b/android/media/MediaCodec.java
@@ -0,0 +1,3748 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.graphics.ImageFormat;
+import android.graphics.Rect;
+import android.graphics.SurfaceTexture;
+import android.media.MediaCodecInfo.CodecCapabilities;
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.IBinder;
+import android.os.IHwBinder;
+import android.os.Looper;
+import android.os.Message;
+import android.os.PersistableBundle;
+import android.view.Surface;
+
+import java.io.IOException;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.ReadOnlyBufferException;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ MediaCodec class can be used to access low-level media codecs, i.e. encoder/decoder components.
+ It is part of the Android low-level multimedia support infrastructure (normally used together
+ with {@link MediaExtractor}, {@link MediaSync}, {@link MediaMuxer}, {@link MediaCrypto},
+ {@link MediaDrm}, {@link Image}, {@link Surface}, and {@link AudioTrack}.)
+ <p>
+ <center><object style="width: 540px; height: 205px;" type="image/svg+xml"
+ data="../../../images/media/mediacodec_buffers.svg"><img
+ src="../../../images/media/mediacodec_buffers.png" style="width: 540px; height: 205px"
+ alt="MediaCodec buffer flow diagram"></object></center>
+ <p>
+ In broad terms, a codec processes input data to generate output data. It processes data
+ asynchronously and uses a set of input and output buffers. At a simplistic level, you request
+ (or receive) an empty input buffer, fill it up with data and send it to the codec for
+ processing. The codec uses up the data and transforms it into one of its empty output buffers.
+ Finally, you request (or receive) a filled output buffer, consume its contents and release it
+ back to the codec.
+
+ <h3>Data Types</h3>
+ <p>
+ Codecs operate on three kinds of data: compressed data, raw audio data and raw video data.
+ All three kinds of data can be processed using {@link ByteBuffer ByteBuffers}, but you should use
+ a {@link Surface} for raw video data to improve codec performance. Surface uses native video
+ buffers without mapping or copying them to ByteBuffers; thus, it is much more efficient.
+ You normally cannot access the raw video data when using a Surface, but you can use the
+ {@link ImageReader} class to access unsecured decoded (raw) video frames. This may still be more
+ efficient than using ByteBuffers, as some native buffers may be mapped into {@linkplain
+ ByteBuffer#isDirect direct} ByteBuffers. When using ByteBuffer mode, you can access raw video
+ frames using the {@link Image} class and {@link #getInputImage getInput}/{@link #getOutputImage
+ OutputImage(int)}.
+
+ <h4>Compressed Buffers</h4>
+ <p>
+ Input buffers (for decoders) and output buffers (for encoders) contain compressed data according
+ to the {@linkplain MediaFormat#KEY_MIME format's type}. For video types this is normally a single
+ compressed video frame. For audio data this is normally a single access unit (an encoded audio
+ segment typically containing a few milliseconds of audio as dictated by the format type), but
+ this requirement is slightly relaxed in that a buffer may contain multiple encoded access units
+ of audio. In either case, buffers do not start or end on arbitrary byte boundaries, but rather on
+ frame/access unit boundaries unless they are flagged with {@link #BUFFER_FLAG_PARTIAL_FRAME}.
+
+ <h4>Raw Audio Buffers</h4>
+ <p>
+ Raw audio buffers contain entire frames of PCM audio data, which is one sample for each channel
+ in channel order. Each sample is a {@linkplain AudioFormat#ENCODING_PCM_16BIT 16-bit signed
+ integer in native byte order}.
+
+ <pre class=prettyprint>
+ short[] getSamplesForChannel(MediaCodec codec, int bufferId, int channelIx) {
+ ByteBuffer outputBuffer = codec.getOutputBuffer(bufferId);
+ MediaFormat format = codec.getOutputFormat(bufferId);
+ ShortBuffer samples = outputBuffer.order(ByteOrder.nativeOrder()).asShortBuffer();
+   int numChannels = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
+ if (channelIx &lt; 0 || channelIx &gt;= numChannels) {
+ return null;
+ }
+ short[] res = new short[samples.remaining() / numChannels];
+ for (int i = 0; i &lt; res.length; ++i) {
+ res[i] = samples.get(i * numChannels + channelIx);
+ }
+ return res;
+ }</pre>
+
+ <h4>Raw Video Buffers</h4>
+ <p>
+ In ByteBuffer mode video buffers are laid out according to their {@linkplain
+ MediaFormat#KEY_COLOR_FORMAT color format}. You can get the supported color formats as an array
+ from {@link #getCodecInfo}{@code .}{@link MediaCodecInfo#getCapabilitiesForType
+ getCapabilitiesForType(&hellip;)}{@code .}{@link CodecCapabilities#colorFormats colorFormats}.
+ Video codecs may support three kinds of color formats:
+ <ul>
+ <li><strong>native raw video format:</strong> This is marked by {@link
+ CodecCapabilities#COLOR_FormatSurface} and it can be used with an input or output Surface.</li>
+ <li><strong>flexible YUV buffers</strong> (such as {@link
+ CodecCapabilities#COLOR_FormatYUV420Flexible}): These can be used with an input/output Surface,
+ as well as in ByteBuffer mode, by using {@link #getInputImage getInput}/{@link #getOutputImage
+ OutputImage(int)}.</li>
+ <li><strong>other, specific formats:</strong> These are normally only supported in ByteBuffer
+ mode. Some color formats are vendor specific. Others are defined in {@link CodecCapabilities}.
+ For color formats that are equivalent to a flexible format, you can still use {@link
+ #getInputImage getInput}/{@link #getOutputImage OutputImage(int)}.</li>
+ </ul>
+ <p>
+ All video codecs support flexible YUV 4:2:0 buffers since {@link
+ android.os.Build.VERSION_CODES#LOLLIPOP_MR1}.
+
+ <h4>Accessing Raw Video ByteBuffers on Older Devices</h4>
+ <p>
+ Prior to {@link android.os.Build.VERSION_CODES#LOLLIPOP} and {@link Image} support, you need to
+ use the {@link MediaFormat#KEY_STRIDE} and {@link MediaFormat#KEY_SLICE_HEIGHT} output format
+ values to understand the layout of the raw output buffers.
+ <p class=note>
+ Note that on some devices the slice-height is advertised as 0. This could mean either that the
+ slice-height is the same as the frame height, or that the slice-height is the frame height
+ aligned to some value (usually a power of 2). Unfortunately, there is no standard and simple way
+ to tell the actual slice height in this case. Furthermore, the vertical stride of the {@code U}
+ plane in planar formats is also not specified or defined, though usually it is half of the slice
+ height.
+ <p>
+ The {@link MediaFormat#KEY_WIDTH} and {@link MediaFormat#KEY_HEIGHT} keys specify the size of the
+ video frames; however, for most encodings the video (picture) only occupies a portion of the
+ video frame. This is represented by the 'crop rectangle'.
+ <p>
+ You need to use the following keys to get the crop rectangle of raw output images from the
+ {@linkplain #getOutputFormat output format}. If these keys are not present, the video occupies the
+ entire video frame. The crop rectangle is understood in the context of the output frame
+ <em>before</em> applying any {@linkplain MediaFormat#KEY_ROTATION rotation}.
+ <table style="width: 0%">
+ <thead>
+ <tr>
+ <th>Format Key</th>
+ <th>Type</th>
+ <th>Description</th>
+ </tr>
+ </thead>
+ <tbody>
+ <tr>
+ <td>{@code "crop-left"}</td>
+ <td>Integer</td>
+ <td>The left-coordinate (x) of the crop rectangle</td>
+ </tr><tr>
+ <td>{@code "crop-top"}</td>
+ <td>Integer</td>
+ <td>The top-coordinate (y) of the crop rectangle</td>
+ </tr><tr>
+ <td>{@code "crop-right"}</td>
+ <td>Integer</td>
+ <td>The right-coordinate (x) <strong>MINUS 1</strong> of the crop rectangle</td>
+ </tr><tr>
+ <td>{@code "crop-bottom"}</td>
+ <td>Integer</td>
+ <td>The bottom-coordinate (y) <strong>MINUS 1</strong> of the crop rectangle</td>
+ </tr><tr>
+ <td colspan=3>
+ The right and bottom coordinates can be understood as the coordinates of the right-most
+ valid column/bottom-most valid row of the cropped output image.
+ </td>
+ </tr>
+ </tbody>
+ </table>
+ <p>
+ The size of the video frame (before rotation) can be calculated as such:
+ <pre class=prettyprint>
+ MediaFormat format = decoder.getOutputFormat(&hellip;);
+ int width = format.getInteger(MediaFormat.KEY_WIDTH);
+ if (format.containsKey("crop-left") && format.containsKey("crop-right")) {
+ width = format.getInteger("crop-right") + 1 - format.getInteger("crop-left");
+ }
+ int height = format.getInteger(MediaFormat.KEY_HEIGHT);
+ if (format.containsKey("crop-top") && format.containsKey("crop-bottom")) {
+ height = format.getInteger("crop-bottom") + 1 - format.getInteger("crop-top");
+ }
+ </pre>
+ <p class=note>
+ Also note that the meaning of {@link BufferInfo#offset BufferInfo.offset} was not consistent across
+ devices. On some devices the offset pointed to the top-left pixel of the crop rectangle, while on
+ most devices it pointed to the top-left pixel of the entire frame.
+
+ <h3>States</h3>
+ <p>
+ During its life a codec conceptually exists in one of three states: Stopped, Executing or
+ Released. The Stopped collective state is actually the conglomeration of three states:
+ Uninitialized, Configured and Error, whereas the Executing state conceptually progresses through
+ three sub-states: Flushed, Running and End-of-Stream.
+ <p>
+ <center><object style="width: 516px; height: 353px;" type="image/svg+xml"
+ data="../../../images/media/mediacodec_states.svg"><img
+ src="../../../images/media/mediacodec_states.png" style="width: 519px; height: 356px"
+ alt="MediaCodec state diagram"></object></center>
+ <p>
+ When you create a codec using one of the factory methods, the codec is in the Uninitialized
+ state. First, you need to configure it via {@link #configure configure(&hellip;)}, which brings
+ it to the Configured state, then call {@link #start} to move it to the Executing state. In this
+ state you can process data through the buffer queue manipulation described above.
+ <p>
+ The Executing state has three sub-states: Flushed, Running and End-of-Stream. Immediately after
+ {@link #start} the codec is in the Flushed sub-state, where it holds all the buffers. As soon
+ as the first input buffer is dequeued, the codec moves to the Running sub-state, where it spends
+ most of its life. When you queue an input buffer with the {@linkplain #BUFFER_FLAG_END_OF_STREAM
+ end-of-stream marker}, the codec transitions to the End-of-Stream sub-state. In this state the
+ codec no longer accepts further input buffers, but still generates output buffers until the
+ end-of-stream is reached on the output. You can move back to the Flushed sub-state at any time
+ while in the Executing state using {@link #flush}.
+ <p>
+ Call {@link #stop} to return the codec to the Uninitialized state, whereupon it may be configured
+ again. When you are done using a codec, you must release it by calling {@link #release}.
+ <p>
+ On rare occasions the codec may encounter an error and move to the Error state. This is
+ communicated using an invalid return value from a queuing operation, or sometimes via an
+ exception. Call {@link #reset} to make the codec usable again. You can call it from any state to
+ move the codec back to the Uninitialized state. Otherwise, call {@link #release} to move to the
+ terminal Released state.
+
+ <h3>Creation</h3>
+ <p>
+ Use {@link MediaCodecList} to create a MediaCodec for a specific {@link MediaFormat}. When
+ decoding a file or a stream, you can get the desired format from {@link
+ MediaExtractor#getTrackFormat MediaExtractor.getTrackFormat}. Inject any specific features that
+ you want to add using {@link MediaFormat#setFeatureEnabled MediaFormat.setFeatureEnabled}, then
+ call {@link MediaCodecList#findDecoderForFormat MediaCodecList.findDecoderForFormat} to get the
+ name of a codec that can handle that specific media format. Finally, create the codec using
+ {@link #createByCodecName}.
+ <p class=note>
+ <strong>Note:</strong> On {@link android.os.Build.VERSION_CODES#LOLLIPOP}, the format to
+ {@code MediaCodecList.findDecoder}/{@code EncoderForFormat} must not contain a {@linkplain
+ MediaFormat#KEY_FRAME_RATE frame rate}. Use
+ <code class=prettyprint>format.setString(MediaFormat.KEY_FRAME_RATE, null)</code>
+ to clear any existing frame rate setting in the format.
+ <p>
+ You can also create the preferred codec for a specific MIME type using {@link
+ #createDecoderByType createDecoder}/{@link #createEncoderByType EncoderByType(String)}.
+ This, however, cannot be used to inject features, and may create a codec that cannot handle the
+ specific desired media format.
+
+ <h4>Creating secure decoders</h4>
+ <p>
+ On versions {@link android.os.Build.VERSION_CODES#KITKAT_WATCH} and earlier, secure codecs might
+ not be listed in {@link MediaCodecList}, but may still be available on the system. Secure codecs
+ that exist can be instantiated by name only, by appending {@code ".secure"} to the name of a
+ regular codec (the name of all secure codecs must end in {@code ".secure"}.) {@link
+ #createByCodecName} will throw an {@code IOException} if the codec is not present on the system.
+ <p>
+ From {@link android.os.Build.VERSION_CODES#LOLLIPOP} onwards, you should use the {@link
+ CodecCapabilities#FEATURE_SecurePlayback} feature in the media format to create a secure decoder.
+
+ <h3>Initialization</h3>
+ <p>
+ After creating the codec, you can set a callback using {@link #setCallback setCallback} if you
+ want to process data asynchronously. Then, {@linkplain #configure configure} the codec using the
+ specific media format. This is when you can specify the output {@link Surface} for video
+ producers &ndash; codecs that generate raw video data (e.g. video decoders). This is also when
+ you can set the decryption parameters for secure codecs (see {@link MediaCrypto}). Finally, since
+ some codecs can operate in multiple modes, you must specify whether you want it to work as a
+ decoder or an encoder.
+ <p>
+ Since {@link android.os.Build.VERSION_CODES#LOLLIPOP}, you can query the resulting input and
+ output format in the Configured state. You can use this to verify the resulting configuration,
+ e.g. color formats, before starting the codec.
+ <p>
+ If you want to process raw input video buffers natively with a video consumer &ndash; a codec
+ that processes raw video input, such as a video encoder &ndash; create a destination Surface for
+ your input data using {@link #createInputSurface} after configuration. Alternately, set up the
+ codec to use a previously created {@linkplain #createPersistentInputSurface persistent input
+ surface} by calling {@link #setInputSurface}.
+
+ <h4 id=CSD><a name="CSD"></a>Codec-specific Data</h4>
+ <p>
+ Some formats, notably AAC audio and MPEG4, H.264 and H.265 video formats require the actual data
+ to be prefixed by a number of buffers containing setup data, or codec specific data. When
+ processing such compressed formats, this data must be submitted to the codec after {@link
+ #start} and before any frame data. Such data must be marked using the flag {@link
+ #BUFFER_FLAG_CODEC_CONFIG} in a call to {@link #queueInputBuffer queueInputBuffer}.
+ <p>
+ Codec-specific data can also be included in the format passed to {@link #configure configure} in
+ ByteBuffer entries with keys "csd-0", "csd-1", etc. These keys are always included in the track
+ {@link MediaFormat} obtained from the {@link MediaExtractor#getTrackFormat MediaExtractor}.
+ Codec-specific data in the format is automatically submitted to the codec upon {@link #start};
+ you <strong>MUST NOT</strong> submit this data explicitly. If the format did not contain codec
+ specific data, you can choose to submit it using the specified number of buffers in the correct
+ order, according to the format requirements. In case of H.264 AVC, you can also concatenate all
+ codec-specific data and submit it as a single codec-config buffer.
+ <p>
+ Android uses the following codec-specific data buffers. These are also required to be set in
+ the track format for proper {@link MediaMuxer} track configuration. Each parameter set and the
+ codec-specific-data sections marked with (<sup>*</sup>) must start with a start code of
+ {@code "\x00\x00\x00\x01"}.
+ <p>
+ <style>td.NA { background: #ccc; } .mid > tr > td { vertical-align: middle; }</style>
+ <table>
+ <thead>
+ <th>Format</th>
+ <th>CSD buffer #0</th>
+ <th>CSD buffer #1</th>
+ <th>CSD buffer #2</th>
+ </thead>
+ <tbody class=mid>
+ <tr>
+ <td>AAC</td>
+ <td>Decoder-specific information from ESDS<sup>*</sup></td>
+ <td class=NA>Not Used</td>
+ <td class=NA>Not Used</td>
+ </tr>
+ <tr>
+ <td>VORBIS</td>
+ <td>Identification header</td>
+ <td>Setup header</td>
+ <td class=NA>Not Used</td>
+ </tr>
+ <tr>
+ <td>OPUS</td>
+ <td>Identification header</td>
+ <td>Pre-skip in nanosecs<br>
+ (unsigned 64-bit {@linkplain ByteOrder#nativeOrder native-order} integer.)<br>
+ This overrides the pre-skip value in the identification header.</td>
+ <td>Seek Pre-roll in nanosecs<br>
+ (unsigned 64-bit {@linkplain ByteOrder#nativeOrder native-order} integer.)</td>
+ </tr>
+ <tr>
+ <td>FLAC</td>
+ <td>mandatory metadata block (called the STREAMINFO block),<br>
+ optionally followed by any number of other metadata blocks</td>
+ <td class=NA>Not Used</td>
+ <td class=NA>Not Used</td>
+ </tr>
+ <tr>
+ <td>MPEG-4</td>
+ <td>Decoder-specific information from ESDS<sup>*</sup></td>
+ <td class=NA>Not Used</td>
+ <td class=NA>Not Used</td>
+ </tr>
+ <tr>
+ <td>H.264 AVC</td>
+ <td>SPS (Sequence Parameter Sets<sup>*</sup>)</td>
+ <td>PPS (Picture Parameter Sets<sup>*</sup>)</td>
+ <td class=NA>Not Used</td>
+ </tr>
+ <tr>
+ <td>H.265 HEVC</td>
+ <td>VPS (Video Parameter Sets<sup>*</sup>) +<br>
+ SPS (Sequence Parameter Sets<sup>*</sup>) +<br>
+ PPS (Picture Parameter Sets<sup>*</sup>)</td>
+ <td class=NA>Not Used</td>
+ <td class=NA>Not Used</td>
+ </tr>
+ <tr>
+ <td>VP9</td>
+ <td>VP9 <a href="http://wiki.webmproject.org/vp9-codecprivate">CodecPrivate</a> Data
+ (optional)</td>
+ <td class=NA>Not Used</td>
+ <td class=NA>Not Used</td>
+ </tr>
+ </tbody>
+ </table>
+
+ <p class=note>
+ <strong>Note:</strong> care must be taken if the codec is flushed immediately or shortly
+ after start, before any output buffer or output format change has been returned, as the codec
+ specific data may be lost during the flush. You must resubmit the data using buffers marked with
+ {@link #BUFFER_FLAG_CODEC_CONFIG} after such flush to ensure proper codec operation.
+ <p>
+ Encoders (or codecs that generate compressed data) will create and return the codec specific data
+ before any valid output buffer in output buffers marked with the {@linkplain
+ #BUFFER_FLAG_CODEC_CONFIG codec-config flag}. Buffers containing codec-specific-data have no
+ meaningful timestamps.
+
+ <h3>Data Processing</h3>
+ <p>
+ Each codec maintains a set of input and output buffers that are referred to by a buffer-ID in
+ API calls. After a successful call to {@link #start} the client "owns" neither input nor output
+ buffers. In synchronous mode, call {@link #dequeueInputBuffer dequeueInput}/{@link
+ #dequeueOutputBuffer OutputBuffer(&hellip;)} to obtain (get ownership of) an input or output
+ buffer from the codec. In asynchronous mode, you will automatically receive available buffers via
+ the {@link Callback#onInputBufferAvailable MediaCodec.Callback.onInput}/{@link
+ Callback#onOutputBufferAvailable OutputBufferAvailable(&hellip;)} callbacks.
+ <p>
+ Upon obtaining an input buffer, fill it with data and submit it to the codec using {@link
+ #queueInputBuffer queueInputBuffer} &ndash; or {@link #queueSecureInputBuffer
+ queueSecureInputBuffer} if using decryption. Do not submit multiple input buffers with the same
+ timestamp (unless it is <a href="#CSD">codec-specific data</a> marked as such).
+ <p>
+ The codec in turn will return a read-only output buffer via the {@link
+ Callback#onOutputBufferAvailable onOutputBufferAvailable} callback in asynchronous mode, or in
+ response to a {@link #dequeueOutputBuffer dequeueOutputBuffer} call in synchronous mode. After the
+ output buffer has been processed, call one of the {@link #releaseOutputBuffer
+ releaseOutputBuffer} methods to return the buffer to the codec.
+ <p>
+ While you are not required to resubmit/release buffers immediately to the codec, holding onto
+ input and/or output buffers may stall the codec, and this behavior is device dependent.
+ <strong>Specifically, it is possible that a codec may hold off on generating output buffers until
+ <em>all</em> outstanding buffers have been released/resubmitted.</strong> Therefore, try to
+ hold on to available buffers as little as possible.
+ <p>
+ Depending on the API version, you can process data in three ways:
+ <table>
+ <thead>
+ <tr>
+ <th>Processing Mode</th>
+ <th>API version <= 20<br>Jelly Bean/KitKat</th>
+ <th>API version >= 21<br>Lollipop and later</th>
+ </tr>
+ </thead>
+ <tbody>
+ <tr>
+ <td>Synchronous API using buffer arrays</td>
+ <td>Supported</td>
+ <td>Deprecated</td>
+ </tr>
+ <tr>
+ <td>Synchronous API using buffers</td>
+ <td class=NA>Not Available</td>
+ <td>Supported</td>
+ </tr>
+ <tr>
+ <td>Asynchronous API using buffers</td>
+ <td class=NA>Not Available</td>
+ <td>Supported</td>
+ </tr>
+ </tbody>
+ </table>
+
+ <h4>Asynchronous Processing using Buffers</h4>
+ <p>
+ Since {@link android.os.Build.VERSION_CODES#LOLLIPOP}, the preferred method is to process data
+ asynchronously by setting a callback before calling {@link #configure configure}. Asynchronous
+ mode changes the state transitions slightly, because you must call {@link #start} after {@link
+ #flush} to transition the codec to the Running sub-state and start receiving input buffers.
+ Similarly, upon an initial call to {@code start} the codec will move directly to the Running
+ sub-state and start passing available input buffers via the callback.
+ <p>
+ <center><object style="width: 516px; height: 353px;" type="image/svg+xml"
+ data="../../../images/media/mediacodec_async_states.svg"><img
+ src="../../../images/media/mediacodec_async_states.png" style="width: 516px; height: 353px"
+ alt="MediaCodec state diagram for asynchronous operation"></object></center>
+ <p>
+ MediaCodec is typically used like this in asynchronous mode:
+ <pre class=prettyprint>
+ MediaCodec codec = MediaCodec.createByCodecName(name);
+ MediaFormat mOutputFormat; // member variable
+ codec.setCallback(new MediaCodec.Callback() {
+ {@literal @Override}
+ void onInputBufferAvailable(MediaCodec mc, int inputBufferId) {
+ ByteBuffer inputBuffer = codec.getInputBuffer(inputBufferId);
+ // fill inputBuffer with valid data
+ &hellip;
+ codec.queueInputBuffer(inputBufferId, &hellip;);
+ }
+
+ {@literal @Override}
+ void onOutputBufferAvailable(MediaCodec mc, int outputBufferId, &hellip;) {
+ ByteBuffer outputBuffer = codec.getOutputBuffer(outputBufferId);
+ MediaFormat bufferFormat = codec.getOutputFormat(outputBufferId); // option A
+ // bufferFormat is equivalent to mOutputFormat
+ // outputBuffer is ready to be processed or rendered.
+ &hellip;
+ codec.releaseOutputBuffer(outputBufferId, &hellip;);
+ }
+
+ {@literal @Override}
+ void onOutputFormatChanged(MediaCodec mc, MediaFormat format) {
+ // Subsequent data will conform to new format.
+ // Can ignore if using getOutputFormat(outputBufferId)
+ mOutputFormat = format; // option B
+ }
+
+ {@literal @Override}
+ void onError(&hellip;) {
+ &hellip;
+ }
+ });
+ codec.configure(format, &hellip;);
+ mOutputFormat = codec.getOutputFormat(); // option B
+ codec.start();
+ // wait for processing to complete
+ codec.stop();
+ codec.release();</pre>
+
+ <h4>Synchronous Processing using Buffers</h4>
+ <p>
+ Since {@link android.os.Build.VERSION_CODES#LOLLIPOP}, you should retrieve input and output
+ buffers using {@link #getInputBuffer getInput}/{@link #getOutputBuffer OutputBuffer(int)} and/or
+ {@link #getInputImage getInput}/{@link #getOutputImage OutputImage(int)} even when using the
+ codec in synchronous mode. This allows certain optimizations by the framework, e.g. when
+ processing dynamic content. This optimization is disabled if you call {@link #getInputBuffers
+ getInput}/{@link #getOutputBuffers OutputBuffers()}.
+
+ <p class=note>
+ <strong>Note:</strong> do not mix the methods of using buffers and buffer arrays at the same
+ time. Specifically, only call {@code getInput}/{@code OutputBuffers} directly after {@link
+ #start} or after having dequeued an output buffer ID with the value of {@link
+ #INFO_OUTPUT_FORMAT_CHANGED}.
+ <p>
+ MediaCodec is typically used like this in synchronous mode:
+ <pre>
+ MediaCodec codec = MediaCodec.createByCodecName(name);
+ codec.configure(format, &hellip;);
+ MediaFormat outputFormat = codec.getOutputFormat(); // option B
+ codec.start();
+ for (;;) {
+ int inputBufferId = codec.dequeueInputBuffer(timeoutUs);
+ if (inputBufferId &gt;= 0) {
+ ByteBuffer inputBuffer = codec.getInputBuffer(&hellip;);
+ // fill inputBuffer with valid data
+ &hellip;
+ codec.queueInputBuffer(inputBufferId, &hellip;);
+ }
+ int outputBufferId = codec.dequeueOutputBuffer(&hellip;);
+ if (outputBufferId &gt;= 0) {
+ ByteBuffer outputBuffer = codec.getOutputBuffer(outputBufferId);
+ MediaFormat bufferFormat = codec.getOutputFormat(outputBufferId); // option A
+ // bufferFormat is identical to outputFormat
+ // outputBuffer is ready to be processed or rendered.
+ &hellip;
+ codec.releaseOutputBuffer(outputBufferId, &hellip;);
+ } else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+ // Subsequent data will conform to new format.
+ // Can ignore if using getOutputFormat(outputBufferId)
+ outputFormat = codec.getOutputFormat(); // option B
+ }
+ }
+ codec.stop();
+ codec.release();</pre>
+
+ <h4>Synchronous Processing using Buffer Arrays (deprecated)</h4>
+ <p>
+ In versions {@link android.os.Build.VERSION_CODES#KITKAT_WATCH} and before, the set of input and
+ output buffers are represented by the {@code ByteBuffer[]} arrays. After a successful call to
+ {@link #start}, retrieve the buffer arrays using {@link #getInputBuffers getInput}/{@link
+ #getOutputBuffers OutputBuffers()}. Use the buffer ID-s as indices into these arrays (when
+ non-negative), as demonstrated in the sample below. Note that there is no inherent correlation
+ between the size of the arrays and the number of input and output buffers used by the system,
+ although the array size provides an upper bound.
+ <pre>
+ MediaCodec codec = MediaCodec.createByCodecName(name);
+ codec.configure(format, &hellip;);
+ codec.start();
+ ByteBuffer[] inputBuffers = codec.getInputBuffers();
+ ByteBuffer[] outputBuffers = codec.getOutputBuffers();
+ for (;;) {
+ int inputBufferId = codec.dequeueInputBuffer(&hellip;);
+ if (inputBufferId &gt;= 0) {
+ // fill inputBuffers[inputBufferId] with valid data
+ &hellip;
+ codec.queueInputBuffer(inputBufferId, &hellip;);
+ }
+ int outputBufferId = codec.dequeueOutputBuffer(&hellip;);
+ if (outputBufferId &gt;= 0) {
+ // outputBuffers[outputBufferId] is ready to be processed or rendered.
+ &hellip;
+ codec.releaseOutputBuffer(outputBufferId, &hellip;);
+ } else if (outputBufferId == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
+ outputBuffers = codec.getOutputBuffers();
+ } else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+ // Subsequent data will conform to new format.
+ MediaFormat format = codec.getOutputFormat();
+ }
+ }
+ codec.stop();
+ codec.release();</pre>
+
+ <h4>End-of-stream Handling</h4>
+ <p>
+ When you reach the end of the input data, you must signal it to the codec by specifying the
+ {@link #BUFFER_FLAG_END_OF_STREAM} flag in the call to {@link #queueInputBuffer
+ queueInputBuffer}. You can do this on the last valid input buffer, or by submitting an additional
+ empty input buffer with the end-of-stream flag set. If using an empty buffer, the timestamp will
+ be ignored.
+ <p>
+ The codec will continue to return output buffers until it eventually signals the end of the
+ output stream by specifying the same end-of-stream flag in the {@link BufferInfo} set in {@link
+ #dequeueOutputBuffer dequeueOutputBuffer} or returned via {@link Callback#onOutputBufferAvailable
+ onOutputBufferAvailable}. This can be set on the last valid output buffer, or on an empty buffer
+ after the last valid output buffer. The timestamp of such empty buffer should be ignored.
+ <p>
+ Do not submit additional input buffers after signaling the end of the input stream, unless the
+ codec has been flushed, or stopped and restarted.
+
+ <h4>Using an Output Surface</h4>
+ <p>
+ The data processing is nearly identical to the ByteBuffer mode when using an output {@link
+ Surface}; however, the output buffers will not be accessible, and are represented as {@code null}
+ values. E.g. {@link #getOutputBuffer getOutputBuffer}/{@link #getOutputImage Image(int)} will
+ return {@code null} and {@link #getOutputBuffers} will return an array containing only {@code
+ null}-s.
+ <p>
+ When using an output Surface, you can select whether or not to render each output buffer on the
+ surface. You have three choices:
+ <ul>
+ <li><strong>Do not render the buffer:</strong> Call {@link #releaseOutputBuffer(int, boolean)
+ releaseOutputBuffer(bufferId, false)}.</li>
+ <li><strong>Render the buffer with the default timestamp:</strong> Call {@link
+ #releaseOutputBuffer(int, boolean) releaseOutputBuffer(bufferId, true)}.</li>
+ <li><strong>Render the buffer with a specific timestamp:</strong> Call {@link
+ #releaseOutputBuffer(int, long) releaseOutputBuffer(bufferId, timestamp)}.</li>
+ </ul>
+ <p>
+ Since {@link android.os.Build.VERSION_CODES#M}, the default timestamp is the {@linkplain
+ BufferInfo#presentationTimeUs presentation timestamp} of the buffer (converted to nanoseconds).
+ It was not defined prior to that.
+ <p>
+ Also since {@link android.os.Build.VERSION_CODES#M}, you can change the output Surface
+ dynamically using {@link #setOutputSurface setOutputSurface}.
+
+ <h4>Transformations When Rendering onto Surface</h4>
+
+ If the codec is configured into Surface mode, any crop rectangle, {@linkplain
+ MediaFormat#KEY_ROTATION rotation} and {@linkplain #setVideoScalingMode video scaling
+ mode} will be automatically applied with one exception:
+ <p class=note>
+ Prior to the {@link android.os.Build.VERSION_CODES#M} release, software decoders may not
+ have applied the rotation when being rendered onto a Surface. Unfortunately, there is no standard
+ and simple way to identify software decoders, or if they apply the rotation other than by trying
+ it out.
+ <p>
+ There are also some caveats.
+ <p class=note>
+ Note that the pixel aspect ratio is not considered when displaying the output onto the
+ Surface. This means that if you are using {@link #VIDEO_SCALING_MODE_SCALE_TO_FIT} mode, you
+ must position the output Surface so that it has the proper final display aspect ratio. Conversely,
+ you can only use {@link #VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING} mode for content with
+ square pixels (pixel aspect ratio of 1:1).
+ <p class=note>
+ Note also that as of {@link android.os.Build.VERSION_CODES#N} release, {@link
+ #VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING} mode may not work correctly for videos rotated
+ by 90 or 270 degrees.
+ <p class=note>
+ When setting the video scaling mode, note that it must be reset after each time the output
+ buffers change. Since the {@link #INFO_OUTPUT_BUFFERS_CHANGED} event is deprecated, you can
+ do this after each time the output format changes.
+
+ <h4>Using an Input Surface</h4>
+ <p>
+ When using an input Surface, there are no accessible input buffers, as buffers are automatically
+ passed from the input surface to the codec. Calling {@link #dequeueInputBuffer
+ dequeueInputBuffer} will throw an {@code IllegalStateException}, and {@link #getInputBuffers}
+ returns a bogus {@code ByteBuffer[]} array that <strong>MUST NOT</strong> be written into.
+ <p>
+ Call {@link #signalEndOfInputStream} to signal end-of-stream. The input surface will stop
+ submitting data to the codec immediately after this call.
+ <p>
+
+ <h3>Seeking &amp; Adaptive Playback Support</h3>
+ <p>
+ Video decoders (and in general codecs that consume compressed video data) behave differently
+ regarding seek and format change whether or not they support and are configured for adaptive
+ playback. You can check if a decoder supports {@linkplain
+ CodecCapabilities#FEATURE_AdaptivePlayback adaptive playback} via {@link
+ CodecCapabilities#isFeatureSupported CodecCapabilities.isFeatureSupported(String)}. Adaptive
+ playback support for video decoders is only activated if you configure the codec to decode onto a
+ {@link Surface}.
+
+ <h4 id=KeyFrames><a name="KeyFrames"></a>Stream Boundary and Key Frames</h4>
+ <p>
+ It is important that the input data after {@link #start} or {@link #flush} starts at a suitable
+ stream boundary: the first frame must be a key frame. A <em>key frame</em> can be decoded
+ completely on its own (for most codecs this means an I-frame), and no frames that are to be
+ displayed after a key frame refer to frames before the key frame.
+ <p>
+ The following table summarizes suitable key frames for various video formats.
+ <table>
+ <thead>
+ <tr>
+ <th>Format</th>
+ <th>Suitable key frame</th>
+ </tr>
+ </thead>
+ <tbody class=mid>
+ <tr>
+ <td>VP9/VP8</td>
+ <td>a suitable intraframe where no subsequent frames refer to frames prior to this frame.<br>
+ <i>(There is no specific name for such key frame.)</i></td>
+ </tr>
+ <tr>
+ <td>H.265 HEVC</td>
+ <td>IDR or CRA</td>
+ </tr>
+ <tr>
+ <td>H.264 AVC</td>
+ <td>IDR</td>
+ </tr>
+ <tr>
+ <td>MPEG-4<br>H.263<br>MPEG-2</td>
+ <td>a suitable I-frame where no subsequent frames refer to frames prior to this frame.<br>
+ <i>(There is no specific name for such key frame.)</i></td>
+ </tr>
+ </tbody>
+ </table>
+
+ <h4>For decoders that do not support adaptive playback (including when not decoding onto a
+ Surface)</h4>
+ <p>
+ In order to start decoding data that is not adjacent to previously submitted data (i.e. after a
+ seek) you <strong>MUST</strong> flush the decoder. Since all output buffers are immediately
+ revoked at the point of the flush, you may want to first signal then wait for the end-of-stream
+ before you call {@code flush}. It is important that the input data after a flush starts at a
+ suitable stream boundary/key frame.
+ <p class=note>
+ <strong>Note:</strong> the format of the data submitted after a flush must not change; {@link
+ #flush} does not support format discontinuities; for that, a full {@link #stop} - {@link
+ #configure configure(&hellip;)} - {@link #start} cycle is necessary.
+
+ <p class=note>
+ <strong>Also note:</strong> if you flush the codec too soon after {@link #start} &ndash;
+ generally, before the first output buffer or output format change is received &ndash; you
+ will need to resubmit the codec-specific-data to the codec. See the <a
+ href="#CSD">codec-specific-data section</a> for more info.
+
+ <h4>For decoders that support and are configured for adaptive playback</h4>
+ <p>
+ In order to start decoding data that is not adjacent to previously submitted data (i.e. after a
+ seek) it is <em>not necessary</em> to flush the decoder; however, input data after the
+ discontinuity must start at a suitable stream boundary/key frame.
+ <p>
+ For some video formats - namely H.264, H.265, VP8 and VP9 - it is also possible to change the
+ picture size or configuration mid-stream. To do this you must package the entire new
+ codec-specific configuration data together with the key frame into a single buffer (including
+ any start codes), and submit it as a <strong>regular</strong> input buffer.
+ <p>
+ You will receive an {@link #INFO_OUTPUT_FORMAT_CHANGED} return value from {@link
+ #dequeueOutputBuffer dequeueOutputBuffer} or a {@link Callback#onOutputFormatChanged
+ onOutputFormatChanged} callback just after the picture-size change takes place and before any
+ frames with the new size have been returned.
+ <p class=note>
+ <strong>Note:</strong> just as the case for codec-specific data, be careful when calling
+ {@link #flush} shortly after you have changed the picture size. If you have not received
+ confirmation of the picture size change, you will need to repeat the request for the new picture
+ size.
+
+ <h3>Error handling</h3>
+ <p>
+ The factory methods {@link #createByCodecName createByCodecName} and {@link #createDecoderByType
+ createDecoder}/{@link #createEncoderByType EncoderByType} throw {@code IOException} on failure
+ which you must catch or declare to pass up. MediaCodec methods throw {@code
+ IllegalStateException} when the method is called from a codec state that does not allow it; this
+ is typically due to incorrect application API usage. Methods involving secure buffers may throw
+ {@link CryptoException}, which has further error information obtainable from {@link
+ CryptoException#getErrorCode}.
+ <p>
+ Internal codec errors result in a {@link CodecException}, which may be due to media content
+ corruption, hardware failure, resource exhaustion, and so forth, even when the application is
+ correctly using the API. The recommended action when receiving a {@code CodecException}
+ can be determined by calling {@link CodecException#isRecoverable} and {@link
+ CodecException#isTransient}:
+ <ul>
+ <li><strong>recoverable errors:</strong> If {@code isRecoverable()} returns true, then call
+ {@link #stop}, {@link #configure configure(&hellip;)}, and {@link #start} to recover.</li>
+ <li><strong>transient errors:</strong> If {@code isTransient()} returns true, then resources are
+ temporarily unavailable and the method may be retried at a later time.</li>
+ <li><strong>fatal errors:</strong> If both {@code isRecoverable()} and {@code isTransient()}
+ return false, then the {@code CodecException} is fatal and the codec must be {@linkplain #reset
+ reset} or {@linkplain #release released}.</li>
+ </ul>
+ <p>
+ At most one of {@code isRecoverable()} and {@code isTransient()} returns {@code true} for a
+ given error.
+
+ <h2 id=History><a name="History"></a>Valid API Calls and API History</h2>
+ <p>
+ This section summarizes the valid API calls in each state and the API history of the MediaCodec
+ class. For API version numbers, see {@link android.os.Build.VERSION_CODES}.
+
+ <style>
+ .api > tr > th, .api > tr > td { text-align: center; padding: 4px 4px; }
+ .api > tr > th { vertical-align: bottom; }
+ .api > tr > td { vertical-align: middle; }
+ .sml > tr > th, .sml > tr > td { text-align: center; padding: 2px 4px; }
+ .fn { text-align: left; }
+ .fn > code > a { font: 14px/19px Roboto Condensed, sans-serif; }
+ .deg45 {
+ white-space: nowrap; background: none; border: none; vertical-align: bottom;
+ width: 30px; height: 83px;
+ }
+ .deg45 > div {
+ transform: skew(-45deg, 0deg) translate(1px, -67px);
+ transform-origin: bottom left 0;
+ width: 30px; height: 20px;
+ }
+ .deg45 > div > div { border: 1px solid #ddd; background: #999; height: 90px; width: 42px; }
+ .deg45 > div > div > div { transform: skew(45deg, 0deg) translate(-55px, 55px) rotate(-45deg); }
+ </style>
+
+ <table align="right" style="width: 0%">
+ <thead>
+ <tr><th>Symbol</th><th>Meaning</th></tr>
+ </thead>
+ <tbody class=sml>
+ <tr><td>&#9679;</td><td>Supported</td></tr>
+ <tr><td>&#8277;</td><td>Semantics changed</td></tr>
+ <tr><td>&#9675;</td><td>Experimental support</td></tr>
+ <tr><td>[ ]</td><td>Deprecated</td></tr>
+ <tr><td>&#9099;</td><td>Restricted to surface input mode</td></tr>
+ <tr><td>&#9094;</td><td>Restricted to surface output mode</td></tr>
+ <tr><td>&#9639;</td><td>Restricted to ByteBuffer input mode</td></tr>
+ <tr><td>&#8617;</td><td>Restricted to synchronous mode</td></tr>
+ <tr><td>&#8644;</td><td>Restricted to asynchronous mode</td></tr>
+ <tr><td>( )</td><td>Can be called, but shouldn't</td></tr>
+ </tbody>
+ </table>
+
+ <table style="width: 100%;">
+ <thead class=api>
+ <tr>
+ <th class=deg45><div><div style="background:#4285f4"><div>Uninitialized</div></div></div></th>
+ <th class=deg45><div><div style="background:#f4b400"><div>Configured</div></div></div></th>
+ <th class=deg45><div><div style="background:#e67c73"><div>Flushed</div></div></div></th>
+ <th class=deg45><div><div style="background:#0f9d58"><div>Running</div></div></div></th>
+ <th class=deg45><div><div style="background:#f7cb4d"><div>End of Stream</div></div></div></th>
+ <th class=deg45><div><div style="background:#db4437"><div>Error</div></div></div></th>
+ <th class=deg45><div><div style="background:#666"><div>Released</div></div></div></th>
+ <th></th>
+ <th colspan="8">SDK Version</th>
+ </tr>
+ <tr>
+ <th colspan="7">State</th>
+ <th>Method</th>
+ <th>16</th>
+ <th>17</th>
+ <th>18</th>
+ <th>19</th>
+ <th>20</th>
+ <th>21</th>
+ <th>22</th>
+ <th>23</th>
+ </tr>
+ </thead>
+ <tbody class=api>
+ <tr>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td class=fn>{@link #createByCodecName createByCodecName}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td class=fn>{@link #createDecoderByType createDecoderByType}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td class=fn>{@link #createEncoderByType createEncoderByType}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td class=fn>{@link #createPersistentInputSurface createPersistentInputSurface}</td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>16+</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #configure configure}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#8277;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>18+</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #createInputSurface createInputSurface}</td>
+ <td></td>
+ <td></td>
+ <td>&#9099;</td>
+ <td>&#9099;</td>
+ <td>&#9099;</td>
+ <td>&#9099;</td>
+ <td>&#9099;</td>
+ <td>&#9099;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>(16+)</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #dequeueInputBuffer dequeueInputBuffer}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9639;</td>
+ <td>&#9639;</td>
+ <td>&#9639;</td>
+ <td>&#8277;&#9639;&#8617;</td>
+ <td>&#9639;&#8617;</td>
+ <td>&#9639;&#8617;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #dequeueOutputBuffer dequeueOutputBuffer}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#8277;&#8617;</td>
+ <td>&#8617;</td>
+ <td>&#8617;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #flush flush}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>18+</td>
+ <td>18+</td>
+ <td>18+</td>
+ <td>18+</td>
+ <td>18+</td>
+ <td>18+</td>
+ <td>-</td>
+ <td class=fn>{@link #getCodecInfo getCodecInfo}</td>
+ <td></td>
+ <td></td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>(21+)</td>
+ <td>21+</td>
+ <td>(21+)</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #getInputBuffer getInputBuffer}</td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>16+</td>
+ <td>(16+)</td>
+ <td>(16+)</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #getInputBuffers getInputBuffers}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>[&#8277;&#8617;]</td>
+ <td>[&#8617;]</td>
+ <td>[&#8617;]</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>21+</td>
+ <td>(21+)</td>
+ <td>(21+)</td>
+ <td>(21+)</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #getInputFormat getInputFormat}</td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>(21+)</td>
+ <td>21+</td>
+ <td>(21+)</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #getInputImage getInputImage}</td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td>&#9675;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>18+</td>
+ <td>18+</td>
+ <td>18+</td>
+ <td>18+</td>
+ <td>18+</td>
+ <td>18+</td>
+ <td>-</td>
+ <td class=fn>{@link #getName getName}</td>
+ <td></td>
+ <td></td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>(21+)</td>
+ <td>21+</td>
+ <td>21+</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #getOutputBuffer getOutputBuffer}</td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #getOutputBuffers getOutputBuffers}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>[&#8277;&#8617;]</td>
+ <td>[&#8617;]</td>
+ <td>[&#8617;]</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>21+</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #getOutputFormat()}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>(21+)</td>
+ <td>21+</td>
+ <td>21+</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #getOutputFormat(int)}</td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>(21+)</td>
+ <td>21+</td>
+ <td>21+</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #getOutputImage getOutputImage}</td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td>&#9675;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td>16+</td>
+ <td>(16+)</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #queueInputBuffer queueInputBuffer}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#8277;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td>16+</td>
+ <td>(16+)</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #queueSecureInputBuffer queueSecureInputBuffer}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#8277;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>16+</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td class=fn>{@link #release release}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #releaseOutputBuffer(int, boolean)}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#8277;</td>
+ <td>&#9679;</td>
+ <td>&#8277;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td>21+</td>
+ <td>21+</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #releaseOutputBuffer(int, long)}</td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td>&#9094;</td>
+ <td>&#9094;</td>
+ <td>&#9094;</td>
+ </tr>
+ <tr>
+ <td>21+</td>
+ <td>21+</td>
+ <td>21+</td>
+ <td>21+</td>
+ <td>21+</td>
+ <td>21+</td>
+ <td>-</td>
+ <td class=fn>{@link #reset reset}</td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>21+</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #setCallback(Callback) setCallback}</td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>{@link #setCallback(Callback, Handler) &#8277;}</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>23+</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #setInputSurface setInputSurface}</td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td>&#9099;</td>
+ </tr>
+ <tr>
+ <td>23+</td>
+ <td>23+</td>
+ <td>23+</td>
+ <td>23+</td>
+ <td>23+</td>
+ <td>(23+)</td>
+ <td>(23+)</td>
+ <td class=fn>{@link #setOnFrameRenderedListener setOnFrameRenderedListener}</td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td>&#9675; &#9094;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>23+</td>
+ <td>23+</td>
+ <td>23+</td>
+ <td>23+</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #setOutputSurface setOutputSurface}</td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td>&#9094;</td>
+ </tr>
+ <tr>
+ <td>19+</td>
+ <td>19+</td>
+ <td>19+</td>
+ <td>19+</td>
+ <td>19+</td>
+ <td>(19+)</td>
+ <td>-</td>
+ <td class=fn>{@link #setParameters setParameters}</td>
+ <td></td>
+ <td></td>
+ <td></td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>(16+)</td>
+ <td>(16+)</td>
+ <td>16+</td>
+ <td>(16+)</td>
+ <td>(16+)</td>
+ <td>-</td>
+ <td class=fn>{@link #setVideoScalingMode setVideoScalingMode}</td>
+ <td>&#9094;</td>
+ <td>&#9094;</td>
+ <td>&#9094;</td>
+ <td>&#9094;</td>
+ <td>&#9094;</td>
+ <td>&#9094;</td>
+ <td>&#9094;</td>
+ <td>&#9094;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>18+</td>
+ <td>18+</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #signalEndOfInputStream signalEndOfInputStream}</td>
+ <td></td>
+ <td></td>
+ <td>&#9099;</td>
+ <td>&#9099;</td>
+ <td>&#9099;</td>
+ <td>&#9099;</td>
+ <td>&#9099;</td>
+ <td>&#9099;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>16+</td>
+ <td>21+(&#8644;)</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #start start}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#8277;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <tr>
+ <td>-</td>
+ <td>-</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>16+</td>
+ <td>-</td>
+ <td>-</td>
+ <td class=fn>{@link #stop stop}</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ </tbody>
+ </table>
+ */
+final public class MediaCodec {
+    /**
+     * Per buffer metadata includes an offset and size specifying
+     * the range of valid data in the associated codec (output) buffer.
+     */
+    public static final class BufferInfo {
+        /**
+         * Update the buffer metadata information.
+         *
+         * @param newOffset the start-offset of the data in the buffer.
+         * @param newSize the amount of data (in bytes) in the buffer.
+         * @param newTimeUs the presentation timestamp in microseconds.
+         * @param newFlags buffer flags associated with the buffer. This
+         * should be a combination of {@link #BUFFER_FLAG_KEY_FRAME} and
+         * {@link #BUFFER_FLAG_END_OF_STREAM}.
+         */
+        public void set(
+                int newOffset, int newSize, long newTimeUs, @BufferFlag int newFlags) {
+            offset = newOffset;
+            size = newSize;
+            presentationTimeUs = newTimeUs;
+            flags = newFlags;
+        }
+
+        /**
+         * The start-offset of the data in the buffer.
+         */
+        public int offset;
+
+        /**
+         * The amount of data (in bytes) in the buffer. If this is {@code 0},
+         * the buffer has no data in it and can be discarded. The only
+         * use of a 0-size buffer is to carry the end-of-stream marker.
+         */
+        public int size;
+
+        /**
+         * The presentation timestamp in microseconds for the buffer.
+         * This is derived from the presentation timestamp passed in
+         * with the corresponding input buffer. This should be ignored for
+         * a 0-sized buffer.
+         */
+        public long presentationTimeUs;
+
+        /**
+         * Buffer flags associated with the buffer. A combination of
+         * {@link #BUFFER_FLAG_KEY_FRAME} and {@link #BUFFER_FLAG_END_OF_STREAM}.
+         *
+         * <p>Encoded buffers that are key frames are marked with
+         * {@link #BUFFER_FLAG_KEY_FRAME}.
+         *
+         * <p>The last output buffer corresponding to the input buffer
+         * marked with {@link #BUFFER_FLAG_END_OF_STREAM} will also be marked
+         * with {@link #BUFFER_FLAG_END_OF_STREAM}. In some cases this could
+         * be an empty buffer, whose sole purpose is to carry the end-of-stream
+         * marker.
+         */
+        @BufferFlag
+        public int flags;
+
+        /**
+         * Returns a field-by-field copy of this metadata.
+         * @hide
+         */
+        @NonNull
+        public BufferInfo dup() {
+            BufferInfo copy = new BufferInfo();
+            copy.set(offset, size, presentationTimeUs, flags);
+            return copy;
+        }
+    }
+
+    // The following flag constants MUST stay in sync with their equivalents
+    // in MediaCodec.h !
+
+    /**
+     * This indicates that the (encoded) buffer marked as such contains
+     * the data for a key frame.
+     *
+     * @deprecated Use {@link #BUFFER_FLAG_KEY_FRAME} instead.
+     */
+    public static final int BUFFER_FLAG_SYNC_FRAME = 1;
+
+    /**
+     * This indicates that the (encoded) buffer marked as such contains
+     * the data for a key frame.
+     */
+    public static final int BUFFER_FLAG_KEY_FRAME = 1;
+
+    /**
+     * This indicates that the buffer marked as such contains codec
+     * initialization / codec specific data instead of media data.
+     */
+    public static final int BUFFER_FLAG_CODEC_CONFIG = 2;
+
+    /**
+     * This signals the end of stream, i.e. no buffers will be available
+     * after this, unless of course, {@link #flush} follows.
+     */
+    public static final int BUFFER_FLAG_END_OF_STREAM = 4;
+
+    /**
+     * This indicates that the buffer only contains part of a frame,
+     * and the decoder should batch the data until a buffer without
+     * this flag appears before decoding the frame.
+     */
+    public static final int BUFFER_FLAG_PARTIAL_FRAME = 8;
+
+    /**
+     * Typedef for the buffer-flag bitmask used by {@link BufferInfo#flags}.
+     * @hide
+     */
+    @IntDef(
+        flag = true,
+        value = {
+            BUFFER_FLAG_SYNC_FRAME,
+            BUFFER_FLAG_KEY_FRAME,
+            BUFFER_FLAG_CODEC_CONFIG,
+            BUFFER_FLAG_END_OF_STREAM,
+            BUFFER_FLAG_PARTIAL_FRAME,
+    })
+    @Retention(RetentionPolicy.SOURCE)
+    public @interface BufferFlag {}
+
+    // Default handler for codec events; bound to the constructing thread's
+    // looper (or the main looper as a fallback) in the constructor.
+    private EventHandler mEventHandler;
+    // Handler on which EVENT_FRAME_RENDERED notifications are delivered;
+    // initialized to mEventHandler in the constructor.
+    private EventHandler mOnFrameRenderedHandler;
+    // Handler on which EVENT_CALLBACK messages are delivered; initialized to
+    // mEventHandler in the constructor.
+    private EventHandler mCallbackHandler;
+    // Application callback invoked by EventHandler.handleCallback().
+    private Callback mCallback;
+    // Listener invoked for EVENT_FRAME_RENDERED messages.
+    private OnFrameRenderedListener mOnFrameRenderedListener;
+    // Held while dispatching frame-rendered notifications; presumably also
+    // guards listener (un)registration elsewhere in the class — TODO confirm.
+    private Object mListenerLock = new Object();
+
+    // Message types (msg.what) handled by EventHandler.handleMessage().
+    private static final int EVENT_CALLBACK = 1;
+    private static final int EVENT_SET_CALLBACK = 2;
+    private static final int EVENT_FRAME_RENDERED = 3;
+
+    // Callback sub-types carried in msg.arg1 of an EVENT_CALLBACK message.
+    private static final int CB_INPUT_AVAILABLE = 1;
+    private static final int CB_OUTPUT_AVAILABLE = 2;
+    private static final int CB_ERROR = 3;
+    private static final int CB_OUTPUT_FORMAT_CHANGE = 4;
+
+    /**
+     * Delivers codec events on a {@link Looper} thread: EVENT_CALLBACK
+     * messages are fanned out to the application's {@link Callback}, and
+     * EVENT_FRAME_RENDERED messages to the {@link OnFrameRenderedListener}.
+     */
+    private class EventHandler extends Handler {
+        // Codec instance passed back to the application in every callback.
+        private MediaCodec mCodec;
+
+        public EventHandler(@NonNull MediaCodec codec, @NonNull Looper looper) {
+            super(looper);
+            mCodec = codec;
+        }
+
+        @Override
+        public void handleMessage(@NonNull Message msg) {
+            switch (msg.what) {
+                case EVENT_CALLBACK:
+                {
+                    handleCallback(msg);
+                    break;
+                }
+                case EVENT_SET_CALLBACK:
+                {
+                    // Update the callback reference used by handleCallback().
+                    mCallback = (MediaCodec.Callback) msg.obj;
+                    break;
+                }
+                case EVENT_FRAME_RENDERED:
+                    synchronized (mListenerLock) {
+                        // msg.obj is a map holding one entry pair per rendered
+                        // frame, keyed "<i>-media-time-us" / "<i>-system-nano".
+                        // Walk indices 0, 1, ... until either key is missing or
+                        // the listener has been cleared.
+                        Map<String, Object> map = (Map<String, Object>)msg.obj;
+                        for (int i = 0; ; ++i) {
+                            Object mediaTimeUs = map.get(i + "-media-time-us");
+                            Object systemNano = map.get(i + "-system-nano");
+                            if (mediaTimeUs == null || systemNano == null
+                                    || mOnFrameRenderedListener == null) {
+                                break;
+                            }
+                            mOnFrameRenderedListener.onFrameRendered(
+                                    mCodec, (long)mediaTimeUs, (long)systemNano);
+                        }
+                        break;
+                    }
+                default:
+                {
+                    break;
+                }
+            }
+        }
+
+        // Decodes an EVENT_CALLBACK message (sub-type in msg.arg1) and invokes
+        // the matching application Callback method. No-op if no callback is set.
+        private void handleCallback(@NonNull Message msg) {
+            if (mCallback == null) {
+                return;
+            }
+
+            switch (msg.arg1) {
+                case CB_INPUT_AVAILABLE:
+                {
+                    int index = msg.arg2;
+                    // Validate the cached buffer under mBufferLock before
+                    // handing the index to the application.
+                    synchronized(mBufferLock) {
+                        validateInputByteBuffer(mCachedInputBuffers, index);
+                    }
+                    mCallback.onInputBufferAvailable(mCodec, index);
+                    break;
+                }
+
+                case CB_OUTPUT_AVAILABLE:
+                {
+                    int index = msg.arg2;
+                    BufferInfo info = (MediaCodec.BufferInfo) msg.obj;
+                    synchronized(mBufferLock) {
+                        validateOutputByteBuffer(mCachedOutputBuffers, index, info);
+                    }
+                    mCallback.onOutputBufferAvailable(
+                            mCodec, index, info);
+                    break;
+                }
+
+                case CB_ERROR:
+                {
+                    mCallback.onError(mCodec, (MediaCodec.CodecException) msg.obj);
+                    break;
+                }
+
+                case CB_OUTPUT_FORMAT_CHANGE:
+                {
+                    // Wrap the raw native map in a MediaFormat for the app.
+                    mCallback.onOutputFormatChanged(mCodec,
+                            new MediaFormat((Map<String, Object>) msg.obj));
+                    break;
+                }
+
+                default:
+                {
+                    break;
+                }
+            }
+        }
+    }
+
+    // True when the codec has been given a Surface — presumably set by
+    // configure()/surface-related methods outside this view; TODO confirm.
+    private boolean mHasSurface = false;
+
+    /**
+     * Creates the preferred decoder supporting input data of the given mime type.
+     *
+     * The following is a partial list of defined mime types and their semantics:
+     * <ul>
+     * <li>"video/x-vnd.on2.vp8" - VP8 video (i.e. video in .webm)
+     * <li>"video/x-vnd.on2.vp9" - VP9 video (i.e. video in .webm)
+     * <li>"video/avc" - H.264/AVC video
+     * <li>"video/hevc" - H.265/HEVC video
+     * <li>"video/mp4v-es" - MPEG4 video
+     * <li>"video/3gpp" - H.263 video
+     * <li>"audio/3gpp" - AMR narrowband audio
+     * <li>"audio/amr-wb" - AMR wideband audio
+     * <li>"audio/mpeg" - MPEG1/2 audio layer III
+     * <li>"audio/mp4a-latm" - AAC audio (note, this is raw AAC packets, not packaged in LATM!)
+     * <li>"audio/vorbis" - vorbis audio
+     * <li>"audio/g711-alaw" - G.711 alaw audio
+     * <li>"audio/g711-mlaw" - G.711 ulaw audio
+     * </ul>
+     *
+     * <strong>Note:</strong> prefer {@link MediaCodecList#findDecoderForFormat}
+     * together with {@link #createByCodecName} to ensure that the resulting
+     * codec can handle a given format.
+     *
+     * @param type The mime type of the input data.
+     * @throws IOException if the codec cannot be created.
+     * @throws IllegalArgumentException if type is not a valid mime type.
+     * @throws NullPointerException if type is null.
+     */
+    @NonNull
+    public static MediaCodec createDecoderByType(@NonNull String type)
+            throws IOException {
+        // The mime type doubles as the component "name"; request a decoder.
+        final boolean nameIsType = true;
+        final boolean encoder = false;
+        return new MediaCodec(type, nameIsType, encoder);
+    }
+
+    /**
+     * Creates the preferred encoder producing output data of the given mime type.
+     *
+     * <strong>Note:</strong> prefer {@link MediaCodecList#findEncoderForFormat}
+     * together with {@link #createByCodecName} to ensure that the resulting
+     * codec can handle a given format.
+     *
+     * @param type The desired mime type of the output data.
+     * @throws IOException if the codec cannot be created.
+     * @throws IllegalArgumentException if type is not a valid mime type.
+     * @throws NullPointerException if type is null.
+     */
+    @NonNull
+    public static MediaCodec createEncoderByType(@NonNull String type)
+            throws IOException {
+        // The mime type doubles as the component "name"; request an encoder.
+        final boolean nameIsType = true;
+        final boolean encoder = true;
+        return new MediaCodec(type, nameIsType, encoder);
+    }
+
+    /**
+     * Instantiates a codec component by its exact name. Use with caution:
+     * component names are typically obtained from
+     * {@link android.media.MediaCodecList}.
+     *
+     * @param name The name of the codec to be instantiated.
+     * @throws IOException if the codec cannot be created.
+     * @throws IllegalArgumentException if name is not valid.
+     * @throws NullPointerException if name is null.
+     */
+    @NonNull
+    public static MediaCodec createByCodecName(@NonNull String name)
+            throws IOException {
+        // An explicit component name is not a mime type; the encoder flag is
+        // ignored in this mode.
+        return new MediaCodec(name, /* nameIsType */ false, /* unused */ false);
+    }
+
+    // Creates the codec and binds event delivery to a looper. Event handlers
+    // prefer the constructing thread's looper, then the main looper; if
+    // neither exists, no handler is created.
+    private MediaCodec(
+            @NonNull String name, boolean nameIsType, boolean encoder) {
+        Looper looper = Looper.myLooper();
+        if (looper == null) {
+            looper = Looper.getMainLooper();
+        }
+        mEventHandler = (looper == null) ? null : new EventHandler(this, looper);
+        // Callback and frame-rendered events initially share the same handler.
+        mCallbackHandler = mEventHandler;
+        mOnFrameRenderedHandler = mEventHandler;
+
+        mBufferLock = new Object();
+
+        native_setup(name, nameIsType, encoder);
+    }
+
+    // Last-resort native cleanup; release() should be called explicitly
+    // instead of relying on finalization.
+    @Override
+    protected void finalize() {
+        native_finalize();
+    }
+
+    /**
+     * Returns the codec to its initial (Uninitialized) state.
+     *
+     * Call this if an {@link MediaCodec.CodecException#isRecoverable unrecoverable}
+     * error has occurred to reset the codec to its initial state after creation.
+     *
+     * @throws CodecException if an unrecoverable error has occurred and the codec
+     * could not be reset.
+     * @throws IllegalStateException if in the Released state.
+     */
+    public final void reset() {
+        // Drop Java-side buffer tracking before resetting the native codec.
+        freeAllTrackedBuffers(); // free buffers first
+        native_reset();
+    }
+
+    private native final void native_reset();
+
+    /**
+     * Free up resources used by the codec instance.
+     *
+     * Make sure you call this when you're done to free up any opened
+     * component instance instead of relying on the garbage collector
+     * to do this for you at some point in the future.
+     */
+    public final void release() {
+        // Drop Java-side buffer tracking before destroying the native codec.
+        freeAllTrackedBuffers(); // free buffers first
+        native_release();
+    }
+
+    private native final void native_release();
+
+    /**
+     * If this codec is to be used as an encoder, pass this flag.
+     */
+    public static final int CONFIGURE_FLAG_ENCODE = 1;
+
+    /**
+     * Typedef for the {@code flags} argument of {@link #configure configure}.
+     * @hide
+     */
+    @IntDef(flag = true, value = { CONFIGURE_FLAG_ENCODE })
+    @Retention(RetentionPolicy.SOURCE)
+    public @interface ConfigureFlag {}
+
+    /**
+     * Configures a component.
+     *
+     * @param format The format of the input data (decoder) or the desired
+     *        format of the output data (encoder). Passing {@code null}
+     *        as {@code format} is equivalent to passing an
+     *        {@link MediaFormat#MediaFormat an empty mediaformat}.
+     * @param surface Specify a surface on which to render the output of this
+     *        decoder. Pass {@code null} as {@code surface} if the
+     *        codec does not generate raw video output (e.g. not a video
+     *        decoder) and/or if you want to configure the codec for
+     *        {@link ByteBuffer} output.
+     * @param crypto Specify a crypto object to facilitate secure decryption
+     *        of the media data. Pass {@code null} as {@code crypto} for
+     *        non-secure codecs.
+     * @param flags Specify {@link #CONFIGURE_FLAG_ENCODE} to configure the
+     *        component as an encoder.
+     * @throws IllegalArgumentException if the surface has been released (or is invalid),
+     *         or the format is unacceptable (e.g. missing a mandatory key),
+     *         or the flags are not set properly
+     *         (e.g. missing {@link #CONFIGURE_FLAG_ENCODE} for an encoder).
+     * @throws IllegalStateException if not in the Uninitialized state.
+     * @throws CryptoException upon DRM error.
+     * @throws CodecException upon codec error.
+     */
+    public void configure(
+        @Nullable MediaFormat format,
+        @Nullable Surface surface, @Nullable MediaCrypto crypto,
+        @ConfigureFlag int flags) {
+        // Delegate to the descrambler-aware overload with no descrambler.
+        configure(format, surface, crypto, null /* descrambler */, flags);
+    }
+
+    /**
+     * Configure a component to be used with a descrambler.
+     * @param format The format of the input data (decoder) or the desired
+     *        format of the output data (encoder). Passing {@code null}
+     *        as {@code format} is equivalent to passing an
+     *        {@link MediaFormat#MediaFormat an empty mediaformat}.
+     * @param surface Specify a surface on which to render the output of this
+     *        decoder. Pass {@code null} as {@code surface} if the
+     *        codec does not generate raw video output (e.g. not a video
+     *        decoder) and/or if you want to configure the codec for
+     *        {@link ByteBuffer} output.
+     * @param flags Specify {@link #CONFIGURE_FLAG_ENCODE} to configure the
+     *        component as an encoder.
+     * @param descrambler Specify a descrambler object to facilitate secure
+     *        descrambling of the media data, or null for non-secure codecs.
+     * @throws IllegalArgumentException if the surface has been released (or is invalid),
+     * or the format is unacceptable (e.g. missing a mandatory key),
+     * or the flags are not set properly
+     * (e.g. missing {@link #CONFIGURE_FLAG_ENCODE} for an encoder).
+     * @throws IllegalStateException if not in the Uninitialized state.
+     * @throws CryptoException upon DRM error.
+     * @throws CodecException upon codec error.
+     */
+    public void configure(
+            @Nullable MediaFormat format, @Nullable Surface surface,
+            @ConfigureFlag int flags, @Nullable MediaDescrambler descrambler) {
+        // Pass the descrambler's binder (not the descrambler itself) down to native.
+        configure(format, surface, null,
+                descrambler != null ? descrambler.getBinder() : null, flags);
+    }
+
+    /**
+     * Shared implementation behind the public {@link #configure} overloads.
+     * Flattens the format map into parallel key/value arrays for the native
+     * layer, translating an audio session id into the corresponding
+     * "audio-hw-sync" handle along the way.
+     *
+     * @param format optional media format to configure with
+     * @param surface optional output surface (decoders)
+     * @param crypto optional crypto object; mutually exclusive with descrambler
+     * @param descramblerBinder optional descrambler binder; mutually exclusive with crypto
+     * @param flags configure flags, e.g. {@link #CONFIGURE_FLAG_ENCODE}
+     * @throws IllegalArgumentException if both crypto and descrambler are given,
+     *         or the audio session id entry is not an Integer
+     */
+    private void configure(
+            @Nullable MediaFormat format, @Nullable Surface surface,
+            @Nullable MediaCrypto crypto, @Nullable IHwBinder descramblerBinder,
+            @ConfigureFlag int flags) {
+        if (crypto != null && descramblerBinder != null) {
+            throw new IllegalArgumentException("Can't use crypto and descrambler together!");
+        }
+
+        String[] keys = null;
+        Object[] values = null;
+
+        if (format != null) {
+            Map<String, Object> formatMap = format.getMap();
+            keys = new String[formatMap.size()];
+            values = new Object[formatMap.size()];
+
+            int i = 0;
+            for (Map.Entry<String, Object> entry: formatMap.entrySet()) {
+                if (entry.getKey().equals(MediaFormat.KEY_AUDIO_SESSION_ID)) {
+                    int sessionId = 0;
+                    try {
+                        // The session id must be a boxed Integer; a null value
+                        // unboxes to NPE, any other type fails the cast.
+                        sessionId = (Integer)entry.getValue();
+                    }
+                    catch (ClassCastException | NullPointerException e) {
+                        // Narrowed from catch (Exception) and cause preserved so
+                        // the original failure is not lost to the caller.
+                        throw new IllegalArgumentException("Wrong Session ID Parameter!", e);
+                    }
+                    // Replace the session id with the matching hardware sync handle.
+                    keys[i] = "audio-hw-sync";
+                    values[i] = AudioSystem.getAudioHwSyncForSession(sessionId);
+                } else {
+                    keys[i] = entry.getKey();
+                    values[i] = entry.getValue();
+                }
+                ++i;
+            }
+        }
+
+        // Remember whether an output surface is in use; checked by
+        // setOutputSurface() and the dequeue/release paths.
+        mHasSurface = surface != null;
+
+        native_configure(keys, values, surface, crypto, descramblerBinder, flags);
+    }
+
+    /**
+     * Dynamically sets the output surface of a codec.
+     * <p>
+     * This can only be used if the codec was configured with an output surface. The
+     * new output surface should have a compatible usage type to the original output surface.
+     * E.g. codecs may not support switching from a SurfaceTexture (GPU readable) output
+     * to ImageReader (software readable) output.
+     * @param surface the output surface to use. It must not be {@code null}.
+     * @throws IllegalStateException if the codec does not support setting the output
+     *         surface in the current state.
+     * @throws IllegalArgumentException if the new surface is not of a suitable type for the codec.
+     */
+    public void setOutputSurface(@NonNull Surface surface) {
+        // mHasSurface is set by configure(); switching only makes sense when
+        // the codec was configured for surface output in the first place.
+        if (!mHasSurface) {
+            throw new IllegalStateException("codec was not configured for an output surface");
+        }
+        native_setSurface(surface);
+    }
+
+    // Swaps the native codec's output surface while running.
+    private native void native_setSurface(@NonNull Surface surface);
+
+    /**
+     * Create a persistent input surface that can be used with codecs that normally have an input
+     * surface, such as video encoders. A persistent input can be reused by subsequent
+     * {@link MediaCodec} or {@link MediaRecorder} instances, but can only be used by at
+     * most one codec or recorder instance concurrently.
+     * <p>
+     * The application is responsible for calling release() on the Surface when done.
+     *
+     * @return an input surface that can be used with {@link #setInputSurface}.
+     */
+    @NonNull
+    public static Surface createPersistentInputSurface() {
+        return native_createPersistentInputSurface();
+    }
+
+    // Surface subtype used to tag persistent input surfaces; instances are
+    // created from native code, never from Java (see the no-arg constructor).
+    static class PersistentSurface extends Surface {
+        @SuppressWarnings("unused")
+        PersistentSurface() {} // used by native
+
+        @Override
+        public void release() {
+            // Free the native persistent-surface state before releasing the
+            // underlying Surface.
+            native_releasePersistentInputSurface(this);
+            super.release();
+        }
+
+        // NOTE(review): presumably a native pointer managed by JNI — do not
+        // rename without updating android_media_MediaCodec.cpp.
+        private long mPersistentObject;
+    };
+
+    /**
+     * Configures the codec (e.g. encoder) to use a persistent input surface in place of input
+     * buffers. This may only be called after {@link #configure} and before {@link #start}, in
+     * lieu of {@link #createInputSurface}.
+     * @param surface a persistent input surface created by {@link #createPersistentInputSurface}
+     * @throws IllegalStateException if not in the Configured state or does not require an input
+     *         surface.
+     * @throws IllegalArgumentException if the surface was not created by
+     *         {@link #createPersistentInputSurface}.
+     */
+    public void setInputSurface(@NonNull Surface surface) {
+        // Only surfaces minted by createPersistentInputSurface() are accepted.
+        if (surface instanceof PersistentSurface) {
+            native_setInputSurface(surface);
+        } else {
+            throw new IllegalArgumentException("not a PersistentSurface");
+        }
+    }
+
+    // Native counterparts for persistent-surface management.
+    @NonNull
+    private static native final PersistentSurface native_createPersistentInputSurface();
+    private static native final void native_releasePersistentInputSurface(@NonNull Surface surface);
+    private native final void native_setInputSurface(@NonNull Surface surface);
+
+    // Installs (or clears, with null) the asynchronous-mode callback in native code.
+    private native final void native_setCallback(@Nullable Callback cb);
+
+    // Configures the native codec; keys/values are the flattened MediaFormat map.
+    private native final void native_configure(
+            @Nullable String[] keys, @Nullable Object[] values,
+            @Nullable Surface surface, @Nullable MediaCrypto crypto,
+            @Nullable IHwBinder descramblerBinder, @ConfigureFlag int flags);
+
+    /**
+     * Requests a Surface to use as the input to an encoder, in place of input buffers. This
+     * may only be called after {@link #configure} and before {@link #start}.
+     * <p>
+     * The application is responsible for calling release() on the Surface when
+     * done.
+     * <p>
+     * The Surface must be rendered with a hardware-accelerated API, such as OpenGL ES.
+     * {@link android.view.Surface#lockCanvas(android.graphics.Rect)} may fail or produce
+     * unexpected results.
+     * @throws IllegalStateException if not in the Configured state.
+     */
+    @NonNull
+    public native final Surface createInputSurface(); // implemented in native code
+
+    /**
+     * After successfully configuring the component, call {@code start}.
+     * <p>
+     * Call {@code start} also if the codec is configured in asynchronous mode,
+     * and it has just been flushed, to resume requesting input buffers.
+     * @throws IllegalStateException if not in the Configured state
+     *         or just after {@link #flush} for a codec that is configured
+     *         in asynchronous mode.
+     * @throws MediaCodec.CodecException upon codec error. Note that some codec errors
+     * for start may be attributed to future method calls.
+     */
+    public final void start() {
+        native_start();
+        // Snapshot the codec's input and output ByteBuffer arrays now that the
+        // codec is running; mBufferLock guards the cached-buffer state.
+        synchronized(mBufferLock) {
+            cacheBuffers(true /* input */);
+            cacheBuffers(false /* input */);
+        }
+    }
+    private native final void native_start();
+
+    /**
+     * Finish the decode/encode session, note that the codec instance
+     * remains active and ready to be {@link #start}ed again.
+     * To ensure that it is available to other client call {@link #release}
+     * and don't just rely on garbage collection to eventually do this for you.
+     * @throws IllegalStateException if in the Released state.
+     */
+    public final void stop() {
+        native_stop();
+        freeAllTrackedBuffers();
+
+        // Drop any queued callback/frame-rendered events so stale messages
+        // are not delivered after the codec stopped.
+        synchronized (mListenerLock) {
+            if (mCallbackHandler != null) {
+                mCallbackHandler.removeMessages(EVENT_SET_CALLBACK);
+                mCallbackHandler.removeMessages(EVENT_CALLBACK);
+            }
+            if (mOnFrameRenderedHandler != null) {
+                mOnFrameRenderedHandler.removeMessages(EVENT_FRAME_RENDERED);
+            }
+        }
+    }
+
+    private native final void native_stop();
+
+    /**
+     * Flush both input and output ports of the component.
+     * <p>
+     * Upon return, all indices previously returned in calls to {@link #dequeueInputBuffer
+     * dequeueInputBuffer} and {@link #dequeueOutputBuffer dequeueOutputBuffer} &mdash; or obtained
+     * via {@link Callback#onInputBufferAvailable onInputBufferAvailable} or
+     * {@link Callback#onOutputBufferAvailable onOutputBufferAvailable} callbacks &mdash; become
+     * invalid, and all buffers are owned by the codec.
+     * <p>
+     * If the codec is configured in asynchronous mode, call {@link #start}
+     * after {@code flush} has returned to resume codec operations. The codec
+     * will not request input buffers until this has happened.
+     * <strong>Note, however, that there may still be outstanding {@code onOutputBufferAvailable}
+     * callbacks that were not handled prior to calling {@code flush}.
+     * The indices returned via these callbacks also become invalid upon calling {@code flush} and
+     * should be discarded.</strong>
+     * <p>
+     * If the codec is configured in synchronous mode, codec will resume
+     * automatically if it is configured with an input surface. Otherwise, it
+     * will resume when {@link #dequeueInputBuffer dequeueInputBuffer} is called.
+     *
+     * @throws IllegalStateException if not in the Executing state.
+     * @throws MediaCodec.CodecException upon codec error.
+     */
+    public final void flush() {
+        // Invalidate the Java-side view of all buffers before telling the
+        // native codec to reclaim them.
+        synchronized(mBufferLock) {
+            invalidateByteBuffers(mCachedInputBuffers);
+            invalidateByteBuffers(mCachedOutputBuffers);
+            mDequeuedInputBuffers.clear();
+            mDequeuedOutputBuffers.clear();
+        }
+        native_flush();
+    }
+
+    private native final void native_flush();
+
+    /**
+     * Thrown when an internal codec error occurs.
+     */
+    public final static class CodecException extends IllegalStateException {
+        // Constructed from native code with the raw error and action codes.
+        CodecException(int errorCode, int actionCode, @Nullable String detailMessage) {
+            super(detailMessage);
+            mErrorCode = errorCode;
+            mActionCode = actionCode;
+
+            // TODO get this from codec
+            final String sign = errorCode < 0 ? "neg_" : "";
+            mDiagnosticInfo =
+                "android.media.MediaCodec.error_" + sign + Math.abs(errorCode);
+        }
+
+        /**
+         * Returns true if the codec exception is a transient issue,
+         * perhaps due to resource constraints, and that the method
+         * (or encoding/decoding) may be retried at a later time.
+         */
+        public boolean isTransient() {
+            return mActionCode == ACTION_TRANSIENT;
+        }
+
+        /**
+         * Returns true if the codec cannot proceed further,
+         * but can be recovered by stopping, configuring,
+         * and starting again.
+         */
+        public boolean isRecoverable() {
+            return mActionCode == ACTION_RECOVERABLE;
+        }
+
+        /**
+         * Retrieve the error code associated with a CodecException
+         */
+        public int getErrorCode() {
+            return mErrorCode;
+        }
+
+        /**
+         * Retrieve a developer-readable diagnostic information string
+         * associated with the exception. Do not show this to end-users,
+         * since this string will not be localized or generally
+         * comprehensible to end-users.
+         */
+        public @NonNull String getDiagnosticInfo() {
+            return mDiagnosticInfo;
+        }
+
+        /**
+         * This indicates required resource was not able to be allocated.
+         */
+        public static final int ERROR_INSUFFICIENT_RESOURCE = 1100;
+
+        /**
+         * This indicates the resource manager reclaimed the media resource used by the codec.
+         * <p>
+         * With this exception, the codec must be released, as it has moved to terminal state.
+         */
+        public static final int ERROR_RECLAIMED = 1101;
+
+        /** @hide */
+        @IntDef({
+            ERROR_INSUFFICIENT_RESOURCE,
+            ERROR_RECLAIMED,
+        })
+        @Retention(RetentionPolicy.SOURCE)
+        public @interface ReasonCode {}
+
+        /* Must be in sync with android_media_MediaCodec.cpp */
+        private final static int ACTION_TRANSIENT = 1;
+        private final static int ACTION_RECOVERABLE = 2;
+
+        // Immutable snapshot of the failure; set once in the constructor.
+        private final String mDiagnosticInfo;
+        private final int mErrorCode;
+        private final int mActionCode;
+    }
+
+    /**
+     * Thrown when a crypto error occurs while queueing a secure input buffer.
+     */
+    public final static class CryptoException extends RuntimeException {
+        /**
+         * @param errorCode one of the {@code ERROR_*} codes below
+         * @param detailMessage human-readable detail, may be null
+         */
+        public CryptoException(int errorCode, @Nullable String detailMessage) {
+            super(detailMessage);
+            mErrorCode = errorCode;
+        }
+
+        /**
+         * This indicates that the requested key was not found when trying to
+         * perform a decrypt operation. The operation can be retried after adding
+         * the correct decryption key.
+         */
+        public static final int ERROR_NO_KEY = 1;
+
+        /**
+         * This indicates that the key used for decryption is no longer
+         * valid due to license term expiration. The operation can be retried
+         * after updating the expired keys.
+         */
+        public static final int ERROR_KEY_EXPIRED = 2;
+
+        /**
+         * This indicates that a required crypto resource was not able to be
+         * allocated while attempting the requested operation. The operation
+         * can be retried if the app is able to release resources.
+         */
+        public static final int ERROR_RESOURCE_BUSY = 3;
+
+        /**
+         * This indicates that the output protection levels supported by the
+         * device are not sufficient to meet the requirements set by the
+         * content owner in the license policy.
+         */
+        public static final int ERROR_INSUFFICIENT_OUTPUT_PROTECTION = 4;
+
+        /**
+         * This indicates that decryption was attempted on a session that is
+         * not opened, which could be due to a failure to open the session,
+         * closing the session prematurely, or the session being reclaimed
+         * by the resource manager.
+         */
+        public static final int ERROR_SESSION_NOT_OPENED = 5;
+
+        /**
+         * This indicates that an operation was attempted that could not be
+         * supported by the crypto system of the device in its current
+         * configuration. It may occur when the license policy requires
+         * device security features that aren't supported by the device,
+         * or due to an internal error in the crypto system that prevents
+         * the specified security policy from being met.
+         */
+        public static final int ERROR_UNSUPPORTED_OPERATION = 6;
+
+        /** @hide */
+        @IntDef({
+            ERROR_NO_KEY,
+            ERROR_KEY_EXPIRED,
+            ERROR_RESOURCE_BUSY,
+            ERROR_INSUFFICIENT_OUTPUT_PROTECTION,
+            ERROR_SESSION_NOT_OPENED,
+            ERROR_UNSUPPORTED_OPERATION
+        })
+        @Retention(RetentionPolicy.SOURCE)
+        public @interface CryptoErrorCode {}
+
+        /**
+         * Retrieve the error code associated with a CryptoException
+         */
+        @CryptoErrorCode
+        public int getErrorCode() {
+            return mErrorCode;
+        }
+
+        // Made final: assigned only in the constructor, so the exception's
+        // error code is immutable and safely published across threads.
+        private final int mErrorCode;
+    }
+
+    /**
+     * After filling a range of the input buffer at the specified index
+     * submit it to the component. Once an input buffer is queued to
+     * the codec, it MUST NOT be used until it is later retrieved by
+     * {@link #getInputBuffer} in response to a {@link #dequeueInputBuffer}
+     * return value or a {@link Callback#onInputBufferAvailable}
+     * callback.
+     * <p>
+     * Many decoders require the actual compressed data stream to be
+     * preceded by "codec specific data", i.e. setup data used to initialize
+     * the codec such as PPS/SPS in the case of AVC video or code tables
+     * in the case of vorbis audio.
+     * The class {@link android.media.MediaExtractor} provides codec
+     * specific data as part of
+     * the returned track format in entries named "csd-0", "csd-1" ...
+     * <p>
+     * These buffers can be submitted directly after {@link #start} or
+     * {@link #flush} by specifying the flag {@link
+     * #BUFFER_FLAG_CODEC_CONFIG}. However, if you configure the
+     * codec with a {@link MediaFormat} containing these keys, they
+     * will be automatically submitted by MediaCodec directly after
+     * start. Therefore, the use of {@link
+     * #BUFFER_FLAG_CODEC_CONFIG} flag is discouraged and is
+     * recommended only for advanced users.
+     * <p>
+     * To indicate that this is the final piece of input data (or rather that
+     * no more input data follows unless the decoder is subsequently flushed)
+     * specify the flag {@link #BUFFER_FLAG_END_OF_STREAM}.
+     * <p class=note>
+     * <strong>Note:</strong> Prior to {@link android.os.Build.VERSION_CODES#M},
+     * {@code presentationTimeUs} was not propagated to the frame timestamp of (rendered)
+     * Surface output buffers, and the resulting frame timestamp was undefined.
+     * Use {@link #releaseOutputBuffer(int, long)} to ensure a specific frame timestamp is set.
+     * Similarly, since frame timestamps can be used by the destination surface for rendering
+     * synchronization, <strong>care must be taken to normalize presentationTimeUs so as to not be
+     * mistaken for a system time. (See {@linkplain #releaseOutputBuffer(int, long)
+     * SurfaceView specifics}).</strong>
+     *
+     * @param index The index of a client-owned input buffer previously returned
+     *              in a call to {@link #dequeueInputBuffer}.
+     * @param offset The byte offset into the input buffer at which the data starts.
+     * @param size The number of bytes of valid input data.
+     * @param presentationTimeUs The presentation timestamp in microseconds for this
+     *                           buffer. This is normally the media time at which this
+     *                           buffer should be presented (rendered). When using an output
+     *                           surface, this will be propagated as the {@link
+     *                           SurfaceTexture#getTimestamp timestamp} for the frame (after
+     *                           conversion to nanoseconds).
+     * @param flags A bitmask of flags
+     *              {@link #BUFFER_FLAG_CODEC_CONFIG} and {@link #BUFFER_FLAG_END_OF_STREAM}.
+     *              While not prohibited, most codecs do not use the
+     *              {@link #BUFFER_FLAG_KEY_FRAME} flag for input buffers.
+     * @throws IllegalStateException if not in the Executing state.
+     * @throws MediaCodec.CodecException upon codec error.
+     * @throws CryptoException if a crypto object has been specified in
+     *         {@link #configure}
+     */
+    public final void queueInputBuffer(
+            int index,
+            int offset, int size, long presentationTimeUs, int flags)
+        throws CryptoException {
+        // Hand ownership back to the codec before the native call so the
+        // client can no longer touch the buffer.
+        synchronized(mBufferLock) {
+            invalidateByteBuffer(mCachedInputBuffers, index);
+            mDequeuedInputBuffers.remove(index);
+        }
+        try {
+            native_queueInputBuffer(
+                    index, offset, size, presentationTimeUs, flags);
+        } catch (CryptoException | IllegalStateException e) {
+            // Queueing failed: the client still owns the buffer, so restore
+            // its Java-side view before rethrowing.
+            revalidateByteBuffer(mCachedInputBuffers, index);
+            throw e;
+        }
+    }
+
+    private native final void native_queueInputBuffer(
+            int index,
+            int offset, int size, long presentationTimeUs, int flags)
+        throws CryptoException;
+
+    /** The input sample data is not encrypted. */
+    public static final int CRYPTO_MODE_UNENCRYPTED = 0;
+    /** The input sample data is encrypted with AES in CTR mode. */
+    public static final int CRYPTO_MODE_AES_CTR = 1;
+    /** The input sample data is encrypted with AES in CBC mode. */
+    public static final int CRYPTO_MODE_AES_CBC = 2;
+
+    /**
+     * Metadata describing the structure of a (at least partially) encrypted
+     * input sample.
+     * A buffer's data is considered to be partitioned into "subSamples",
+     * each subSample starts with a (potentially empty) run of plain,
+     * unencrypted bytes followed by a (also potentially empty) run of
+     * encrypted bytes. If pattern encryption applies, each of the latter runs
+     * is encrypted only partly, according to a repeating pattern of "encrypt"
+     * and "skip" blocks. numBytesOfClearData can be null to indicate that all
+     * data is encrypted. This information encapsulates per-sample metadata as
+     * outlined in ISO/IEC FDIS 23001-7:2011 "Common encryption in ISO base
+     * media file format files".
+     */
+    public final static class CryptoInfo {
+        /**
+         * The number of subSamples that make up the buffer's contents.
+         */
+        public int numSubSamples;
+        /**
+         * The number of leading unencrypted bytes in each subSample.
+         */
+        public int[] numBytesOfClearData;
+        /**
+         * The number of trailing encrypted bytes in each subSample.
+         */
+        public int[] numBytesOfEncryptedData;
+        /**
+         * A 16-byte key id
+         */
+        public byte[] key;
+        /**
+         * A 16-byte initialization vector
+         */
+        public byte[] iv;
+        /**
+         * The type of encryption that has been applied,
+         * see {@link #CRYPTO_MODE_UNENCRYPTED}, {@link #CRYPTO_MODE_AES_CTR}
+         * and {@link #CRYPTO_MODE_AES_CBC}
+         */
+        public int mode;
+
+        /**
+         * Metadata describing an encryption pattern for the protected bytes in
+         * a subsample. An encryption pattern consists of a repeating sequence
+         * of crypto blocks comprised of a number of encrypted blocks followed
+         * by a number of unencrypted, or skipped, blocks.
+         */
+        public final static class Pattern {
+            /**
+             * Number of blocks to be encrypted in the pattern. If zero, pattern
+             * encryption is inoperative.
+             */
+            private int mEncryptBlocks;
+
+            /**
+             * Number of blocks to be skipped (left clear) in the pattern. If zero,
+             * pattern encryption is inoperative.
+             */
+            private int mSkipBlocks;
+
+            /**
+             * Construct a sample encryption pattern given the number of blocks to
+             * encrypt and skip in the pattern.
+             */
+            public Pattern(int blocksToEncrypt, int blocksToSkip) {
+                set(blocksToEncrypt, blocksToSkip);
+            }
+
+            /**
+             * Set the number of blocks to encrypt and skip in a sample encryption
+             * pattern.
+             */
+            public void set(int blocksToEncrypt, int blocksToSkip) {
+                mEncryptBlocks = blocksToEncrypt;
+                mSkipBlocks = blocksToSkip;
+            }
+
+            /**
+             * Return the number of blocks to skip in a sample encryption pattern.
+             */
+            public int getSkipBlocks() {
+                return mSkipBlocks;
+            }
+
+            /**
+             * Return the number of blocks to encrypt in a sample encryption pattern.
+             */
+            public int getEncryptBlocks() {
+                return mEncryptBlocks;
+            }
+        };
+
+        // Shared "no pattern encryption" sentinel installed by set().
+        private final Pattern zeroPattern = new Pattern(0, 0);
+
+        /**
+         * The pattern applicable to the protected data in each subsample.
+         */
+        private Pattern pattern;
+
+        /**
+         * Set the subsample count, clear/encrypted sizes, key, IV and mode fields of
+         * a {@link MediaCodec.CryptoInfo} instance.
+         */
+        public void set(
+                int newNumSubSamples,
+                @NonNull int[] newNumBytesOfClearData,
+                @NonNull int[] newNumBytesOfEncryptedData,
+                @NonNull byte[] newKey,
+                @NonNull byte[] newIV,
+                int newMode) {
+            numSubSamples = newNumSubSamples;
+            numBytesOfClearData = newNumBytesOfClearData;
+            numBytesOfEncryptedData = newNumBytesOfEncryptedData;
+            key = newKey;
+            iv = newIV;
+            mode = newMode;
+            // set() always resets the pattern; use setPattern() afterwards to
+            // enable pattern encryption.
+            pattern = zeroPattern;
+        }
+
+        /**
+         * Set the encryption pattern on a {@link MediaCodec.CryptoInfo} instance.
+         * See {@link MediaCodec.CryptoInfo.Pattern}.
+         */
+        public void setPattern(Pattern newPattern) {
+            pattern = newPattern;
+        }
+
+        @Override
+        public String toString() {
+            StringBuilder builder = new StringBuilder();
+            builder.append(numSubSamples + " subsamples, key [");
+            String hexdigits = "0123456789abcdef";
+            for (int i = 0; i < key.length; i++) {
+                builder.append(hexdigits.charAt((key[i] & 0xf0) >> 4));
+                builder.append(hexdigits.charAt(key[i] & 0x0f));
+            }
+            builder.append("], iv [");
+            // BUGFIX: iterate over iv.length (was key.length). Both are
+            // normally 16 bytes, but a shorter iv would have thrown
+            // ArrayIndexOutOfBoundsException and a longer one was truncated.
+            for (int i = 0; i < iv.length; i++) {
+                builder.append(hexdigits.charAt((iv[i] & 0xf0) >> 4));
+                builder.append(hexdigits.charAt(iv[i] & 0x0f));
+            }
+            builder.append("], clear ");
+            builder.append(Arrays.toString(numBytesOfClearData));
+            builder.append(", encrypted ");
+            builder.append(Arrays.toString(numBytesOfEncryptedData));
+            return builder.toString();
+        }
+    };
+
+    /**
+     * Similar to {@link #queueInputBuffer queueInputBuffer} but submits a buffer that is
+     * potentially encrypted.
+     * <strong>Check out further notes at {@link #queueInputBuffer queueInputBuffer}.</strong>
+     *
+     * @param index The index of a client-owned input buffer previously returned
+     *              in a call to {@link #dequeueInputBuffer}.
+     * @param offset The byte offset into the input buffer at which the data starts.
+     * @param info Metadata required to facilitate decryption, the object can be
+     *             reused immediately after this call returns.
+     * @param presentationTimeUs The presentation timestamp in microseconds for this
+     *                           buffer. This is normally the media time at which this
+     *                           buffer should be presented (rendered).
+     * @param flags A bitmask of flags
+     *              {@link #BUFFER_FLAG_CODEC_CONFIG} and {@link #BUFFER_FLAG_END_OF_STREAM}.
+     *              While not prohibited, most codecs do not use the
+     *              {@link #BUFFER_FLAG_KEY_FRAME} flag for input buffers.
+     * @throws IllegalStateException if not in the Executing state.
+     * @throws MediaCodec.CodecException upon codec error.
+     * @throws CryptoException if an error occurs while attempting to decrypt the buffer.
+     *              An error code associated with the exception helps identify the
+     *              reason for the failure.
+     */
+    public final void queueSecureInputBuffer(
+            int index,
+            int offset,
+            @NonNull CryptoInfo info,
+            long presentationTimeUs,
+            int flags) throws CryptoException {
+        // Same ownership handshake as queueInputBuffer(): invalidate first,
+        // restore on failure so the client keeps the buffer.
+        synchronized(mBufferLock) {
+            invalidateByteBuffer(mCachedInputBuffers, index);
+            mDequeuedInputBuffers.remove(index);
+        }
+        try {
+            native_queueSecureInputBuffer(
+                    index, offset, info, presentationTimeUs, flags);
+        } catch (CryptoException | IllegalStateException e) {
+            revalidateByteBuffer(mCachedInputBuffers, index);
+            throw e;
+        }
+    }
+
+    private native final void native_queueSecureInputBuffer(
+            int index,
+            int offset,
+            @NonNull CryptoInfo info,
+            long presentationTimeUs,
+            int flags) throws CryptoException;
+
+    /**
+     * Returns the index of an input buffer to be filled with valid data
+     * or -1 if no such buffer is currently available.
+     * This method will return immediately if timeoutUs == 0, wait indefinitely
+     * for the availability of an input buffer if timeoutUs &lt; 0 or wait up
+     * to "timeoutUs" microseconds if timeoutUs &gt; 0.
+     * @param timeoutUs The timeout in microseconds, a negative timeout indicates "infinite".
+     * @throws IllegalStateException if not in the Executing state,
+     *         or codec is configured in asynchronous mode.
+     * @throws MediaCodec.CodecException upon codec error.
+     */
+    public final int dequeueInputBuffer(long timeoutUs) {
+        int res = native_dequeueInputBuffer(timeoutUs);
+        // A non-negative result is a buffer index now owned by the client;
+        // mark its cached ByteBuffer as accessible.
+        if (res >= 0) {
+            synchronized(mBufferLock) {
+                validateInputByteBuffer(mCachedInputBuffers, res);
+            }
+        }
+        return res;
+    }
+
+    private native final int native_dequeueInputBuffer(long timeoutUs);
+
+    /**
+     * If a non-negative timeout had been specified in the call
+     * to {@link #dequeueOutputBuffer}, indicates that the call timed out.
+     */
+    public static final int INFO_TRY_AGAIN_LATER = -1;
+
+    /**
+     * The output format has changed, subsequent data will follow the new
+     * format. {@link #getOutputFormat()} returns the new format. Note, that
+     * you can also use the new {@link #getOutputFormat(int)} method to
+     * get the format for a specific output buffer. This frees you from
+     * having to track output format changes.
+     */
+    public static final int INFO_OUTPUT_FORMAT_CHANGED = -2;
+
+    /**
+     * The output buffers have changed, the client must refer to the new
+     * set of output buffers returned by {@link #getOutputBuffers} from
+     * this point on.
+     *
+     * <p>Additionally, this event signals that the video scaling mode
+     * may have been reset to the default.</p>
+     *
+     * @deprecated This return value can be ignored as {@link
+     * #getOutputBuffers} has been deprecated. Client should
+     * request a current buffer using one of the get-buffer or
+     * get-image methods each time one has been dequeued.
+     */
+    public static final int INFO_OUTPUT_BUFFERS_CHANGED = -3;
+
+    /** @hide */
+    // Negative sentinel values returned by dequeueOutputBuffer().
+    @IntDef({
+        INFO_TRY_AGAIN_LATER,
+        INFO_OUTPUT_FORMAT_CHANGED,
+        INFO_OUTPUT_BUFFERS_CHANGED,
+    })
+    @Retention(RetentionPolicy.SOURCE)
+    public @interface OutputBufferInfo {}
+
+    /**
+     * Dequeue an output buffer, block at most "timeoutUs" microseconds.
+     * Returns the index of an output buffer that has been successfully
+     * decoded or one of the INFO_* constants.
+     * @param info Will be filled with buffer meta data.
+     * @param timeoutUs The timeout in microseconds, a negative timeout indicates "infinite".
+     * @throws IllegalStateException if not in the Executing state,
+     *         or codec is configured in asynchronous mode.
+     * @throws MediaCodec.CodecException upon codec error.
+     */
+    @OutputBufferInfo
+    public final int dequeueOutputBuffer(
+            @NonNull BufferInfo info, long timeoutUs) {
+        int res = native_dequeueOutputBuffer(info, timeoutUs);
+        synchronized(mBufferLock) {
+            if (res == INFO_OUTPUT_BUFFERS_CHANGED) {
+                // Output buffer array was replaced; refresh the cached view.
+                cacheBuffers(false /* input */);
+            } else if (res >= 0) {
+                validateOutputByteBuffer(mCachedOutputBuffers, res, info);
+                if (mHasSurface) {
+                    // Keep a copy of the metadata for releaseOutputBuffer().
+                    mDequeuedOutputInfos.put(res, info.dup());
+                }
+            }
+        }
+        return res;
+    }
+
+    private native final int native_dequeueOutputBuffer(
+            @NonNull BufferInfo info, long timeoutUs);
+
+    /**
+     * If you are done with a buffer, use this call to return the buffer to the codec
+     * or to render it on the output surface. If you configured the codec with an
+     * output surface, setting {@code render} to {@code true} will first send the buffer
+     * to that output surface. The surface will release the buffer back to the codec once
+     * it is no longer used/displayed.
+     *
+     * Once an output buffer is released to the codec, it MUST NOT
+     * be used until it is later retrieved by {@link #getOutputBuffer} in response
+     * to a {@link #dequeueOutputBuffer} return value or a
+     * {@link Callback#onOutputBufferAvailable} callback.
+     *
+     * @param index The index of a client-owned output buffer previously returned
+     *              from a call to {@link #dequeueOutputBuffer}.
+     * @param render If a valid surface was specified when configuring the codec,
+     *               passing true renders this output buffer to the surface.
+     * @throws IllegalStateException if not in the Executing state.
+     * @throws MediaCodec.CodecException upon codec error.
+     */
+    public final void releaseOutputBuffer(int index, boolean render) {
+        synchronized(mBufferLock) {
+            invalidateByteBuffer(mCachedOutputBuffers, index);
+            mDequeuedOutputBuffers.remove(index);
+            if (mHasSurface) {
+                // Discard the metadata saved at dequeue time; the buffer is no
+                // longer client-owned. (The dead local that used to hold the
+                // removed value has been dropped.)
+                mDequeuedOutputInfos.remove(index);
+            }
+        }
+        releaseOutputBuffer(index, render, false /* updatePTS */, 0 /* dummy */);
+    }
+
+    /**
+     * If you are done with a buffer, use this call to update its surface timestamp
+     * and return it to the codec to render it on the output surface. If you
+     * have not specified an output surface when configuring this video codec,
+     * this call will simply return the buffer to the codec.<p>
+     *
+     * The timestamp may have special meaning depending on the destination surface.
+     *
+     * <table>
+     * <tr><th>SurfaceView specifics</th></tr>
+     * <tr><td>
+     * If you render your buffer on a {@link android.view.SurfaceView},
+     * you can use the timestamp to render the buffer at a specific time (at the
+     * VSYNC at or after the buffer timestamp). For this to work, the timestamp
+     * needs to be <i>reasonably close</i> to the current {@link System#nanoTime}.
+     * Currently, this is set as within one (1) second. A few notes:
+     *
+     * <ul>
+     * <li>the buffer will not be returned to the codec until the timestamp
+     * has passed and the buffer is no longer used by the {@link android.view.Surface}.
+     * <li>buffers are processed sequentially, so you may block subsequent buffers to
+     * be displayed on the {@link android.view.Surface}. This is important if you
+     * want to react to user action, e.g. stop the video or seek.
+     * <li>if multiple buffers are sent to the {@link android.view.Surface} to be
+     * rendered at the same VSYNC, the last one will be shown, and the other ones
+     * will be dropped.
+     * <li>if the timestamp is <em>not</em> "reasonably close" to the current system
+     * time, the {@link android.view.Surface} will ignore the timestamp, and
+     * display the buffer at the earliest feasible time. In this mode it will not
+     * drop frames.
+     * <li>for best performance and quality, call this method when you are about
+     * two VSYNCs' time before the desired render time. For 60Hz displays, this is
+     * about 33 msec.
+     * </ul>
+     * </td></tr>
+     * </table>
+     *
+     * Once an output buffer is released to the codec, it MUST NOT
+     * be used until it is later retrieved by {@link #getOutputBuffer} in response
+     * to a {@link #dequeueOutputBuffer} return value or a
+     * {@link Callback#onOutputBufferAvailable} callback.
+     *
+     * @param index The index of a client-owned output buffer previously returned
+     *              from a call to {@link #dequeueOutputBuffer}.
+     * @param renderTimestampNs The timestamp to associate with this buffer when
+     *              it is sent to the Surface.
+     * @throws IllegalStateException if not in the Executing state.
+     * @throws MediaCodec.CodecException upon codec error.
+     */
+    public final void releaseOutputBuffer(int index, long renderTimestampNs) {
+        synchronized(mBufferLock) {
+            // Client gives up ownership: make any cached view inaccessible and
+            // drop tracking state for this index.
+            invalidateByteBuffer(mCachedOutputBuffers, index);
+            mDequeuedOutputBuffers.remove(index);
+            if (mHasSurface) {
+                // The removed BufferInfo is no longer needed once the buffer
+                // returns to the codec, so it is intentionally discarded here.
+                mDequeuedOutputInfos.remove(index);
+            }
+        }
+        releaseOutputBuffer(
+                index, true /* render */, true /* updatePTS */, renderTimestampNs);
+    }
+
+    // Native back end shared by the releaseOutputBuffer overloads. When
+    // updatePTS is true, timeNs is the render timestamp associated with the
+    // buffer when it is sent to the Surface; otherwise timeNs is ignored
+    // (callers pass 0 as a dummy).
+    private native final void releaseOutputBuffer(
+            int index, boolean render, boolean updatePTS, long timeNs);
+
+    /**
+     * Signals end-of-stream on input. Equivalent to submitting an empty buffer with
+     * {@link #BUFFER_FLAG_END_OF_STREAM} set. This may only be used with
+     * encoders receiving input from a Surface created by {@link #createInputSurface}.
+     * <p>
+     * For codecs fed via input buffers, queue an empty buffer with
+     * {@link #BUFFER_FLAG_END_OF_STREAM} set instead of calling this method.
+     *
+     * @throws IllegalStateException if not in the Executing state.
+     * @throws MediaCodec.CodecException upon codec error.
+     */
+    public native final void signalEndOfInputStream();
+
+    /**
+     * Call this after dequeueOutputBuffer signals a format change by returning
+     * {@link #INFO_OUTPUT_FORMAT_CHANGED}. You can also call this after
+     * {@link #configure} returns successfully to get the output format
+     * initially configured for the codec; do this to determine what optional
+     * configuration parameters were supported by the codec.
+     *
+     * @return the codec's current output format.
+     * @throws IllegalStateException if not in the Executing or
+     *                               Configured state.
+     * @throws MediaCodec.CodecException upon codec error.
+     */
+    @NonNull
+    public final MediaFormat getOutputFormat() {
+        final Map<String, Object> nativeFormat = getFormatNative(false /* input */);
+        return new MediaFormat(nativeFormat);
+    }
+
+    /**
+     * Call this after {@link #configure} returns successfully to get the
+     * input format accepted by the codec. Do this to determine what optional
+     * configuration parameters were supported by the codec.
+     *
+     * @return the codec's current input format.
+     * @throws IllegalStateException if not in the Executing or
+     *                               Configured state.
+     * @throws MediaCodec.CodecException upon codec error.
+     */
+    @NonNull
+    public final MediaFormat getInputFormat() {
+        final Map<String, Object> nativeFormat = getFormatNative(true /* input */);
+        return new MediaFormat(nativeFormat);
+    }
+
+    /**
+     * Returns the output format for a specific output buffer.
+     *
+     * @param index The index of a client-owned output buffer previously
+     *              returned from a call to {@link #dequeueOutputBuffer}.
+     *
+     * @return the format for the output buffer.
+     *         NOTE(review): earlier text said "or null if the index is not a
+     *         dequeued output buffer", which contradicts the {@code @NonNull}
+     *         annotation below — confirm the native behavior for an invalid
+     *         index before relying on a null return.
+     */
+    @NonNull
+    public final MediaFormat getOutputFormat(int index) {
+        return new MediaFormat(getOutputFormatNative(index));
+    }
+
+    // Returns the native key/value map describing the codec's current input
+    // (input == true) or output (input == false) format.
+    @NonNull
+    private native final Map<String, Object> getFormatNative(boolean input);
+
+    // Returns the native key/value map for the format of one output buffer.
+    @NonNull
+    private native final Map<String, Object> getOutputFormatNative(int index);
+
+    // Tracks buffers currently dequeued by the client, keyed by codec buffer index.
+    private static class BufferMap {
+        // Holds whichever representation (Image and/or ByteBuffer) was handed
+        // to the client for one codec buffer, and can release it.
+        private static class CodecBuffer {
+            private Image mImage;
+            private ByteBuffer mByteBuffer;
+
+            // Releases any representation currently held.
+            public void free() {
+                if (mByteBuffer != null) {
+                    // all of our ByteBuffers are direct
+                    java.nio.NioUtils.freeDirectBuffer(mByteBuffer);
+                    mByteBuffer = null;
+                }
+                if (mImage != null) {
+                    mImage.close();
+                    mImage = null;
+                }
+            }
+
+            // Replaces the held representation with an Image.
+            public void setImage(@Nullable Image image) {
+                free();
+                mImage = image;
+            }
+
+            // Replaces the held representation with a ByteBuffer.
+            public void setByteBuffer(@Nullable ByteBuffer buffer) {
+                free();
+                mByteBuffer = buffer;
+            }
+        }
+
+        private final Map<Integer, CodecBuffer> mMap =
+                new HashMap<Integer, CodecBuffer>();
+
+        // Returns the entry for index, creating it on first use.
+        private CodecBuffer bufferFor(int index) {
+            CodecBuffer buffer = mMap.get(index);
+            if (buffer == null) { // likely
+                buffer = new CodecBuffer();
+                mMap.put(index, buffer);
+            }
+            return buffer;
+        }
+
+        // Frees and forgets the entry for index, if any.
+        public void remove(int index) {
+            final CodecBuffer buffer = mMap.remove(index);
+            if (buffer != null) {
+                buffer.free();
+            }
+        }
+
+        public void put(int index, @Nullable ByteBuffer newBuffer) {
+            bufferFor(index).setByteBuffer(newBuffer);
+        }
+
+        public void put(int index, @Nullable Image newImage) {
+            bufferFor(index).setImage(newImage);
+        }
+
+        // Frees every tracked buffer and empties the map.
+        public void clear() {
+            for (CodecBuffer buffer : mMap.values()) {
+                buffer.free();
+            }
+            mMap.clear();
+        }
+    }
+
+    // Buffer arrays served by the deprecated getInputBuffers()/getOutputBuffers();
+    // left null when cacheBuffers() fails (e.g. in async mode).
+    private ByteBuffer[] mCachedInputBuffers;
+    private ByteBuffer[] mCachedOutputBuffers;
+    // Per-index buffers/images currently owned by the client; guarded by mBufferLock.
+    private final BufferMap mDequeuedInputBuffers = new BufferMap();
+    private final BufferMap mDequeuedOutputBuffers = new BufferMap();
+    // BufferInfo for dequeued output buffers while a surface is attached;
+    // guarded by mBufferLock.
+    private final Map<Integer, BufferInfo> mDequeuedOutputInfos =
+            new HashMap<Integer, BufferInfo>();
+    // Protects all of the buffer-tracking state above.
+    final private Object mBufferLock;
+
+    // Marks the cached ByteBuffer at the given index (if any) inaccessible,
+    // so stale client references fail fast once the codec owns the buffer again.
+    private final void invalidateByteBuffer(
+            @Nullable ByteBuffer[] buffers, int index) {
+        if (buffers == null || index < 0 || index >= buffers.length) {
+            return;
+        }
+        final ByteBuffer buffer = buffers[index];
+        if (buffer != null) {
+            buffer.setAccessible(false);
+        }
+    }
+
+    // Makes the cached input ByteBuffer at the given index accessible and
+    // clears it so the client can write fresh input data from position 0.
+    private final void validateInputByteBuffer(
+            @Nullable ByteBuffer[] buffers, int index) {
+        if (buffers == null || index < 0 || index >= buffers.length) {
+            return;
+        }
+        final ByteBuffer buffer = buffers[index];
+        if (buffer != null) {
+            buffer.setAccessible(true);
+            buffer.clear();
+        }
+    }
+
+    // Re-enables access to a previously invalidated cached ByteBuffer without
+    // disturbing its position/limit; done under mBufferLock.
+    private final void revalidateByteBuffer(
+            @Nullable ByteBuffer[] buffers, int index) {
+        synchronized (mBufferLock) {
+            if (buffers == null || index < 0 || index >= buffers.length) {
+                return;
+            }
+            final ByteBuffer buffer = buffers[index];
+            if (buffer != null) {
+                buffer.setAccessible(true);
+            }
+        }
+    }
+
+    // Makes the cached output ByteBuffer at the given index accessible and
+    // windows it to the valid data range [info.offset, info.offset + info.size).
+    private final void validateOutputByteBuffer(
+            @Nullable ByteBuffer[] buffers, int index, @NonNull BufferInfo info) {
+        if (buffers == null || index < 0 || index >= buffers.length) {
+            return;
+        }
+        final ByteBuffer buffer = buffers[index];
+        if (buffer != null) {
+            buffer.setAccessible(true);
+            buffer.limit(info.offset + info.size).position(info.offset);
+        }
+    }
+
+    // Marks every non-null buffer in the cache array inaccessible.
+    private final void invalidateByteBuffers(@Nullable ByteBuffer[] buffers) {
+        if (buffers == null) {
+            return;
+        }
+        for (ByteBuffer buffer : buffers) {
+            if (buffer != null) {
+                buffer.setAccessible(false);
+            }
+        }
+    }
+
+    // Releases the native storage behind one cached buffer.
+    private final void freeByteBuffer(@Nullable ByteBuffer buffer) {
+        if (buffer == null) {
+            return;
+        }
+        // all of our ByteBuffers are direct, so no isDirect() check is needed
+        java.nio.NioUtils.freeDirectBuffer(buffer);
+    }
+
+    // Releases every buffer in the given cache array.
+    private final void freeByteBuffers(@Nullable ByteBuffer[] buffers) {
+        if (buffers == null) {
+            return;
+        }
+        for (ByteBuffer buffer : buffers) {
+            freeByteBuffer(buffer);
+        }
+    }
+
+    // Frees all cached and client-dequeued buffers/images and resets the
+    // tracking state; afterwards no previously returned buffer may be used.
+    private final void freeAllTrackedBuffers() {
+        synchronized(mBufferLock) {
+            freeByteBuffers(mCachedInputBuffers);
+            freeByteBuffers(mCachedOutputBuffers);
+            mCachedInputBuffers = null;
+            mCachedOutputBuffers = null;
+            mDequeuedInputBuffers.clear();
+            mDequeuedOutputBuffers.clear();
+        }
+    }
+
+    // Re-fetches and caches the input or output buffer array from the codec.
+    // On failure the corresponding cache is set to null.
+    private final void cacheBuffers(boolean input) {
+        ByteBuffer[] buffers = null;
+        try {
+            buffers = getBuffers(input);
+            // freshly cached buffers stay inaccessible until dequeued
+            invalidateByteBuffers(buffers);
+        } catch (IllegalStateException e) {
+            // we don't get buffers in async mode
+        }
+        if (input) {
+            mCachedInputBuffers = buffers;
+        } else {
+            mCachedOutputBuffers = buffers;
+        }
+    }
+
+    /**
+     * Retrieve the set of input buffers. Call this after start() returns.
+     * After calling this method, any ByteBuffers previously returned by an
+     * earlier call to this method MUST no longer be used.
+     *
+     * @deprecated Use the new {@link #getInputBuffer} method instead
+     * each time an input buffer is dequeued.
+     *
+     * <b>Note:</b> As of API 21, dequeued input buffers are
+     * automatically {@link java.nio.Buffer#clear cleared}.
+     *
+     * <em>Do not use this method if using an input surface.</em>
+     *
+     * @throws IllegalStateException if not in the Executing state,
+     *         or codec is configured in asynchronous mode.
+     * @throws MediaCodec.CodecException upon codec error.
+     */
+    @NonNull
+    public ByteBuffer[] getInputBuffers() {
+        final ByteBuffer[] buffers = mCachedInputBuffers;
+        if (buffers == null) {
+            throw new IllegalStateException();
+        }
+        // FIXME: check codec status
+        return buffers;
+    }
+
+    /**
+     * Retrieve the set of output buffers. Call this after start() returns and
+     * whenever dequeueOutputBuffer signals an output buffer change by
+     * returning {@link #INFO_OUTPUT_BUFFERS_CHANGED}. After calling this
+     * method, any ByteBuffers previously returned by an earlier call to this
+     * method MUST no longer be used.
+     *
+     * @deprecated Use the new {@link #getOutputBuffer} method instead
+     * each time an output buffer is dequeued. This method is not
+     * supported if codec is configured in asynchronous mode.
+     *
+     * <b>Note:</b> As of API 21, the position and limit of output
+     * buffers that are dequeued will be set to the valid data
+     * range.
+     *
+     * <em>Do not use this method if using an output surface.</em>
+     *
+     * @throws IllegalStateException if not in the Executing state,
+     *         or codec is configured in asynchronous mode.
+     * @throws MediaCodec.CodecException upon codec error.
+     */
+    @NonNull
+    public ByteBuffer[] getOutputBuffers() {
+        final ByteBuffer[] buffers = mCachedOutputBuffers;
+        if (buffers == null) {
+            throw new IllegalStateException();
+        }
+        // FIXME: check codec status
+        return buffers;
+    }
+
+    /**
+     * Returns a {@link java.nio.Buffer#clear cleared}, writable ByteBuffer
+     * object for a dequeued input buffer index to contain the input data.
+     *
+     * After calling this method any ByteBuffer or Image object previously
+     * returned for the same input index MUST no longer be used.
+     *
+     * @param index The index of a client-owned input buffer previously
+     *              returned from a call to {@link #dequeueInputBuffer},
+     *              or received via an onInputBufferAvailable callback.
+     *
+     * @return the input buffer, or null if the index is not a dequeued
+     *         input buffer, or if the codec is configured for surface input.
+     *
+     * @throws IllegalStateException if not in the Executing state.
+     * @throws MediaCodec.CodecException upon codec error.
+     */
+    @Nullable
+    public ByteBuffer getInputBuffer(int index) {
+        final ByteBuffer buffer = getBuffer(true /* input */, index);
+        synchronized (mBufferLock) {
+            // retire any old representation of this index before handing out a new one
+            invalidateByteBuffer(mCachedInputBuffers, index);
+            mDequeuedInputBuffers.put(index, buffer);
+        }
+        return buffer;
+    }
+
+    /**
+     * Returns a writable Image object for a dequeued input buffer
+     * index to contain the raw input video frame.
+     *
+     * After calling this method any ByteBuffer or Image object previously
+     * returned for the same input index MUST no longer be used.
+     *
+     * @param index The index of a client-owned input buffer previously
+     *              returned from a call to {@link #dequeueInputBuffer},
+     *              or received via an onInputBufferAvailable callback.
+     *
+     * @return the input image, or null if the index is not a dequeued input
+     *         buffer, or not a ByteBuffer that contains a raw image.
+     *
+     * @throws IllegalStateException if not in the Executing state.
+     * @throws MediaCodec.CodecException upon codec error.
+     */
+    @Nullable
+    public Image getInputImage(int index) {
+        final Image image = getImage(true /* input */, index);
+        synchronized (mBufferLock) {
+            // retire any old representation of this index before handing out a new one
+            invalidateByteBuffer(mCachedInputBuffers, index);
+            mDequeuedInputBuffers.put(index, image);
+        }
+        return image;
+    }
+
+    /**
+     * Returns a read-only ByteBuffer for a dequeued output buffer index.
+     * The position and limit of the returned buffer are set to the valid
+     * output data.
+     *
+     * After calling this method, any ByteBuffer or Image object previously
+     * returned for the same output index MUST no longer be used.
+     *
+     * @param index The index of a client-owned output buffer previously
+     *              returned from a call to {@link #dequeueOutputBuffer},
+     *              or received via an onOutputBufferAvailable callback.
+     *
+     * @return the output buffer, or null if the index is not a dequeued
+     *         output buffer, or the codec is configured with an output surface.
+     *
+     * @throws IllegalStateException if not in the Executing state.
+     * @throws MediaCodec.CodecException upon codec error.
+     */
+    @Nullable
+    public ByteBuffer getOutputBuffer(int index) {
+        final ByteBuffer buffer = getBuffer(false /* input */, index);
+        synchronized (mBufferLock) {
+            // retire any old representation of this index before handing out a new one
+            invalidateByteBuffer(mCachedOutputBuffers, index);
+            mDequeuedOutputBuffers.put(index, buffer);
+        }
+        return buffer;
+    }
+
+    /**
+     * Returns a read-only Image object for a dequeued output buffer
+     * index that contains the raw video frame.
+     *
+     * After calling this method, any ByteBuffer or Image object previously
+     * returned for the same output index MUST no longer be used.
+     *
+     * @param index The index of a client-owned output buffer previously
+     *              returned from a call to {@link #dequeueOutputBuffer},
+     *              or received via an onOutputBufferAvailable callback.
+     *
+     * @return the output image, or null if the index is not a dequeued
+     *         output buffer, not a raw video frame, or if the codec was
+     *         configured with an output surface.
+     *
+     * @throws IllegalStateException if not in the Executing state.
+     * @throws MediaCodec.CodecException upon codec error.
+     */
+    @Nullable
+    public Image getOutputImage(int index) {
+        final Image image = getImage(false /* input */, index);
+        synchronized (mBufferLock) {
+            // retire any old representation of this index before handing out a new one
+            invalidateByteBuffer(mCachedOutputBuffers, index);
+            mDequeuedOutputBuffers.put(index, image);
+        }
+        return image;
+    }
+
+    /**
+     * The content is scaled to the surface dimensions.
+     * <p class=note>
+     * This is the default scaling mode (see {@link #setVideoScalingMode}).
+     */
+    public static final int VIDEO_SCALING_MODE_SCALE_TO_FIT = 1;
+
+    /**
+     * The content is scaled, maintaining its aspect ratio, the whole
+     * surface area is used, content may be cropped.
+     * <p class=note>
+     * This mode is only suitable for content with 1:1 pixel aspect ratio as you cannot
+     * configure the pixel aspect ratio for a {@link Surface}.
+     * <p class=note>
+     * As of {@link android.os.Build.VERSION_CODES#N} release, this mode may not work if
+     * the video is {@linkplain MediaFormat#KEY_ROTATION rotated} by 90 or 270 degrees.
+     */
+    public static final int VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING = 2;
+
+    /** Valid values for {@link #setVideoScalingMode}. @hide */
+    @IntDef({
+        VIDEO_SCALING_MODE_SCALE_TO_FIT,
+        VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING,
+    })
+    @Retention(RetentionPolicy.SOURCE)
+    public @interface VideoScalingMode {}
+
+    /**
+     * If a surface has been specified in a previous call to {@link #configure}
+     * specifies the scaling mode to use. The default is "scale to fit".
+     * <p class=note>
+     * The scaling mode may be reset to the <strong>default</strong> each time an
+     * {@link #INFO_OUTPUT_BUFFERS_CHANGED} event is received from the codec; therefore, the client
+     * must call this method after every buffer change event (and before the first output buffer is
+     * released for rendering) to ensure consistent scaling mode.
+     * <p class=note>
+     * Since the {@link #INFO_OUTPUT_BUFFERS_CHANGED} event is deprecated, this can also be done
+     * after each {@link #INFO_OUTPUT_FORMAT_CHANGED} event.
+     *
+     * @param mode the scaling mode to use; one of the
+     *             {@code VIDEO_SCALING_MODE_} constants.
+     * @throws IllegalArgumentException if mode is not recognized.
+     * @throws IllegalStateException if in the Released state.
+     */
+    public native final void setVideoScalingMode(@VideoScalingMode int mode);
+
+    /**
+     * Get the component name. If the codec was created by createDecoderByType
+     * or createEncoderByType, what component is chosen is not known beforehand.
+     *
+     * @return the name of the underlying codec component.
+     * @throws IllegalStateException if in the Released state.
+     */
+    @NonNull
+    public native final String getName();
+
+    /**
+     * Return Metrics data about the current codec instance.
+     *
+     * @return a {@link PersistableBundle} containing the set of attributes and
+     * values available for the media being handled by this instance of
+     * MediaCodec. The attributes are described in {@link MetricsConstants}.
+     *
+     * Additional vendor-specific fields may also be present in
+     * the return value.
+     */
+    public PersistableBundle getMetrics() {
+        return native_getMetrics();
+    }
+
+    // Native implementation backing getMetrics().
+    private native PersistableBundle native_getMetrics();
+
+    /**
+     * Change a video encoder's target bitrate on the fly. The value is an
+     * Integer object containing the new bitrate in bps.
+     * Applied via {@link #setParameters}.
+     */
+    public static final String PARAMETER_KEY_VIDEO_BITRATE = "video-bitrate";
+
+    /**
+     * Temporarily suspend/resume encoding of input data. While suspended
+     * input data is effectively discarded instead of being fed into the
+     * encoder. This parameter really only makes sense to use with an encoder
+     * in "surface-input" mode, as the client code has no control over the
+     * input-side of the encoder in that case.
+     * The value is an Integer object containing the value 1 to suspend
+     * or the value 0 to resume.
+     * Applied via {@link #setParameters}.
+     */
+    public static final String PARAMETER_KEY_SUSPEND = "drop-input-frames";
+
+    /**
+     * Request that the encoder produce a sync frame "soon".
+     * Provide an Integer with the value 0.
+     * Applied via {@link #setParameters}.
+     */
+    public static final String PARAMETER_KEY_REQUEST_SYNC_FRAME = "request-sync";
+
+    /**
+     * Communicate additional parameter changes to the component instance.
+     * <b>Note:</b> Some of these parameter changes may silently fail to apply.
+     *
+     * @param params The bundle of parameters to set; a null bundle is a no-op.
+     * @throws IllegalStateException if in the Released state.
+     */
+    public final void setParameters(@Nullable Bundle params) {
+        if (params == null) {
+            return;
+        }
+
+        // Flatten the bundle into parallel key/value arrays for the native call.
+        final int count = params.size();
+        final String[] keys = new String[count];
+        final Object[] values = new Object[count];
+        int i = 0;
+        for (final String key : params.keySet()) {
+            keys[i] = key;
+            values[i] = params.get(key);
+            i++;
+        }
+
+        setParameters(keys, values);
+    }
+
+    /**
+     * Sets an asynchronous callback for actionable MediaCodec events.
+     *
+     * If the client intends to use the component in asynchronous mode,
+     * a valid callback should be provided before {@link #configure} is called.
+     *
+     * When asynchronous callback is enabled, the client should not call
+     * {@link #getInputBuffers}, {@link #getOutputBuffers},
+     * {@link #dequeueInputBuffer(long)} or {@link #dequeueOutputBuffer(BufferInfo, long)}.
+     * <p>
+     * Also, {@link #flush} behaves differently in asynchronous mode. After calling
+     * {@code flush}, you must call {@link #start} to "resume" receiving input buffers,
+     * even if an input surface was created.
+     *
+     * @param cb The callback that will run. Use {@code null} to clear a previously
+     *           set callback (before {@link #configure configure} is called and run
+     *           in synchronous mode).
+     * @param handler Callbacks will happen on the handler's thread. If {@code null},
+     *           callbacks are done on the default thread (the caller's thread or the
+     *           main thread.)
+     */
+    public void setCallback(@Nullable /* MediaCodec. */ Callback cb, @Nullable Handler handler) {
+        if (cb != null) {
+            synchronized (mListenerLock) {
+                EventHandler newHandler = getEventHandlerOn(handler, mCallbackHandler);
+                // NOTE: there are no callbacks on the handler at this time, but check anyways
+                // even if we were to extend this to be callable dynamically, it must
+                // be called when codec is flushed, so no messages are pending.
+                if (newHandler != mCallbackHandler) {
+                    // switching handlers: drop anything still queued on the old one
+                    mCallbackHandler.removeMessages(EVENT_SET_CALLBACK);
+                    mCallbackHandler.removeMessages(EVENT_CALLBACK);
+                    mCallbackHandler = newHandler;
+                }
+            }
+        } else if (mCallbackHandler != null) {
+            // clearing the callback: discard any pending callback messages
+            mCallbackHandler.removeMessages(EVENT_SET_CALLBACK);
+            mCallbackHandler.removeMessages(EVENT_CALLBACK);
+        }
+
+        // NOTE(review): this code assumes mCallbackHandler is initialized
+        // elsewhere (it is only read here) — confirm it is non-null after
+        // construction, otherwise native_setCallback is never invoked.
+        if (mCallbackHandler != null) {
+            // set java callback on main handler
+            Message msg = mCallbackHandler.obtainMessage(EVENT_SET_CALLBACK, 0, 0, cb);
+            mCallbackHandler.sendMessage(msg);
+
+            // set native handler here, don't post to handler because
+            // it may cause the callback to be delayed and set in a wrong state.
+            // Note that native codec may start sending events to the callback
+            // handler after this returns.
+            native_setCallback(cb);
+        }
+    }
+
+    /**
+     * Sets an asynchronous callback for actionable MediaCodec events on the default
+     * looper.
+     * <p>
+     * Same as {@link #setCallback(Callback, Handler)} with handler set to null.
+     *
+     * @param cb The callback that will run. Use {@code null} to clear a previously
+     *           set callback (before {@link #configure configure} is called and run
+     *           in synchronous mode).
+     * @see #setCallback(Callback, Handler)
+     */
+    public void setCallback(@Nullable /* MediaCodec. */ Callback cb) {
+        setCallback(cb, /* handler */ null);
+    }
+
+    /**
+     * Listener to be called when an output frame has rendered on the output surface.
+     *
+     * @see MediaCodec#setOnFrameRenderedListener
+     */
+    public interface OnFrameRenderedListener {
+
+        /**
+         * Called when an output frame has rendered on the output surface.
+         * <p>
+         * <strong>Note:</strong> This callback is for informational purposes only: it
+         * provides precise render timing samples, but may be significantly delayed
+         * and batched. Some frames may have been rendered even if there was no
+         * callback generated.
+         *
+         * @param codec the MediaCodec instance
+         * @param presentationTimeUs the presentation time (media time) of the frame rendered.
+         *          This is usually the same as specified in {@link #queueInputBuffer}; however,
+         *          some codecs may alter the media time by applying some time-based transformation,
+         *          such as frame rate conversion. In that case, presentation time corresponds
+         *          to the actual output frame rendered.
+         * @param nanoTime The system time when the frame was rendered.
+         *
+         * @see System#nanoTime
+         */
+        public void onFrameRendered(
+                @NonNull MediaCodec codec, long presentationTimeUs, long nanoTime);
+    }
+
+    /**
+     * Registers a callback to be invoked when an output frame is rendered on the output surface.
+     * <p>
+     * This method can be called in any codec state, but will only have an effect in the
+     * Executing state for codecs that render buffers to the output surface.
+     * <p>
+     * <strong>Note:</strong> This callback is for informational purposes only: to get precise
+     * render timing samples, and can be significantly delayed and batched. Some frames may have
+     * been rendered even if there was no callback generated.
+     *
+     * @param listener the callback that will be run; {@code null} clears it.
+     * @param handler the callback will be run on the handler's thread. If {@code null},
+     *           the callback will be run on the default thread, which is the looper
+     *           from which the codec was created, or a new thread if there was none.
+     */
+    public void setOnFrameRenderedListener(
+            @Nullable OnFrameRenderedListener listener, @Nullable Handler handler) {
+        synchronized (mListenerLock) {
+            mOnFrameRenderedListener = listener;
+            if (listener != null) {
+                EventHandler newHandler = getEventHandlerOn(handler, mOnFrameRenderedHandler);
+                if (newHandler != mOnFrameRenderedHandler) {
+                    // switching handlers: drop renders still queued on the old one.
+                    // NOTE(review): assumes mOnFrameRenderedHandler is non-null once a
+                    // listener can be set — confirm its initialization elsewhere in
+                    // this file.
+                    mOnFrameRenderedHandler.removeMessages(EVENT_FRAME_RENDERED);
+                }
+                mOnFrameRenderedHandler = newHandler;
+            } else if (mOnFrameRenderedHandler != null) {
+                mOnFrameRenderedHandler.removeMessages(EVENT_FRAME_RENDERED);
+            }
+            // toggle native event generation while still holding mListenerLock so
+            // no event can race the handler swap above
+            native_enableOnFrameRenderedListener(listener != null);
+        }
+    }
+
+    // Tells the native codec whether to emit EVENT_FRAME_RENDERED events.
+    private native void native_enableOnFrameRenderedListener(boolean enable);
+
+ private EventHandler getEventHandlerOn(
+ @Nullable Handler handler, @NonNull EventHandler lastHandler) {
+ if (handler == null) {
+ return mEventHandler;
+ } else {
+ Looper looper = handler.getLooper();
+ if (lastHandler.getLooper() == looper) {
+ return lastHandler;
+ } else {
+ return new EventHandler(this, looper);
+ }
+ }
+ }
+
+    /**
+     * MediaCodec callback interface. Used to notify the user asynchronously
+     * of various MediaCodec events.
+     */
+    public static abstract class Callback {
+        /**
+         * Called when an input buffer becomes available.
+         *
+         * @param codec The MediaCodec object.
+         * @param index The index of the available input buffer.
+         */
+        public abstract void onInputBufferAvailable(@NonNull MediaCodec codec, int index);
+
+        /**
+         * Called when an output buffer becomes available.
+         *
+         * @param codec The MediaCodec object.
+         * @param index The index of the available output buffer.
+         * @param info Info regarding the available output buffer {@link MediaCodec.BufferInfo}.
+         */
+        public abstract void onOutputBufferAvailable(
+                @NonNull MediaCodec codec, int index, @NonNull BufferInfo info);
+
+        /**
+         * Called when the MediaCodec encounters an error.
+         *
+         * @param codec The MediaCodec object.
+         * @param e The {@link MediaCodec.CodecException} object describing the error.
+         */
+        public abstract void onError(@NonNull MediaCodec codec, @NonNull CodecException e);
+
+        /**
+         * Called when the output format has changed.
+         *
+         * @param codec The MediaCodec object.
+         * @param format The new output format.
+         */
+        public abstract void onOutputFormatChanged(
+                @NonNull MediaCodec codec, @NonNull MediaFormat format);
+    }
+
+ private void postEventFromNative(
+ int what, int arg1, int arg2, @Nullable Object obj) {
+ synchronized (mListenerLock) {
+ EventHandler handler = mEventHandler;
+ if (what == EVENT_CALLBACK) {
+ handler = mCallbackHandler;
+ } else if (what == EVENT_FRAME_RENDERED) {
+ handler = mOnFrameRenderedHandler;
+ }
+ if (handler != null) {
+ Message msg = handler.obtainMessage(what, arg1, arg2, obj);
+ handler.sendMessage(msg);
+ }
+ }
+ }
+
+    // Native back end for setParameters(Bundle); keys[i] pairs with values[i].
+    private native final void setParameters(@NonNull String[] keys, @NonNull Object[] values);
+
+    /**
+     * Get the codec info. If the codec was created by createDecoderByType
+     * or createEncoderByType, what component is chosen is not known beforehand,
+     * and thus the caller does not have the MediaCodecInfo.
+     *
+     * @return the {@link MediaCodecInfo} for this codec's component.
+     * @throws IllegalStateException if in the Released state.
+     */
+    @NonNull
+    public MediaCodecInfo getCodecInfo() {
+        final String component = getName();
+        return MediaCodecList.getInfoFor(component);
+    }
+
+    // Returns the full array of input or output buffers (throws in async
+    // mode — see cacheBuffers()).
+    @NonNull
+    private native final ByteBuffer[] getBuffers(boolean input);
+
+    // Returns the buffer for a single dequeued index, or null if unavailable.
+    @Nullable
+    private native final ByteBuffer getBuffer(boolean input, int index);
+
+    // Returns an Image view of a single dequeued index, or null if the
+    // buffer does not contain a raw image.
+    @Nullable
+    private native final Image getImage(boolean input, int index);
+
+    private static native final void native_init();
+
+    private native final void native_setup(
+            @NonNull String name, boolean nameIsType, boolean encoder);
+
+    private native final void native_finalize();
+
+    static {
+        // Load the JNI implementation and run one-time native initialization.
+        System.loadLibrary("media_jni");
+        native_init();
+    }
+
+    // Presumably holds a pointer to the native codec instance, managed by
+    // native_setup/native_finalize — accessed from native code only.
+    private long mNativeContext;
+
+    /**
+     * An {@link Image} implementation backed by a codec buffer holding a
+     * YUV_420_888 frame; the plane layout is parsed from the native
+     * media-info descriptor passed to the constructor.
+     * @hide
+     */
+    public static class MediaImage extends Image {
+        private final boolean mIsReadOnly;
+        private final int mWidth;
+        private final int mHeight;
+        private final int mFormat;
+        // non-final although only assigned in the constructor here
+        private long mTimestamp;
+        private final Plane[] mPlanes;
+        // duplicate of the codec buffer backing all planes; freed on close()
+        private final ByteBuffer mBuffer;
+        private final ByteBuffer mInfo;
+        private final int mXOffset;
+        private final int mYOffset;
+
+        // only media-info type supported by this implementation
+        private final static int TYPE_YUV = 1;
+
+        @Override
+        public int getFormat() {
+            throwISEIfImageIsInvalid();
+            return mFormat;
+        }
+
+        @Override
+        public int getHeight() {
+            throwISEIfImageIsInvalid();
+            return mHeight;
+        }
+
+        @Override
+        public int getWidth() {
+            throwISEIfImageIsInvalid();
+            return mWidth;
+        }
+
+        @Override
+        public long getTimestamp() {
+            throwISEIfImageIsInvalid();
+            return mTimestamp;
+        }
+
+        @Override
+        @NonNull
+        public Plane[] getPlanes() {
+            throwISEIfImageIsInvalid();
+            // defensive copy so callers cannot replace our plane array entries
+            return Arrays.copyOf(mPlanes, mPlanes.length);
+        }
+
+        @Override
+        public void close() {
+            if (mIsImageValid) {
+                java.nio.NioUtils.freeDirectBuffer(mBuffer);
+                mIsImageValid = false;
+            }
+        }
+
+        /**
+         * Set the crop rectangle associated with this frame.
+         * <p>
+         * The crop rectangle specifies the region of valid pixels in the image,
+         * using coordinates in the largest-resolution plane.
+         */
+        @Override
+        public void setCropRect(@Nullable Rect cropRect) {
+            if (mIsReadOnly) {
+                throw new ReadOnlyBufferException();
+            }
+            super.setCropRect(cropRect);
+        }
+
+
+        public MediaImage(
+                @NonNull ByteBuffer buffer, @NonNull ByteBuffer info, boolean readOnly,
+                long timestamp, int xOffset, int yOffset, @Nullable Rect cropRect) {
+            mFormat = ImageFormat.YUV_420_888;
+            mTimestamp = timestamp;
+            mIsImageValid = true;
+            mIsReadOnly = buffer.isReadOnly();
+            mBuffer = buffer.duplicate();
+
+            // save offsets and info
+            mXOffset = xOffset;
+            mYOffset = yOffset;
+            mInfo = info;
+
+            // read media-info. See MediaImage2
+            if (info.remaining() == 104) {
+                int type = info.getInt();
+                if (type != TYPE_YUV) {
+                    throw new UnsupportedOperationException("unsupported type: " + type);
+                }
+                int numPlanes = info.getInt();
+                if (numPlanes != 3) {
+                    throw new RuntimeException("unexpected number of planes: " + numPlanes);
+                }
+                mWidth = info.getInt();
+                mHeight = info.getInt();
+                if (mWidth < 1 || mHeight < 1) {
+                    throw new UnsupportedOperationException(
+                            "unsupported size: " + mWidth + "x" + mHeight);
+                }
+                int bitDepth = info.getInt();
+                if (bitDepth != 8) {
+                    throw new UnsupportedOperationException("unsupported bit depth: " + bitDepth);
+                }
+                int bitDepthAllocated = info.getInt();
+                if (bitDepthAllocated != 8) {
+                    throw new UnsupportedOperationException(
+                            "unsupported allocated bit depth: " + bitDepthAllocated);
+                }
+                mPlanes = new MediaPlane[numPlanes];
+                for (int ix = 0; ix < numPlanes; ix++) {
+                    // per-plane descriptor: offset, pixel stride, row stride,
+                    // horizontal and vertical subsampling factors
+                    int planeOffset = info.getInt();
+                    int colInc = info.getInt();
+                    int rowInc = info.getInt();
+                    int horiz = info.getInt();
+                    int vert = info.getInt();
+                    // YUV_420_888: plane 0 (luma) is full-res, planes 1-2 are 2x2 subsampled
+                    if (horiz != vert || horiz != (ix == 0 ? 1 : 2)) {
+                        throw new UnsupportedOperationException("unexpected subsampling: "
+                                + horiz + "x" + vert + " on plane " + ix);
+                    }
+                    if (colInc < 1 || rowInc < 1) {
+                        throw new UnsupportedOperationException("unexpected strides: "
+                                + colInc + " pixel, " + rowInc + " row on plane " + ix);
+                    }
+
+                    // window the shared buffer to just this plane's samples
+                    // (accounting for the crop offsets), then slice it
+                    buffer.clear();
+                    buffer.position(mBuffer.position() + planeOffset
+                            + (xOffset / horiz) * colInc + (yOffset / vert) * rowInc);
+                    buffer.limit(buffer.position() + Utils.divUp(bitDepth, 8)
+                            + (mHeight / vert - 1) * rowInc + (mWidth / horiz - 1) * colInc);
+                    mPlanes[ix] = new MediaPlane(buffer.slice(), rowInc, colInc);
+                }
+            } else {
+                throw new UnsupportedOperationException(
+                        "unsupported info length: " + info.remaining());
+            }
+
+            // default crop is the full frame, expressed relative to the offsets
+            if (cropRect == null) {
+                cropRect = new Rect(0, 0, mWidth, mHeight);
+            }
+            cropRect.offset(-xOffset, -yOffset);
+            super.setCropRect(cropRect);
+        }
+
+        // A single image plane backed by a slice of the codec buffer.
+        private class MediaPlane extends Plane {
+            public MediaPlane(@NonNull ByteBuffer buffer, int rowInc, int colInc) {
+                mData = buffer;
+                mRowInc = rowInc;
+                mColInc = colInc;
+            }
+
+            @Override
+            public int getRowStride() {
+                throwISEIfImageIsInvalid();
+                return mRowInc;
+            }
+
+            @Override
+            public int getPixelStride() {
+                throwISEIfImageIsInvalid();
+                return mColInc;
+            }
+
+            @Override
+            @NonNull
+            public ByteBuffer getBuffer() {
+                throwISEIfImageIsInvalid();
+                return mData;
+            }
+
+            private final int mRowInc;
+            private final int mColInc;
+            private final ByteBuffer mData;
+        }
+    }
+
+    /** Attribute keys for the bundle returned by {@link MediaCodec#getMetrics}. */
+    public final static class MetricsConstants
+    {
+        private MetricsConstants() {}
+
+        /**
+         * Key to extract the codec being used
+         * from the {@link MediaCodec#getMetrics} return value.
+         * The value is a String.
+         */
+        public static final String CODEC = "android.media.mediacodec.codec";
+
+        /**
+         * Key to extract the MIME type
+         * from the {@link MediaCodec#getMetrics} return value.
+         * The value is a String.
+         */
+        public static final String MIME_TYPE = "android.media.mediacodec.mime";
+
+        /**
+         * Key to extract the codec mode
+         * from the {@link MediaCodec#getMetrics} return value.
+         * The value is a String. Values will be one of the constants
+         * {@link #MODE_AUDIO} or {@link #MODE_VIDEO}.
+         */
+        public static final String MODE = "android.media.mediacodec.mode";
+
+        /**
+         * The value returned for the key {@link #MODE} when the
+         * codec is an audio codec.
+         */
+        public static final String MODE_AUDIO = "audio";
+
+        /**
+         * The value returned for the key {@link #MODE} when the
+         * codec is a video codec.
+         */
+        public static final String MODE_VIDEO = "video";
+
+        /**
+         * Key to extract the flag indicating whether the codec is running
+         * as an encoder or decoder from the {@link MediaCodec#getMetrics} return value.
+         * The value is an integer.
+         * A 0 indicates decoder; 1 indicates encoder.
+         */
+        public static final String ENCODER = "android.media.mediacodec.encoder";
+
+        /**
+         * Key to extract the flag indicating whether the codec is running
+         * in secure (DRM) mode from the {@link MediaCodec#getMetrics} return value.
+         * The value is an integer.
+         */
+        public static final String SECURE = "android.media.mediacodec.secure";
+
+        /**
+         * Key to extract the width (in pixels) of the video track
+         * from the {@link MediaCodec#getMetrics} return value.
+         * The value is an integer.
+         */
+        public static final String WIDTH = "android.media.mediacodec.width";
+
+        /**
+         * Key to extract the height (in pixels) of the video track
+         * from the {@link MediaCodec#getMetrics} return value.
+         * The value is an integer.
+         */
+        public static final String HEIGHT = "android.media.mediacodec.height";
+
+        /**
+         * Key to extract the rotation (in degrees) to properly orient the video
+         * from the {@link MediaCodec#getMetrics} return.
+         * The value is an integer.
+         */
+        public static final String ROTATION = "android.media.mediacodec.rotation";
+
+    }
+}
diff --git a/android/media/MediaCodecInfo.java b/android/media/MediaCodecInfo.java
new file mode 100644
index 00000000..f85925d8
--- /dev/null
+++ b/android/media/MediaCodecInfo.java
@@ -0,0 +1,3116 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.util.Log;
+import android.util.Pair;
+import android.util.Range;
+import android.util.Rational;
+import android.util.Size;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+
+import static android.media.Utils.intersectSortedDistinctRanges;
+import static android.media.Utils.sortDistinctRanges;
+
+/**
+ * Provides information about a given media codec available on the device. You can
+ * iterate through all codecs available by querying {@link MediaCodecList}. For example,
+ * here's how to find an encoder that supports a given MIME type:
+ * <pre>
+ * private static MediaCodecInfo selectCodec(String mimeType) {
+ * int numCodecs = MediaCodecList.getCodecCount();
+ * for (int i = 0; i &lt; numCodecs; i++) {
+ * MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
+ *
+ * if (!codecInfo.isEncoder()) {
+ * continue;
+ * }
+ *
+ * String[] types = codecInfo.getSupportedTypes();
+ * for (int j = 0; j &lt; types.length; j++) {
+ * if (types[j].equalsIgnoreCase(mimeType)) {
+ * return codecInfo;
+ * }
+ * }
+ * }
+ * return null;
+ * }</pre>
+ *
+ */
+public final class MediaCodecInfo {
+ private boolean mIsEncoder;
+ private String mName;
+ private Map<String, CodecCapabilities> mCaps;
+
+ /* package private */ MediaCodecInfo(
+ String name, boolean isEncoder, CodecCapabilities[] caps) {
+ mName = name;
+ mIsEncoder = isEncoder;
+ mCaps = new HashMap<String, CodecCapabilities>();
+ for (CodecCapabilities c: caps) {
+ mCaps.put(c.getMimeType(), c);
+ }
+ }
+
+ /**
+ * Retrieve the codec name.
+ */
+ public final String getName() {
+ return mName;
+ }
+
+ /**
+ * Query if the codec is an encoder.
+ */
+ public final boolean isEncoder() {
+ return mIsEncoder;
+ }
+
+ /**
+ * Query the media types supported by the codec.
+ */
+ public final String[] getSupportedTypes() {
+ Set<String> typeSet = mCaps.keySet();
+ String[] types = typeSet.toArray(new String[typeSet.size()]);
+ Arrays.sort(types);
+ return types;
+ }
+
+ private static int checkPowerOfTwo(int value, String message) {
+ if ((value & (value - 1)) != 0) {
+ throw new IllegalArgumentException(message);
+ }
+ return value;
+ }
+
+ private static class Feature {
+ public String mName;
+ public int mValue;
+ public boolean mDefault;
+ public Feature(String name, int value, boolean def) {
+ mName = name;
+ mValue = value;
+ mDefault = def;
+ }
+ }
+
+ // COMMON CONSTANTS
+ private static final Range<Integer> POSITIVE_INTEGERS =
+ Range.create(1, Integer.MAX_VALUE);
+ private static final Range<Long> POSITIVE_LONGS =
+ Range.create(1l, Long.MAX_VALUE);
+ private static final Range<Rational> POSITIVE_RATIONALS =
+ Range.create(new Rational(1, Integer.MAX_VALUE),
+ new Rational(Integer.MAX_VALUE, 1));
+ private static final Range<Integer> SIZE_RANGE = Range.create(1, 32768);
+ private static final Range<Integer> FRAME_RATE_RANGE = Range.create(0, 960);
+ private static final Range<Integer> BITRATE_RANGE = Range.create(0, 500000000);
+ private static final int DEFAULT_MAX_SUPPORTED_INSTANCES = 32;
+ private static final int MAX_SUPPORTED_INSTANCES_LIMIT = 256;
+
+ // found stuff that is not supported by framework (=> this should not happen)
+ private static final int ERROR_UNRECOGNIZED = (1 << 0);
+ // found profile/level for which we don't have capability estimates
+ private static final int ERROR_UNSUPPORTED = (1 << 1);
+    // found no supported profile/level at all (no capability estimates for any listed profile/level)
+ private static final int ERROR_NONE_SUPPORTED = (1 << 2);
+
+
+ /**
+ * Encapsulates the capabilities of a given codec component.
+ * For example, what profile/level combinations it supports and what colorspaces
+ * it is capable of providing the decoded data in, as well as some
+ * codec-type specific capability flags.
+ * <p>You can get an instance for a given {@link MediaCodecInfo} object with
+ * {@link MediaCodecInfo#getCapabilitiesForType getCapabilitiesForType()}, passing a MIME type.
+ */
+ public static final class CodecCapabilities {
+ public CodecCapabilities() {
+ }
+
+ // CLASSIFICATION
+ private String mMime;
+ private int mMaxSupportedInstances;
+
+ // LEGACY FIELDS
+
+ // Enumerates supported profile/level combinations as defined
+ // by the type of encoded data. These combinations impose restrictions
+ // on video resolution, bitrate... and limit the available encoder tools
+ // such as B-frame support, arithmetic coding...
+ public CodecProfileLevel[] profileLevels; // NOTE this array is modifiable by user
+
+ // from OMX_COLOR_FORMATTYPE
+ /** @deprecated Use {@link #COLOR_Format24bitBGR888}. */
+ public static final int COLOR_FormatMonochrome = 1;
+ /** @deprecated Use {@link #COLOR_Format24bitBGR888}. */
+ public static final int COLOR_Format8bitRGB332 = 2;
+ /** @deprecated Use {@link #COLOR_Format24bitBGR888}. */
+ public static final int COLOR_Format12bitRGB444 = 3;
+ /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */
+ public static final int COLOR_Format16bitARGB4444 = 4;
+ /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */
+ public static final int COLOR_Format16bitARGB1555 = 5;
+
+ /**
+ * 16 bits per pixel RGB color format, with 5-bit red & blue and 6-bit green component.
+ * <p>
+ * Using 16-bit little-endian representation, colors stored as Red 15:11, Green 10:5, Blue 4:0.
+ * <pre>
+ * byte byte
+ * <--------- i --------> | <------ i + 1 ------>
+ * +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+ * | BLUE | GREEN | RED |
+ * +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+ * 0 4 5 7 0 2 3 7
+ * bit
+ * </pre>
+ *
+ * This format corresponds to {@link android.graphics.PixelFormat#RGB_565} and
+ * {@link android.graphics.ImageFormat#RGB_565}.
+ */
+ public static final int COLOR_Format16bitRGB565 = 6;
+ /** @deprecated Use {@link #COLOR_Format16bitRGB565}. */
+ public static final int COLOR_Format16bitBGR565 = 7;
+ /** @deprecated Use {@link #COLOR_Format24bitBGR888}. */
+ public static final int COLOR_Format18bitRGB666 = 8;
+ /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */
+ public static final int COLOR_Format18bitARGB1665 = 9;
+ /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */
+ public static final int COLOR_Format19bitARGB1666 = 10;
+
+ /** @deprecated Use {@link #COLOR_Format24bitBGR888} or {@link #COLOR_FormatRGBFlexible}. */
+ public static final int COLOR_Format24bitRGB888 = 11;
+
+ /**
+ * 24 bits per pixel RGB color format, with 8-bit red, green & blue components.
+ * <p>
+ * Using 24-bit little-endian representation, colors stored as Red 7:0, Green 15:8, Blue 23:16.
+ * <pre>
+ * byte byte byte
+ * <------ i -----> | <---- i+1 ----> | <---- i+2 ----->
+ * +-----------------+-----------------+-----------------+
+ * | RED | GREEN | BLUE |
+ * +-----------------+-----------------+-----------------+
+ * </pre>
+ *
+ * This format corresponds to {@link android.graphics.PixelFormat#RGB_888}, and can also be
+ * represented as a flexible format by {@link #COLOR_FormatRGBFlexible}.
+ */
+ public static final int COLOR_Format24bitBGR888 = 12;
+ /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */
+ public static final int COLOR_Format24bitARGB1887 = 13;
+ /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */
+ public static final int COLOR_Format25bitARGB1888 = 14;
+
+ /**
+     * @deprecated Use {@link #COLOR_Format32bitABGR8888} or {@link #COLOR_FormatRGBAFlexible}.
+ */
+ public static final int COLOR_Format32bitBGRA8888 = 15;
+ /**
+     * @deprecated Use {@link #COLOR_Format32bitABGR8888} or {@link #COLOR_FormatRGBAFlexible}.
+ */
+ public static final int COLOR_Format32bitARGB8888 = 16;
+ /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */
+ public static final int COLOR_FormatYUV411Planar = 17;
+ /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */
+ public static final int COLOR_FormatYUV411PackedPlanar = 18;
+ /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */
+ public static final int COLOR_FormatYUV420Planar = 19;
+ /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */
+ public static final int COLOR_FormatYUV420PackedPlanar = 20;
+ /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */
+ public static final int COLOR_FormatYUV420SemiPlanar = 21;
+
+ /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */
+ public static final int COLOR_FormatYUV422Planar = 22;
+ /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */
+ public static final int COLOR_FormatYUV422PackedPlanar = 23;
+ /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */
+ public static final int COLOR_FormatYUV422SemiPlanar = 24;
+
+ /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */
+ public static final int COLOR_FormatYCbYCr = 25;
+ /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */
+ public static final int COLOR_FormatYCrYCb = 26;
+ /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */
+ public static final int COLOR_FormatCbYCrY = 27;
+ /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */
+ public static final int COLOR_FormatCrYCbY = 28;
+
+ /** @deprecated Use {@link #COLOR_FormatYUV444Flexible}. */
+ public static final int COLOR_FormatYUV444Interleaved = 29;
+
+ /**
+ * SMIA 8-bit Bayer format.
+ * Each byte represents the top 8-bits of a 10-bit signal.
+ */
+ public static final int COLOR_FormatRawBayer8bit = 30;
+ /**
+ * SMIA 10-bit Bayer format.
+ */
+ public static final int COLOR_FormatRawBayer10bit = 31;
+
+ /**
+ * SMIA 8-bit compressed Bayer format.
+ * Each byte represents a sample from the 10-bit signal that is compressed into 8-bits
+ * using DPCM/PCM compression, as defined by the SMIA Functional Specification.
+ */
+ public static final int COLOR_FormatRawBayer8bitcompressed = 32;
+
+ /** @deprecated Use {@link #COLOR_FormatL8}. */
+ public static final int COLOR_FormatL2 = 33;
+ /** @deprecated Use {@link #COLOR_FormatL8}. */
+ public static final int COLOR_FormatL4 = 34;
+
+ /**
+ * 8 bits per pixel Y color format.
+ * <p>
+ * Each byte contains a single pixel.
+ * This format corresponds to {@link android.graphics.PixelFormat#L_8}.
+ */
+ public static final int COLOR_FormatL8 = 35;
+
+ /**
+ * 16 bits per pixel, little-endian Y color format.
+ * <p>
+ * <pre>
+ * byte byte
+ * <--------- i --------> | <------ i + 1 ------>
+ * +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+ * | Y |
+ * +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+ * 0 7 0 7
+ * bit
+ * </pre>
+ */
+ public static final int COLOR_FormatL16 = 36;
+ /** @deprecated Use {@link #COLOR_FormatL16}. */
+ public static final int COLOR_FormatL24 = 37;
+
+ /**
+ * 32 bits per pixel, little-endian Y color format.
+ * <p>
+ * <pre>
+ * byte byte byte byte
+ * <------ i -----> | <---- i+1 ----> | <---- i+2 ----> | <---- i+3 ----->
+ * +-----------------+-----------------+-----------------+-----------------+
+ * | Y |
+ * +-----------------+-----------------+-----------------+-----------------+
+ * 0 7 0 7 0 7 0 7
+ * bit
+ * </pre>
+ *
+ * @deprecated Use {@link #COLOR_FormatL16}.
+ */
+ public static final int COLOR_FormatL32 = 38;
+
+ /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */
+ public static final int COLOR_FormatYUV420PackedSemiPlanar = 39;
+ /** @deprecated Use {@link #COLOR_FormatYUV422Flexible}. */
+ public static final int COLOR_FormatYUV422PackedSemiPlanar = 40;
+
+ /** @deprecated Use {@link #COLOR_Format24bitBGR888}. */
+ public static final int COLOR_Format18BitBGR666 = 41;
+
+ /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */
+ public static final int COLOR_Format24BitARGB6666 = 42;
+ /** @deprecated Use {@link #COLOR_Format32bitABGR8888}. */
+ public static final int COLOR_Format24BitABGR6666 = 43;
+
+ /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */
+ public static final int COLOR_TI_FormatYUV420PackedSemiPlanar = 0x7f000100;
+ // COLOR_FormatSurface indicates that the data will be a GraphicBuffer metadata reference.
+ // In OMX this is called OMX_COLOR_FormatAndroidOpaque.
+ public static final int COLOR_FormatSurface = 0x7F000789;
+
+ /**
+ * 32 bits per pixel RGBA color format, with 8-bit red, green, blue, and alpha components.
+ * <p>
+ * Using 32-bit little-endian representation, colors stored as Red 7:0, Green 15:8,
+ * Blue 23:16, and Alpha 31:24.
+ * <pre>
+ * byte byte byte byte
+ * <------ i -----> | <---- i+1 ----> | <---- i+2 ----> | <---- i+3 ----->
+ * +-----------------+-----------------+-----------------+-----------------+
+ * | RED | GREEN | BLUE | ALPHA |
+ * +-----------------+-----------------+-----------------+-----------------+
+ * </pre>
+ *
+ * This corresponds to {@link android.graphics.PixelFormat#RGBA_8888}.
+ */
+ public static final int COLOR_Format32bitABGR8888 = 0x7F00A000;
+
+ /**
+ * Flexible 12 bits per pixel, subsampled YUV color format with 8-bit chroma and luma
+ * components.
+ * <p>
+ * Chroma planes are subsampled by 2 both horizontally and vertically.
+ * Use this format with {@link Image}.
+ * This format corresponds to {@link android.graphics.ImageFormat#YUV_420_888},
+ * and can represent the {@link #COLOR_FormatYUV411Planar},
+ * {@link #COLOR_FormatYUV411PackedPlanar}, {@link #COLOR_FormatYUV420Planar},
+ * {@link #COLOR_FormatYUV420PackedPlanar}, {@link #COLOR_FormatYUV420SemiPlanar}
+ * and {@link #COLOR_FormatYUV420PackedSemiPlanar} formats.
+ *
+ * @see Image#getFormat
+ */
+ public static final int COLOR_FormatYUV420Flexible = 0x7F420888;
+
+ /**
+ * Flexible 16 bits per pixel, subsampled YUV color format with 8-bit chroma and luma
+ * components.
+ * <p>
+ * Chroma planes are horizontally subsampled by 2. Use this format with {@link Image}.
+ * This format corresponds to {@link android.graphics.ImageFormat#YUV_422_888},
+ * and can represent the {@link #COLOR_FormatYCbYCr}, {@link #COLOR_FormatYCrYCb},
+ * {@link #COLOR_FormatCbYCrY}, {@link #COLOR_FormatCrYCbY},
+ * {@link #COLOR_FormatYUV422Planar}, {@link #COLOR_FormatYUV422PackedPlanar},
+ * {@link #COLOR_FormatYUV422SemiPlanar} and {@link #COLOR_FormatYUV422PackedSemiPlanar}
+ * formats.
+ *
+ * @see Image#getFormat
+ */
+ public static final int COLOR_FormatYUV422Flexible = 0x7F422888;
+
+ /**
+ * Flexible 24 bits per pixel YUV color format with 8-bit chroma and luma
+ * components.
+ * <p>
+ * Chroma planes are not subsampled. Use this format with {@link Image}.
+ * This format corresponds to {@link android.graphics.ImageFormat#YUV_444_888},
+ * and can represent the {@link #COLOR_FormatYUV444Interleaved} format.
+ * @see Image#getFormat
+ */
+ public static final int COLOR_FormatYUV444Flexible = 0x7F444888;
+
+ /**
+ * Flexible 24 bits per pixel RGB color format with 8-bit red, green and blue
+ * components.
+ * <p>
+ * Use this format with {@link Image}. This format corresponds to
+ * {@link android.graphics.ImageFormat#FLEX_RGB_888}, and can represent
+ * {@link #COLOR_Format24bitBGR888} and {@link #COLOR_Format24bitRGB888} formats.
+     * @see Image#getFormat
+ */
+ public static final int COLOR_FormatRGBFlexible = 0x7F36B888;
+
+ /**
+ * Flexible 32 bits per pixel RGBA color format with 8-bit red, green, blue, and alpha
+ * components.
+ * <p>
+ * Use this format with {@link Image}. This format corresponds to
+ * {@link android.graphics.ImageFormat#FLEX_RGBA_8888}, and can represent
+ * {@link #COLOR_Format32bitBGRA8888}, {@link #COLOR_Format32bitABGR8888} and
+ * {@link #COLOR_Format32bitARGB8888} formats.
+ *
+ * @see Image#getFormat
+ */
+ public static final int COLOR_FormatRGBAFlexible = 0x7F36A888;
+
+ /** @deprecated Use {@link #COLOR_FormatYUV420Flexible}. */
+ public static final int COLOR_QCOM_FormatYUV420SemiPlanar = 0x7fa30c00;
+
+ /**
+ * Defined in the OpenMAX IL specs, color format values are drawn from
+ * OMX_COLOR_FORMATTYPE.
+ */
+ public int[] colorFormats; // NOTE this array is modifiable by user
+
+ // FEATURES
+
+ private int mFlagsSupported;
+ private int mFlagsRequired;
+ private int mFlagsVerified;
+
+ /**
+ * <b>video decoder only</b>: codec supports seamless resolution changes.
+ */
+ public static final String FEATURE_AdaptivePlayback = "adaptive-playback";
+
+ /**
+ * <b>video decoder only</b>: codec supports secure decryption.
+ */
+ public static final String FEATURE_SecurePlayback = "secure-playback";
+
+ /**
+ * <b>video or audio decoder only</b>: codec supports tunneled playback.
+ */
+ public static final String FEATURE_TunneledPlayback = "tunneled-playback";
+
+ /**
+ * <b>video decoder only</b>: codec supports queuing partial frames.
+ */
+ public static final String FEATURE_PartialFrame = "partial-frame";
+
+ /**
+ * <b>video encoder only</b>: codec supports intra refresh.
+ */
+ public static final String FEATURE_IntraRefresh = "intra-refresh";
+
+ /**
+ * Query codec feature capabilities.
+ * <p>
+ * These features are supported to be used by the codec. These
+ * include optional features that can be turned on, as well as
+ * features that are always on.
+ */
+ public final boolean isFeatureSupported(String name) {
+ return checkFeature(name, mFlagsSupported);
+ }
+
+ /**
+ * Query codec feature requirements.
+ * <p>
+ * These features are required to be used by the codec, and as such,
+ * they are always turned on.
+ */
+ public final boolean isFeatureRequired(String name) {
+ return checkFeature(name, mFlagsRequired);
+ }
+
+ private static final Feature[] decoderFeatures = {
+ new Feature(FEATURE_AdaptivePlayback, (1 << 0), true),
+ new Feature(FEATURE_SecurePlayback, (1 << 1), false),
+ new Feature(FEATURE_TunneledPlayback, (1 << 2), false),
+ new Feature(FEATURE_PartialFrame, (1 << 3), false),
+ };
+
+ private static final Feature[] encoderFeatures = {
+ new Feature(FEATURE_IntraRefresh, (1 << 0), false),
+ };
+
+ /** @hide */
+ public String[] validFeatures() {
+ Feature[] features = getValidFeatures();
+ String[] res = new String[features.length];
+ for (int i = 0; i < res.length; i++) {
+ res[i] = features[i].mName;
+ }
+ return res;
+ }
+
+ private Feature[] getValidFeatures() {
+ if (!isEncoder()) {
+ return decoderFeatures;
+ }
+ return encoderFeatures;
+ }
+
+ private boolean checkFeature(String name, int flags) {
+ for (Feature feat: getValidFeatures()) {
+ if (feat.mName.equals(name)) {
+ return (flags & feat.mValue) != 0;
+ }
+ }
+ return false;
+ }
+
+ /** @hide */
+ public boolean isRegular() {
+ // regular codecs only require default features
+ for (Feature feat: getValidFeatures()) {
+ if (!feat.mDefault && isFeatureRequired(feat.mName)) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ /**
+ * Query whether codec supports a given {@link MediaFormat}.
+ *
+ * <p class=note>
+ * <strong>Note:</strong> On {@link android.os.Build.VERSION_CODES#LOLLIPOP},
+ * {@code format} must not contain a {@linkplain MediaFormat#KEY_FRAME_RATE
+ * frame rate}. Use
+ * <code class=prettyprint>format.setString(MediaFormat.KEY_FRAME_RATE, null)</code>
+ * to clear any existing frame rate setting in the format.
+ * <p>
+ *
+ * The following table summarizes the format keys considered by this method.
+ *
+ * <table style="width: 0%">
+ * <thead>
+ * <tr>
+ * <th rowspan=3>OS Version(s)</th>
+     *    <td colspan=3>{@code MediaFormat} keys considered for</td>
+ * </tr><tr>
+ * <th>Audio Codecs</th>
+ * <th>Video Codecs</th>
+ * <th>Encoders</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+     *    <td>{@link android.os.Build.VERSION_CODES#LOLLIPOP}</td>
+ * <td rowspan=3>{@link MediaFormat#KEY_MIME}<sup>*</sup>,<br>
+ * {@link MediaFormat#KEY_SAMPLE_RATE},<br>
+ * {@link MediaFormat#KEY_CHANNEL_COUNT},</td>
+ * <td>{@link MediaFormat#KEY_MIME}<sup>*</sup>,<br>
+ * {@link CodecCapabilities#FEATURE_AdaptivePlayback}<sup>D</sup>,<br>
+ * {@link CodecCapabilities#FEATURE_SecurePlayback}<sup>D</sup>,<br>
+ * {@link CodecCapabilities#FEATURE_TunneledPlayback}<sup>D</sup>,<br>
+ * {@link MediaFormat#KEY_WIDTH},<br>
+ * {@link MediaFormat#KEY_HEIGHT},<br>
+ * <strong>no</strong> {@code KEY_FRAME_RATE}</td>
+ * <td rowspan=4>{@link MediaFormat#KEY_BITRATE_MODE},<br>
+ * {@link MediaFormat#KEY_PROFILE}
+ * (and/or {@link MediaFormat#KEY_AAC_PROFILE}<sup>~</sup>),<br>
+ * <!-- {link MediaFormat#KEY_QUALITY},<br> -->
+ * {@link MediaFormat#KEY_COMPLEXITY}
+ * (and/or {@link MediaFormat#KEY_FLAC_COMPRESSION_LEVEL}<sup>~</sup>)</td>
+ * </tr><tr>
+     *    <td>{@link android.os.Build.VERSION_CODES#LOLLIPOP_MR1}</td>
+ * <td rowspan=2>as above, plus<br>
+ * {@link MediaFormat#KEY_FRAME_RATE}</td>
+ * </tr><tr>
+     *    <td>{@link android.os.Build.VERSION_CODES#M}</td>
+ * </tr><tr>
+     *    <td>{@link android.os.Build.VERSION_CODES#N}</td>
+ * <td>as above, plus<br>
+ * {@link MediaFormat#KEY_PROFILE},<br>
+ * <!-- {link MediaFormat#KEY_MAX_BIT_RATE},<br> -->
+ * {@link MediaFormat#KEY_BIT_RATE}</td>
+ * <td>as above, plus<br>
+ * {@link MediaFormat#KEY_PROFILE},<br>
+ * {@link MediaFormat#KEY_LEVEL}<sup>+</sup>,<br>
+ * <!-- {link MediaFormat#KEY_MAX_BIT_RATE},<br> -->
+ * {@link MediaFormat#KEY_BIT_RATE},<br>
+ * {@link CodecCapabilities#FEATURE_IntraRefresh}<sup>E</sup></td>
+ * </tr>
+ * <tr>
+ * <td colspan=4>
+ * <p class=note><strong>Notes:</strong><br>
+ * *: must be specified; otherwise, method returns {@code false}.<br>
+ * +: method does not verify that the format parameters are supported
+ * by the specified level.<br>
+ * D: decoders only<br>
+ * E: encoders only<br>
+ * ~: if both keys are provided values must match
+ * </td>
+ * </tr>
+ * </tbody>
+ * </table>
+ *
+ * @param format media format with optional feature directives.
+ * @throws IllegalArgumentException if format is not a valid media format.
+ * @return whether the codec capabilities support the given format
+ * and feature requests.
+ */
+ public final boolean isFormatSupported(MediaFormat format) {
+ final Map<String, Object> map = format.getMap();
+ final String mime = (String)map.get(MediaFormat.KEY_MIME);
+
+ // mime must match if present
+ if (mime != null && !mMime.equalsIgnoreCase(mime)) {
+ return false;
+ }
+
+ // check feature support
+ for (Feature feat: getValidFeatures()) {
+ Integer yesNo = (Integer)map.get(MediaFormat.KEY_FEATURE_ + feat.mName);
+ if (yesNo == null) {
+ continue;
+ }
+ if ((yesNo == 1 && !isFeatureSupported(feat.mName)) ||
+ (yesNo == 0 && isFeatureRequired(feat.mName))) {
+ return false;
+ }
+ }
+
+ Integer profile = (Integer)map.get(MediaFormat.KEY_PROFILE);
+ Integer level = (Integer)map.get(MediaFormat.KEY_LEVEL);
+
+ if (profile != null) {
+ if (!supportsProfileLevel(profile, level)) {
+ return false;
+ }
+
+ // If we recognize this profile, check that this format is supported by the
+ // highest level supported by the codec for that profile. (Ignore specified
+ // level beyond the above profile/level check as level is only used as a
+ // guidance. E.g. AVC Level 1 CIF format is supported if codec supports level 1.1
+ // even though max size for Level 1 is QCIF. However, MPEG2 Simple Profile
+ // 1080p format is not supported even if codec supports Main Profile Level High,
+ // as Simple Profile does not support 1080p.
+ CodecCapabilities levelCaps = null;
+ int maxLevel = 0;
+ for (CodecProfileLevel pl : profileLevels) {
+ if (pl.profile == profile && pl.level > maxLevel) {
+ maxLevel = pl.level;
+ }
+ }
+ levelCaps = createFromProfileLevel(mMime, profile, maxLevel);
+ // remove profile from this format otherwise levelCaps.isFormatSupported will
+                // get into this same condition and loop forever.
+ Map<String, Object> mapWithoutProfile = new HashMap<>(map);
+ mapWithoutProfile.remove(MediaFormat.KEY_PROFILE);
+ MediaFormat formatWithoutProfile = new MediaFormat(mapWithoutProfile);
+ if (levelCaps != null && !levelCaps.isFormatSupported(formatWithoutProfile)) {
+ return false;
+ }
+ }
+ if (mAudioCaps != null && !mAudioCaps.supportsFormat(format)) {
+ return false;
+ }
+ if (mVideoCaps != null && !mVideoCaps.supportsFormat(format)) {
+ return false;
+ }
+ if (mEncoderCaps != null && !mEncoderCaps.supportsFormat(format)) {
+ return false;
+ }
+ return true;
+ }
+
+ private static boolean supportsBitrate(
+ Range<Integer> bitrateRange, MediaFormat format) {
+ Map<String, Object> map = format.getMap();
+
+ // consider max bitrate over average bitrate for support
+ Integer maxBitrate = (Integer)map.get(MediaFormat.KEY_MAX_BIT_RATE);
+ Integer bitrate = (Integer)map.get(MediaFormat.KEY_BIT_RATE);
+ if (bitrate == null) {
+ bitrate = maxBitrate;
+ } else if (maxBitrate != null) {
+ bitrate = Math.max(bitrate, maxBitrate);
+ }
+
+ if (bitrate != null && bitrate > 0) {
+ return bitrateRange.contains(bitrate);
+ }
+
+ return true;
+ }
+
+ private boolean supportsProfileLevel(int profile, Integer level) {
+ for (CodecProfileLevel pl: profileLevels) {
+ if (pl.profile != profile) {
+ continue;
+ }
+
+ // AAC does not use levels
+ if (level == null || mMime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_AAC)) {
+ return true;
+ }
+
+ // H.263 levels are not completely ordered:
+ // Level45 support only implies Level10 support
+ if (mMime.equalsIgnoreCase(MediaFormat.MIMETYPE_VIDEO_H263)) {
+ if (pl.level != level && pl.level == CodecProfileLevel.H263Level45
+ && level > CodecProfileLevel.H263Level10) {
+ continue;
+ }
+ }
+
+ // MPEG4 levels are not completely ordered:
+ // Level1 support only implies Level0 (and not Level0b) support
+ if (mMime.equalsIgnoreCase(MediaFormat.MIMETYPE_VIDEO_MPEG4)) {
+ if (pl.level != level && pl.level == CodecProfileLevel.MPEG4Level1
+ && level > CodecProfileLevel.MPEG4Level0) {
+ continue;
+ }
+ }
+
+ // HEVC levels incorporate both tiers and levels. Verify tier support.
+ if (mMime.equalsIgnoreCase(MediaFormat.MIMETYPE_VIDEO_HEVC)) {
+ boolean supportsHighTier =
+ (pl.level & CodecProfileLevel.HEVCHighTierLevels) != 0;
+ boolean checkingHighTier = (level & CodecProfileLevel.HEVCHighTierLevels) != 0;
+ // high tier levels are only supported by other high tier levels
+ if (checkingHighTier && !supportsHighTier) {
+ continue;
+ }
+ }
+
+ if (pl.level >= level) {
+ // if we recognize the listed profile/level, we must also recognize the
+ // profile/level arguments.
+ if (createFromProfileLevel(mMime, profile, pl.level) != null) {
+ return createFromProfileLevel(mMime, profile, level) != null;
+ }
+ return true;
+ }
+ }
+ return false;
+ }
+
+ // errors while reading profile levels - accessed from sister capabilities
+ int mError;
+
+ private static final String TAG = "CodecCapabilities";
+
+ // NEW-STYLE CAPABILITIES
+ private AudioCapabilities mAudioCaps;
+ private VideoCapabilities mVideoCaps;
+ private EncoderCapabilities mEncoderCaps;
+ private MediaFormat mDefaultFormat;
+
+ /**
+ * Returns a MediaFormat object with default values for configurations that have
+ * defaults.
+ */
+ public MediaFormat getDefaultFormat() {
+ return mDefaultFormat;
+ }
+
+ /**
+ * Returns the mime type for which this codec-capability object was created.
+ */
+ public String getMimeType() {
+ return mMime;
+ }
+
+ /**
+ * Returns the max number of the supported concurrent codec instances.
+ * <p>
+ * This is a hint for an upper bound. Applications should not expect to successfully
+ * operate more instances than the returned value, but the actual number of
+ * concurrently operable instances may be less as it depends on the available
+ * resources at time of use.
+ */
+ public int getMaxSupportedInstances() {
+ return mMaxSupportedInstances;
+ }
+
+ private boolean isAudio() {
+ return mAudioCaps != null;
+ }
+
+ /**
+ * Returns the audio capabilities or {@code null} if this is not an audio codec.
+ */
+ public AudioCapabilities getAudioCapabilities() {
+ return mAudioCaps;
+ }
+
+ private boolean isEncoder() {
+ return mEncoderCaps != null;
+ }
+
+ /**
+ * Returns the encoding capabilities or {@code null} if this is not an encoder.
+ */
+ public EncoderCapabilities getEncoderCapabilities() {
+ return mEncoderCaps;
+ }
+
+ private boolean isVideo() {
+ return mVideoCaps != null;
+ }
+
+ /**
+ * Returns the video capabilities or {@code null} if this is not a video codec.
+ */
+ public VideoCapabilities getVideoCapabilities() {
+ return mVideoCaps;
+ }
+
+ /** @hide */
+ public CodecCapabilities dup() {
+ return new CodecCapabilities(
+ // clone writable arrays
+ Arrays.copyOf(profileLevels, profileLevels.length),
+ Arrays.copyOf(colorFormats, colorFormats.length),
+ isEncoder(),
+ mFlagsVerified,
+ mDefaultFormat,
+ mCapabilitiesInfo);
+ }
+
+ /**
+ * Retrieve the codec capabilities for a certain {@code mime type}, {@code
+ * profile} and {@code level}. If the type, or profile-level combination
+ * is not understood by the framework, it returns null.
+ * <p class=note> In {@link android.os.Build.VERSION_CODES#M}, calling this
+ * method without calling any method of the {@link MediaCodecList} class beforehand
+ * results in a {@link NullPointerException}.</p>
+ */
+ public static CodecCapabilities createFromProfileLevel(
+ String mime, int profile, int level) {
+ CodecProfileLevel pl = new CodecProfileLevel();
+ pl.profile = profile;
+ pl.level = level;
+ MediaFormat defaultFormat = new MediaFormat();
+ defaultFormat.setString(MediaFormat.KEY_MIME, mime);
+
+ CodecCapabilities ret = new CodecCapabilities(
+ new CodecProfileLevel[] { pl }, new int[0], true /* encoder */,
+ 0 /* flags */, defaultFormat, new MediaFormat() /* info */);
+ if (ret.mError != 0) {
+ return null;
+ }
+ return ret;
+ }
+
+ /* package private */ CodecCapabilities(
+ CodecProfileLevel[] profLevs, int[] colFmts,
+ boolean encoder, int flags,
+ Map<String, Object>defaultFormatMap,
+ Map<String, Object>capabilitiesMap) {
+ this(profLevs, colFmts, encoder, flags,
+ new MediaFormat(defaultFormatMap),
+ new MediaFormat(capabilitiesMap));
+ }
+
+ private MediaFormat mCapabilitiesInfo;
+
        /*
         * Primary package-private constructor. Initializes this capabilities object
         * from the codec's default format and its published capabilities info,
         * creating the audio/video/encoder sub-capability objects as appropriate.
         */
        /* package private */ CodecCapabilities(
                CodecProfileLevel[] profLevs, int[] colFmts, boolean encoder, int flags,
                MediaFormat defaultFormat, MediaFormat info) {
            final Map<String, Object> map = info.getMap();
            colorFormats = colFmts;
            mFlagsVerified = flags;
            mDefaultFormat = defaultFormat;
            mCapabilitiesInfo = info;
            mMime = mDefaultFormat.getString(MediaFormat.KEY_MIME);

            /* VP9 introduced profiles around 2016, so some VP9 codecs may not advertise any
               supported profiles. Determine the level for them using the info they provide. */
            if (profLevs.length == 0 && mMime.equalsIgnoreCase(MediaFormat.MIMETYPE_VIDEO_VP9)) {
                CodecProfileLevel profLev = new CodecProfileLevel();
                profLev.profile = CodecProfileLevel.VP9Profile0;
                profLev.level = VideoCapabilities.equivalentVP9Level(info);
                profLevs = new CodecProfileLevel[] { profLev };
            }
            profileLevels = profLevs;

            // create the media-type-specific sub-capabilities; encoders additionally
            // get encoder capabilities regardless of media type
            if (mMime.toLowerCase().startsWith("audio/")) {
                mAudioCaps = AudioCapabilities.create(info, this);
                mAudioCaps.setDefaultFormat(mDefaultFormat);
            } else if (mMime.toLowerCase().startsWith("video/")) {
                mVideoCaps = VideoCapabilities.create(info, this);
            }
            if (encoder) {
                mEncoderCaps = EncoderCapabilities.create(info, this);
                mEncoderCaps.setDefaultFormat(mDefaultFormat);
            }

            // global (device-wide) instance limit, then clamped by this codec's own
            // published limit into [1, MAX_SUPPORTED_INSTANCES_LIMIT]
            final Map<String, Object> global = MediaCodecList.getGlobalSettings();
            mMaxSupportedInstances = Utils.parseIntSafely(
                    global.get("max-concurrent-instances"), DEFAULT_MAX_SUPPORTED_INSTANCES);

            int maxInstances = Utils.parseIntSafely(
                    map.get("max-concurrent-instances"), mMaxSupportedInstances);
            mMaxSupportedInstances =
                    Range.create(1, MAX_SUPPORTED_INSTANCES_LIMIT).clamp(maxInstances);

            // accumulate supported/required feature flags from "feature-<name>" keys;
            // a positive value means the feature is required, not just supported
            for (Feature feat: getValidFeatures()) {
                String key = MediaFormat.KEY_FEATURE_ + feat.mName;
                Integer yesNo = (Integer)map.get(key);
                if (yesNo == null) {
                    continue;
                }
                if (yesNo > 0) {
                    mFlagsRequired |= feat.mValue;
                }
                mFlagsSupported |= feat.mValue;
                mDefaultFormat.setInteger(key, 1);
                // TODO restrict features by mFlagsVerified once all codecs reliably verify them
            }
        }
+ }
+
+ /**
+ * A class that supports querying the audio capabilities of a codec.
+ */
+ public static final class AudioCapabilities {
+ private static final String TAG = "AudioCapabilities";
+ private CodecCapabilities mParent;
+ private Range<Integer> mBitrateRange;
+
+ private int[] mSampleRates;
+ private Range<Integer>[] mSampleRateRanges;
+ private int mMaxInputChannelCount;
+
+ private static final int MAX_INPUT_CHANNEL_COUNT = 30;
+
+ /**
+ * Returns the range of supported bitrates in bits/second.
+ */
+ public Range<Integer> getBitrateRange() {
+ return mBitrateRange;
+ }
+
+ /**
+ * Returns the array of supported sample rates if the codec
+ * supports only discrete values. Otherwise, it returns
+ * {@code null}. The array is sorted in ascending order.
+ */
+ public int[] getSupportedSampleRates() {
+ return Arrays.copyOf(mSampleRates, mSampleRates.length);
+ }
+
+ /**
+ * Returns the array of supported sample rate ranges. The
+ * array is sorted in ascending order, and the ranges are
+ * distinct.
+ */
+ public Range<Integer>[] getSupportedSampleRateRanges() {
+ return Arrays.copyOf(mSampleRateRanges, mSampleRateRanges.length);
+ }
+
+ /**
+ * Returns the maximum number of input channels supported. The codec
+ * supports any number of channels between 1 and this maximum value.
+ */
+ public int getMaxInputChannelCount() {
+ return mMaxInputChannelCount;
+ }
+
+ /* no public constructor */
+ private AudioCapabilities() { }
+
+ /** @hide */
+ public static AudioCapabilities create(
+ MediaFormat info, CodecCapabilities parent) {
+ AudioCapabilities caps = new AudioCapabilities();
+ caps.init(info, parent);
+ return caps;
+ }
+
+ /** @hide */
+ public void init(MediaFormat info, CodecCapabilities parent) {
+ mParent = parent;
+ initWithPlatformLimits();
+ applyLevelLimits();
+ parseFromInfo(info);
+ }
+
+ private void initWithPlatformLimits() {
+ mBitrateRange = Range.create(0, Integer.MAX_VALUE);
+ mMaxInputChannelCount = MAX_INPUT_CHANNEL_COUNT;
+ // mBitrateRange = Range.create(1, 320000);
+ mSampleRateRanges = new Range[] { Range.create(8000, 96000) };
+ mSampleRates = null;
+ }
+
+ private boolean supports(Integer sampleRate, Integer inputChannels) {
+ // channels and sample rates are checked orthogonally
+ if (inputChannels != null &&
+ (inputChannels < 1 || inputChannels > mMaxInputChannelCount)) {
+ return false;
+ }
+ if (sampleRate != null) {
+ int ix = Utils.binarySearchDistinctRanges(
+ mSampleRateRanges, sampleRate);
+ if (ix < 0) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ /**
+ * Query whether the sample rate is supported by the codec.
+ */
+ public boolean isSampleRateSupported(int sampleRate) {
+ return supports(sampleRate, null);
+ }
+
+ /** modifies rates */
+ private void limitSampleRates(int[] rates) {
+ Arrays.sort(rates);
+ ArrayList<Range<Integer>> ranges = new ArrayList<Range<Integer>>();
+ for (int rate: rates) {
+ if (supports(rate, null /* channels */)) {
+ ranges.add(Range.create(rate, rate));
+ }
+ }
+ mSampleRateRanges = ranges.toArray(new Range[ranges.size()]);
+ createDiscreteSampleRates();
+ }
+
+ private void createDiscreteSampleRates() {
+ mSampleRates = new int[mSampleRateRanges.length];
+ for (int i = 0; i < mSampleRateRanges.length; i++) {
+ mSampleRates[i] = mSampleRateRanges[i].getLower();
+ }
+ }
+
+ /** modifies rateRanges */
+ private void limitSampleRates(Range<Integer>[] rateRanges) {
+ sortDistinctRanges(rateRanges);
+ mSampleRateRanges = intersectSortedDistinctRanges(mSampleRateRanges, rateRanges);
+
+ // check if all values are discrete
+ for (Range<Integer> range: mSampleRateRanges) {
+ if (!range.getLower().equals(range.getUpper())) {
+ mSampleRates = null;
+ return;
+ }
+ }
+ createDiscreteSampleRates();
+ }
+
+ private void applyLevelLimits() {
+ int[] sampleRates = null;
+ Range<Integer> sampleRateRange = null, bitRates = null;
+ int maxChannels = MAX_INPUT_CHANNEL_COUNT;
+ String mime = mParent.getMimeType();
+
+ if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_MPEG)) {
+ sampleRates = new int[] {
+ 8000, 11025, 12000,
+ 16000, 22050, 24000,
+ 32000, 44100, 48000 };
+ bitRates = Range.create(8000, 320000);
+ maxChannels = 2;
+ } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_AMR_NB)) {
+ sampleRates = new int[] { 8000 };
+ bitRates = Range.create(4750, 12200);
+ maxChannels = 1;
+ } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_AMR_WB)) {
+ sampleRates = new int[] { 16000 };
+ bitRates = Range.create(6600, 23850);
+ maxChannels = 1;
+ } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_AAC)) {
+ sampleRates = new int[] {
+ 7350, 8000,
+ 11025, 12000, 16000,
+ 22050, 24000, 32000,
+ 44100, 48000, 64000,
+ 88200, 96000 };
+ bitRates = Range.create(8000, 510000);
+ maxChannels = 48;
+ } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_VORBIS)) {
+ bitRates = Range.create(32000, 500000);
+ sampleRateRange = Range.create(8000, 192000);
+ maxChannels = 255;
+ } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_OPUS)) {
+ bitRates = Range.create(6000, 510000);
+ sampleRates = new int[] { 8000, 12000, 16000, 24000, 48000 };
+ maxChannels = 255;
+ } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_RAW)) {
+ sampleRateRange = Range.create(1, 96000);
+ bitRates = Range.create(1, 10000000);
+ maxChannels = AudioTrack.CHANNEL_COUNT_MAX;
+ } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_FLAC)) {
+ sampleRateRange = Range.create(1, 655350);
+ // lossless codec, so bitrate is ignored
+ maxChannels = 255;
+ } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_G711_ALAW)
+ || mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_G711_MLAW)) {
+ sampleRates = new int[] { 8000 };
+ bitRates = Range.create(64000, 64000);
+ // platform allows multiple channels for this format
+ } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_MSGSM)) {
+ sampleRates = new int[] { 8000 };
+ bitRates = Range.create(13000, 13000);
+ maxChannels = 1;
+ } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_AC3)) {
+ maxChannels = 6;
+ } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_EAC3)) {
+ maxChannels = 16;
+ } else {
+ Log.w(TAG, "Unsupported mime " + mime);
+ mParent.mError |= ERROR_UNSUPPORTED;
+ }
+
+ // restrict ranges
+ if (sampleRates != null) {
+ limitSampleRates(sampleRates);
+ } else if (sampleRateRange != null) {
+ limitSampleRates(new Range[] { sampleRateRange });
+ }
+ applyLimits(maxChannels, bitRates);
+ }
+
+ private void applyLimits(int maxInputChannels, Range<Integer> bitRates) {
+ mMaxInputChannelCount = Range.create(1, mMaxInputChannelCount)
+ .clamp(maxInputChannels);
+ if (bitRates != null) {
+ mBitrateRange = mBitrateRange.intersect(bitRates);
+ }
+ }
+
+ private void parseFromInfo(MediaFormat info) {
+ int maxInputChannels = MAX_INPUT_CHANNEL_COUNT;
+ Range<Integer> bitRates = POSITIVE_INTEGERS;
+
+ if (info.containsKey("sample-rate-ranges")) {
+ String[] rateStrings = info.getString("sample-rate-ranges").split(",");
+ Range<Integer>[] rateRanges = new Range[rateStrings.length];
+ for (int i = 0; i < rateStrings.length; i++) {
+ rateRanges[i] = Utils.parseIntRange(rateStrings[i], null);
+ }
+ limitSampleRates(rateRanges);
+ }
+ if (info.containsKey("max-channel-count")) {
+ maxInputChannels = Utils.parseIntSafely(
+ info.getString("max-channel-count"), maxInputChannels);
+ } else if ((mParent.mError & ERROR_UNSUPPORTED) != 0) {
+ maxInputChannels = 0;
+ }
+ if (info.containsKey("bitrate-range")) {
+ bitRates = bitRates.intersect(
+ Utils.parseIntRange(info.getString("bitrate-range"), bitRates));
+ }
+ applyLimits(maxInputChannels, bitRates);
+ }
+
+ /** @hide */
+ public void setDefaultFormat(MediaFormat format) {
+ // report settings that have only a single choice
+ if (mBitrateRange.getLower().equals(mBitrateRange.getUpper())) {
+ format.setInteger(MediaFormat.KEY_BIT_RATE, mBitrateRange.getLower());
+ }
+ if (mMaxInputChannelCount == 1) {
+ // mono-only format
+ format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
+ }
+ if (mSampleRates != null && mSampleRates.length == 1) {
+ format.setInteger(MediaFormat.KEY_SAMPLE_RATE, mSampleRates[0]);
+ }
+ }
+
+ /** @hide */
+ public boolean supportsFormat(MediaFormat format) {
+ Map<String, Object> map = format.getMap();
+ Integer sampleRate = (Integer)map.get(MediaFormat.KEY_SAMPLE_RATE);
+ Integer channels = (Integer)map.get(MediaFormat.KEY_CHANNEL_COUNT);
+
+ if (!supports(sampleRate, channels)) {
+ return false;
+ }
+
+ if (!CodecCapabilities.supportsBitrate(mBitrateRange, format)) {
+ return false;
+ }
+
+ // nothing to do for:
+ // KEY_CHANNEL_MASK: codecs don't get this
+ // KEY_IS_ADTS: required feature for all AAC decoders
+ return true;
+ }
+ }
+
+ /**
+ * A class that supports querying the video capabilities of a codec.
+ */
+ public static final class VideoCapabilities {
+ private static final String TAG = "VideoCapabilities";
+ private CodecCapabilities mParent;
+ private Range<Integer> mBitrateRange;
+
+ private Range<Integer> mHeightRange;
+ private Range<Integer> mWidthRange;
+ private Range<Integer> mBlockCountRange;
+ private Range<Integer> mHorizontalBlockRange;
+ private Range<Integer> mVerticalBlockRange;
+ private Range<Rational> mAspectRatioRange;
+ private Range<Rational> mBlockAspectRatioRange;
+ private Range<Long> mBlocksPerSecondRange;
+ private Map<Size, Range<Long>> mMeasuredFrameRates;
+ private Range<Integer> mFrameRateRange;
+
+ private int mBlockWidth;
+ private int mBlockHeight;
+ private int mWidthAlignment;
+ private int mHeightAlignment;
+ private int mSmallerDimensionUpperLimit;
+
+ private boolean mAllowMbOverride; // allow XML to override calculated limits
+
+ /**
+ * Returns the range of supported bitrates in bits/second.
+ */
+ public Range<Integer> getBitrateRange() {
+ return mBitrateRange;
+ }
+
+ /**
+ * Returns the range of supported video widths.
+ */
+ public Range<Integer> getSupportedWidths() {
+ return mWidthRange;
+ }
+
+ /**
+ * Returns the range of supported video heights.
+ */
+ public Range<Integer> getSupportedHeights() {
+ return mHeightRange;
+ }
+
+ /**
+ * Returns the alignment requirement for video width (in pixels).
+ *
+ * This is a power-of-2 value that video width must be a
+ * multiple of.
+ */
+ public int getWidthAlignment() {
+ return mWidthAlignment;
+ }
+
+ /**
+ * Returns the alignment requirement for video height (in pixels).
+ *
+ * This is a power-of-2 value that video height must be a
+ * multiple of.
+ */
+ public int getHeightAlignment() {
+ return mHeightAlignment;
+ }
+
+ /**
+ * Return the upper limit on the smaller dimension of width or height.
+ * <p></p>
+ * Some codecs have a limit on the smaller dimension, whether it be
+ * the width or the height. E.g. a codec may only be able to handle
+ * up to 1920x1080 both in landscape and portrait mode (1080x1920).
+ * In this case the maximum width and height are both 1920, but the
+ * smaller dimension limit will be 1080. For other codecs, this is
+ * {@code Math.min(getSupportedWidths().getUpper(),
+ * getSupportedHeights().getUpper())}.
+ *
+ * @hide
+ */
+ public int getSmallerDimensionUpperLimit() {
+ return mSmallerDimensionUpperLimit;
+ }
+
+ /**
+ * Returns the range of supported frame rates.
+ * <p>
+ * This is not a performance indicator. Rather, it expresses the
+ * limits specified in the coding standard, based on the complexities
+ * of encoding material for later playback at a certain frame rate,
+ * or the decoding of such material in non-realtime.
+ */
+ public Range<Integer> getSupportedFrameRates() {
+ return mFrameRateRange;
+ }
+
        /**
         * Returns the range of supported video widths for a video height.
         * @param height the height of the video
         * @throws IllegalArgumentException if the height is not supported
         *         or no width is suitable for it
         */
        public Range<Integer> getSupportedWidthsFor(int height) {
            try {
                Range<Integer> range = mWidthRange;
                if (!mHeightRange.contains(height)
                        || (height % mHeightAlignment) != 0) {
                    throw new IllegalArgumentException("unsupported height");
                }
                final int heightInBlocks = Utils.divUp(height, mBlockHeight);

                // constrain by block count and by block aspect ratio
                final int minWidthInBlocks = Math.max(
                        Utils.divUp(mBlockCountRange.getLower(), heightInBlocks),
                        (int)Math.ceil(mBlockAspectRatioRange.getLower().doubleValue()
                                * heightInBlocks));
                final int maxWidthInBlocks = Math.min(
                        mBlockCountRange.getUpper() / heightInBlocks,
                        (int)(mBlockAspectRatioRange.getUpper().doubleValue()
                                * heightInBlocks));
                range = range.intersect(
                        (minWidthInBlocks - 1) * mBlockWidth + mWidthAlignment,
                        maxWidthInBlocks * mBlockWidth);

                // constrain by smaller dimension limit
                if (height > mSmallerDimensionUpperLimit) {
                    range = range.intersect(1, mSmallerDimensionUpperLimit);
                }

                // constrain by aspect ratio
                range = range.intersect(
                        (int)Math.ceil(mAspectRatioRange.getLower().doubleValue()
                                * height),
                        (int)(mAspectRatioRange.getUpper().doubleValue() * height));
                return range;
            } catch (IllegalArgumentException e) {
                // height is not supported because there are no suitable widths
                // NOTE: this also catches empty intersect()s above AND the explicit
                // throw at the top of the try block; either way callers see a plain
                // "unsupported height" without the original cause
                Log.v(TAG, "could not get supported widths for " + height);
                throw new IllegalArgumentException("unsupported height");
            }
        }
+
        /**
         * Returns the range of supported video heights for a video width
         * @param width the width of the video
         * @throws IllegalArgumentException if the width is not supported
         *         or no height is suitable for it
         */
        public Range<Integer> getSupportedHeightsFor(int width) {
            try {
                Range<Integer> range = mHeightRange;
                if (!mWidthRange.contains(width)
                        || (width % mWidthAlignment) != 0) {
                    throw new IllegalArgumentException("unsupported width");
                }
                final int widthInBlocks = Utils.divUp(width, mBlockWidth);

                // constrain by block count and by block aspect ratio
                // (mirror image of getSupportedWidthsFor: divide by the block
                // aspect ratio instead of multiplying)
                final int minHeightInBlocks = Math.max(
                        Utils.divUp(mBlockCountRange.getLower(), widthInBlocks),
                        (int)Math.ceil(widthInBlocks /
                                mBlockAspectRatioRange.getUpper().doubleValue()));
                final int maxHeightInBlocks = Math.min(
                        mBlockCountRange.getUpper() / widthInBlocks,
                        (int)(widthInBlocks /
                                mBlockAspectRatioRange.getLower().doubleValue()));
                range = range.intersect(
                        (minHeightInBlocks - 1) * mBlockHeight + mHeightAlignment,
                        maxHeightInBlocks * mBlockHeight);

                // constrain by smaller dimension limit
                if (width > mSmallerDimensionUpperLimit) {
                    range = range.intersect(1, mSmallerDimensionUpperLimit);
                }

                // constrain by aspect ratio
                range = range.intersect(
                        (int)Math.ceil(width /
                                mAspectRatioRange.getUpper().doubleValue()),
                        (int)(width / mAspectRatioRange.getLower().doubleValue()));
                return range;
            } catch (IllegalArgumentException e) {
                // width is not supported because there are no suitable heights
                // (also reached when an intersect() above comes up empty)
                Log.v(TAG, "could not get supported heights for " + width);
                throw new IllegalArgumentException("unsupported width");
            }
        }
+
+ /**
+ * Returns the range of supported video frame rates for a video size.
+ * <p>
+ * This is not a performance indicator. Rather, it expresses the limits specified in
+ * the coding standard, based on the complexities of encoding material of a given
+ * size for later playback at a certain frame rate, or the decoding of such material
+ * in non-realtime.
+
+ * @param width the width of the video
+ * @param height the height of the video
+ */
+ public Range<Double> getSupportedFrameRatesFor(int width, int height) {
+ Range<Integer> range = mHeightRange;
+ if (!supports(width, height, null)) {
+ throw new IllegalArgumentException("unsupported size");
+ }
+ final int blockCount =
+ Utils.divUp(width, mBlockWidth) * Utils.divUp(height, mBlockHeight);
+
+ return Range.create(
+ Math.max(mBlocksPerSecondRange.getLower() / (double) blockCount,
+ (double) mFrameRateRange.getLower()),
+ Math.min(mBlocksPerSecondRange.getUpper() / (double) blockCount,
+ (double) mFrameRateRange.getUpper()));
+ }
+
+ private int getBlockCount(int width, int height) {
+ return Utils.divUp(width, mBlockWidth) * Utils.divUp(height, mBlockHeight);
+ }
+
+ @NonNull
+ private Size findClosestSize(int width, int height) {
+ int targetBlockCount = getBlockCount(width, height);
+ Size closestSize = null;
+ int minDiff = Integer.MAX_VALUE;
+ for (Size size : mMeasuredFrameRates.keySet()) {
+ int diff = Math.abs(targetBlockCount -
+ getBlockCount(size.getWidth(), size.getHeight()));
+ if (diff < minDiff) {
+ minDiff = diff;
+ closestSize = size;
+ }
+ }
+ return closestSize;
+ }
+
+ private Range<Double> estimateFrameRatesFor(int width, int height) {
+ Size size = findClosestSize(width, height);
+ Range<Long> range = mMeasuredFrameRates.get(size);
+ Double ratio = getBlockCount(size.getWidth(), size.getHeight())
+ / (double)Math.max(getBlockCount(width, height), 1);
+ return Range.create(range.getLower() * ratio, range.getUpper() * ratio);
+ }
+
        /**
         * Returns the range of achievable video frame rates for a video size.
         * May return {@code null}, if the codec did not publish any measurement
         * data.
         * <p>
         * This is a performance estimate provided by the device manufacturer based on statistical
         * sampling of full-speed decoding and encoding measurements in various configurations
         * of common video sizes supported by the codec. As such it should only be used to
         * compare individual codecs on the device. The value is not suitable for comparing
         * different devices or even different android releases for the same device.
         * <p>
         * <em>On {@link android.os.Build.VERSION_CODES#M} release</em> the returned range
         * corresponds to the fastest frame rates achieved in the tested configurations. As
         * such, it should not be used to gauge guaranteed or even average codec performance
         * on the device.
         * <p>
         * <em>On {@link android.os.Build.VERSION_CODES#N} release</em> the returned range
         * corresponds closer to sustained performance <em>in tested configurations</em>.
         * One can expect to achieve sustained performance higher than the lower limit more than
         * 50% of the time, and higher than half of the lower limit at least 90% of the time
         * <em>in tested configurations</em>.
         * Conversely, one can expect performance lower than twice the upper limit at least
         * 90% of the time.
         * <p class=note>
         * Tested configurations use a single active codec. For use cases where multiple
         * codecs are active, applications can expect lower and in most cases significantly lower
         * performance.
         * <p class=note>
         * The returned range value is interpolated from the nearest frame size(s) tested.
         * Codec performance is severely impacted by other activity on the device as well
         * as environmental factors (such as battery level, temperature or power source), and can
         * vary significantly even in a steady environment.
         * <p class=note>
         * Use this method in cases where only codec performance matters, e.g. to evaluate if
         * a codec has any chance of meeting a performance target. Codecs are listed
         * in {@link MediaCodecList} in the preferred order as defined by the device
         * manufacturer. As such, applications should use the first suitable codec in the
         * list to achieve the best balance between power use and performance.
         *
         * @param width the width of the video
         * @param height the height of the video
         *
         * @throws IllegalArgumentException if the video size is not supported.
         */
        @Nullable
        public Range<Double> getAchievableFrameRatesFor(int width, int height) {
            if (!supports(width, height, null)) {
                throw new IllegalArgumentException("unsupported size");
            }

            // no "measured-frame-rate-*" keys were published by this codec
            if (mMeasuredFrameRates == null || mMeasuredFrameRates.size() <= 0) {
                Log.w(TAG, "Codec did not publish any measurement data.");
                return null;
            }

            // interpolate from the closest measured size
            return estimateFrameRatesFor(width, height);
        }
+
+ /**
+ * Returns whether a given video size ({@code width} and
+ * {@code height}) and {@code frameRate} combination is supported.
+ */
+ public boolean areSizeAndRateSupported(
+ int width, int height, double frameRate) {
+ return supports(width, height, frameRate);
+ }
+
+ /**
+ * Returns whether a given video size ({@code width} and
+ * {@code height}) is supported.
+ */
+ public boolean isSizeSupported(int width, int height) {
+ return supports(width, height, null);
+ }
+
+ private boolean supports(Integer width, Integer height, Number rate) {
+ boolean ok = true;
+
+ if (ok && width != null) {
+ ok = mWidthRange.contains(width)
+ && (width % mWidthAlignment == 0);
+ }
+ if (ok && height != null) {
+ ok = mHeightRange.contains(height)
+ && (height % mHeightAlignment == 0);
+ }
+ if (ok && rate != null) {
+ ok = mFrameRateRange.contains(Utils.intRangeFor(rate.doubleValue()));
+ }
+ if (ok && height != null && width != null) {
+ ok = Math.min(height, width) <= mSmallerDimensionUpperLimit;
+
+ final int widthInBlocks = Utils.divUp(width, mBlockWidth);
+ final int heightInBlocks = Utils.divUp(height, mBlockHeight);
+ final int blockCount = widthInBlocks * heightInBlocks;
+ ok = ok && mBlockCountRange.contains(blockCount)
+ && mBlockAspectRatioRange.contains(
+ new Rational(widthInBlocks, heightInBlocks))
+ && mAspectRatioRange.contains(new Rational(width, height));
+ if (ok && rate != null) {
+ double blocksPerSec = blockCount * rate.doubleValue();
+ ok = mBlocksPerSecondRange.contains(
+ Utils.longRangeFor(blocksPerSec));
+ }
+ }
+ return ok;
+ }
+
+ /**
+ * @hide
+ * @throws java.lang.ClassCastException */
+ public boolean supportsFormat(MediaFormat format) {
+ final Map<String, Object> map = format.getMap();
+ Integer width = (Integer)map.get(MediaFormat.KEY_WIDTH);
+ Integer height = (Integer)map.get(MediaFormat.KEY_HEIGHT);
+ Number rate = (Number)map.get(MediaFormat.KEY_FRAME_RATE);
+
+ if (!supports(width, height, rate)) {
+ return false;
+ }
+
+ if (!CodecCapabilities.supportsBitrate(mBitrateRange, format)) {
+ return false;
+ }
+
+ // we ignore color-format for now as it is not reliably reported by codec
+ return true;
+ }
+
        /* no public constructor; instances are built via create() + init() */
        private VideoCapabilities() { }
+
+ /** @hide */
+ public static VideoCapabilities create(
+ MediaFormat info, CodecCapabilities parent) {
+ VideoCapabilities caps = new VideoCapabilities();
+ caps.init(info, parent);
+ return caps;
+ }
+
        /** @hide */
        public void init(MediaFormat info, CodecCapabilities parent) {
            mParent = parent;
            // order matters: start from platform-wide limits, narrow by the codec's
            // profile/levels, then by its published info, and finally recompute the
            // derived pixel/block limits
            initWithPlatformLimits();
            applyLevelLimits();
            parseFromInfo(info);
            updateLimits();
        }
+
+ /** @hide */
+ public Size getBlockSize() {
+ return new Size(mBlockWidth, mBlockHeight);
+ }
+
+ /** @hide */
+ public Range<Integer> getBlockCountRange() {
+ return mBlockCountRange;
+ }
+
+ /** @hide */
+ public Range<Long> getBlocksPerSecondRange() {
+ return mBlocksPerSecondRange;
+ }
+
+ /** @hide */
+ public Range<Rational> getAspectRatioRange(boolean blocks) {
+ return blocks ? mBlockAspectRatioRange : mAspectRatioRange;
+ }
+
        // Resets every limit field to the widest value the platform allows; the
        // subsequent applyLevelLimits()/parseFromInfo() calls only narrow these.
        private void initWithPlatformLimits() {
            mBitrateRange = BITRATE_RANGE;

            mWidthRange  = SIZE_RANGE;
            mHeightRange = SIZE_RANGE;
            mFrameRateRange = FRAME_RATE_RANGE;

            mHorizontalBlockRange = SIZE_RANGE;
            mVerticalBlockRange   = SIZE_RANGE;

            // full positive ranges are supported as these get calculated
            mBlockCountRange      = POSITIVE_INTEGERS;
            mBlocksPerSecondRange = POSITIVE_LONGS;

            mBlockAspectRatioRange = POSITIVE_RATIONALS;
            mAspectRatioRange      = POSITIVE_RATIONALS;

            // YUV 4:2:0 requires 2:2 alignment
            mWidthAlignment = 2;
            mHeightAlignment = 2;
            mBlockWidth = 2;
            mBlockHeight = 2;
            mSmallerDimensionUpperLimit = SIZE_RANGE.getUpper();
        }
+
+ private Map<Size, Range<Long>> getMeasuredFrameRates(Map<String, Object> map) {
+ Map<Size, Range<Long>> ret = new HashMap<Size, Range<Long>>();
+ final String prefix = "measured-frame-rate-";
+ Set<String> keys = map.keySet();
+ for (String key : keys) {
+ // looking for: measured-frame-rate-WIDTHxHEIGHT-range
+ if (!key.startsWith(prefix)) {
+ continue;
+ }
+ String subKey = key.substring(prefix.length());
+ String[] temp = key.split("-");
+ if (temp.length != 5) {
+ continue;
+ }
+ String sizeStr = temp[3];
+ Size size = Utils.parseSize(sizeStr, null);
+ if (size == null || size.getWidth() * size.getHeight() <= 0) {
+ continue;
+ }
+ Range<Long> range = Utils.parseLongRange(map.get(key), null);
+ if (range == null || range.getLower() < 0 || range.getUpper() < 0) {
+ continue;
+ }
+ ret.put(size, range);
+ }
+ return ret;
+ }
+
+ private static Pair<Range<Integer>, Range<Integer>> parseWidthHeightRanges(Object o) {
+ Pair<Size, Size> range = Utils.parseSizeRange(o);
+ if (range != null) {
+ try {
+ return Pair.create(
+ Range.create(range.first.getWidth(), range.second.getWidth()),
+ Range.create(range.first.getHeight(), range.second.getHeight()));
+ } catch (IllegalArgumentException e) {
+ Log.w(TAG, "could not parse size range '" + o + "'");
+ }
+ }
+ return null;
+ }
+
        /** @hide
         * Derives a VP9 level from the codec's published limits, for codecs that do
         * not advertise profile/levels. The thresholds below appear to follow the
         * VP9 level table (luma sample rate, picture size, bitrate, dimension) —
         * confirm against the VP9 specification's level definitions.
         */
        public static int equivalentVP9Level(MediaFormat info) {
            final Map<String, Object> map = info.getMap();

            // pixels per coding block (defaults to 8x8 = 64)
            Size blockSize = Utils.parseSize(map.get("block-size"), new Size(8, 8));
            int BS = blockSize.getWidth() * blockSize.getHeight();

            // max picture size in luma samples (blocks * pixels-per-block)
            Range<Integer> counts = Utils.parseIntRange(map.get("block-count-range"), null);
            int FS = counts == null ? 0 : BS * counts.getUpper();

            // max luma sample rate (blocks/sec * pixels-per-block)
            Range<Long> blockRates =
                    Utils.parseLongRange(map.get("blocks-per-second-range"), null);
            long SR = blockRates == null ? 0 : BS * blockRates.getUpper();

            // max dimension in pixels (larger of max width / max height)
            Pair<Range<Integer>, Range<Integer>> dimensionRanges =
                    parseWidthHeightRanges(map.get("size-range"));
            int D = dimensionRanges == null ? 0 : Math.max(
                    dimensionRanges.first.getUpper(), dimensionRanges.second.getUpper());

            // max bitrate in kbps (rounded up)
            Range<Integer> bitRates = Utils.parseIntRange(map.get("bitrate-range"), null);
            int BR = bitRates == null ? 0 : Utils.divUp(bitRates.getUpper(), 1000);

            // smallest level whose four limits all cover the published values
            if (SR <= 829440 && FS <= 36864 && BR <= 200 && D <= 512)
                return CodecProfileLevel.VP9Level1;
            if (SR <= 2764800 && FS <= 73728 && BR <= 800 && D <= 768)
                return CodecProfileLevel.VP9Level11;
            if (SR <= 4608000 && FS <= 122880 && BR <= 1800 && D <= 960)
                return CodecProfileLevel.VP9Level2;
            if (SR <= 9216000 && FS <= 245760 && BR <= 3600 && D <= 1344)
                return CodecProfileLevel.VP9Level21;
            if (SR <= 20736000 && FS <= 552960 && BR <= 7200 && D <= 2048)
                return CodecProfileLevel.VP9Level3;
            if (SR <= 36864000 && FS <= 983040 && BR <= 12000 && D <= 2752)
                return CodecProfileLevel.VP9Level31;
            if (SR <= 83558400 && FS <= 2228224 && BR <= 18000 && D <= 4160)
                return CodecProfileLevel.VP9Level4;
            if (SR <= 160432128 && FS <= 2228224 && BR <= 30000 && D <= 4160)
                return CodecProfileLevel.VP9Level41;
            if (SR <= 311951360 && FS <= 8912896 && BR <= 60000 && D <= 8384)
                return CodecProfileLevel.VP9Level5;
            if (SR <= 588251136 && FS <= 8912896 && BR <= 120000 && D <= 8384)
                return CodecProfileLevel.VP9Level51;
            if (SR <= 1176502272 && FS <= 8912896 && BR <= 180000 && D <= 8384)
                return CodecProfileLevel.VP9Level52;
            if (SR <= 1176502272 && FS <= 35651584 && BR <= 180000 && D <= 16832)
                return CodecProfileLevel.VP9Level6;
            if (SR <= 2353004544L && FS <= 35651584 && BR <= 240000 && D <= 16832)
                return CodecProfileLevel.VP9Level61;
            if (SR <= 4706009088L && FS <= 35651584 && BR <= 480000 && D <= 16832)
                return CodecProfileLevel.VP9Level62;
            // returning largest level
            return CodecProfileLevel.VP9Level62;
        }
+
        /*
         * Narrows (or, for unsupported/overridable codecs, replaces) the current
         * limits using the codec's own published capability keys. Values parsed
         * from the info map are normalized to this object's block size before being
         * intersected with the existing limits.
         */
        private void parseFromInfo(MediaFormat info) {
            final Map<String, Object> map = info.getMap();
            Size blockSize = new Size(mBlockWidth, mBlockHeight);
            Size alignment = new Size(mWidthAlignment, mHeightAlignment);
            Range<Integer> counts = null, widths = null, heights = null;
            Range<Integer> frameRates = null, bitRates = null;
            Range<Long> blockRates = null;
            Range<Rational> ratios = null, blockRatios = null;

            blockSize = Utils.parseSize(map.get("block-size"), blockSize);
            alignment = Utils.parseSize(map.get("alignment"), alignment);
            counts = Utils.parseIntRange(map.get("block-count-range"), null);
            blockRates =
                Utils.parseLongRange(map.get("blocks-per-second-range"), null);
            mMeasuredFrameRates = getMeasuredFrameRates(map);
            Pair<Range<Integer>, Range<Integer>> sizeRanges =
                parseWidthHeightRanges(map.get("size-range"));
            if (sizeRanges != null) {
                widths = sizeRanges.first;
                heights = sizeRanges.second;
            }
            // for now this just means using the smaller max size as 2nd
            // upper limit.
            // for now we are keeping the profile specific "width/height
            // in macroblocks" limits.
            if (map.containsKey("feature-can-swap-width-height")) {
                if (widths != null) {
                    mSmallerDimensionUpperLimit =
                        Math.min(widths.getUpper(), heights.getUpper());
                    widths = heights = widths.extend(heights);
                } else {
                    Log.w(TAG, "feature can-swap-width-height is best used with size-range");
                    mSmallerDimensionUpperLimit =
                        Math.min(mWidthRange.getUpper(), mHeightRange.getUpper());
                    mWidthRange = mHeightRange = mWidthRange.extend(mHeightRange);
                }
            }

            ratios = Utils.parseRationalRange(
                    map.get("block-aspect-ratio-range"), null);
            blockRatios = Utils.parseRationalRange(
                    map.get("pixel-aspect-ratio-range"), null);
            // out-of-platform-limit frame-rate/bitrate ranges are dropped entirely
            frameRates = Utils.parseIntRange(map.get("frame-rate-range"), null);
            if (frameRates != null) {
                try {
                    frameRates = frameRates.intersect(FRAME_RATE_RANGE);
                } catch (IllegalArgumentException e) {
                    Log.w(TAG, "frame rate range (" + frameRates
                            + ") is out of limits: " + FRAME_RATE_RANGE);
                    frameRates = null;
                }
            }
            bitRates = Utils.parseIntRange(map.get("bitrate-range"), null);
            if (bitRates != null) {
                try {
                    bitRates = bitRates.intersect(BITRATE_RANGE);
                } catch (IllegalArgumentException e) {
                    Log.w(TAG,  "bitrate range (" + bitRates
                            + ") is out of limits: " + BITRATE_RANGE);
                    bitRates = null;
                }
            }

            checkPowerOfTwo(
                    blockSize.getWidth(), "block-size width must be power of two");
            checkPowerOfTwo(
                    blockSize.getHeight(), "block-size height must be power of two");

            checkPowerOfTwo(
                    alignment.getWidth(), "alignment width must be power of two");
            checkPowerOfTwo(
                    alignment.getHeight(), "alignment height must be power of two");

            // update block-size and alignment
            applyMacroBlockLimits(
                    Integer.MAX_VALUE, Integer.MAX_VALUE, Integer.MAX_VALUE,
                    Long.MAX_VALUE, blockSize.getWidth(), blockSize.getHeight(),
                    alignment.getWidth(), alignment.getHeight());

            if ((mParent.mError & ERROR_UNSUPPORTED) != 0 || mAllowMbOverride) {
                // codec supports profiles that we don't know.
                // Use supplied values clipped to platform limits
                if (widths != null) {
                    mWidthRange = SIZE_RANGE.intersect(widths);
                }
                if (heights != null) {
                    mHeightRange = SIZE_RANGE.intersect(heights);
                }
                if (counts != null) {
                    // rescale published block counts to this object's block size
                    mBlockCountRange = POSITIVE_INTEGERS.intersect(
                            Utils.factorRange(counts, mBlockWidth * mBlockHeight
                                    / blockSize.getWidth() / blockSize.getHeight()));
                }
                if (blockRates != null) {
                    mBlocksPerSecondRange = POSITIVE_LONGS.intersect(
                            Utils.factorRange(blockRates, mBlockWidth * mBlockHeight
                                    / blockSize.getWidth() / blockSize.getHeight()));
                }
                if (blockRatios != null) {
                    mBlockAspectRatioRange = POSITIVE_RATIONALS.intersect(
                            Utils.scaleRange(blockRatios,
                                    mBlockHeight / blockSize.getHeight(),
                                    mBlockWidth / blockSize.getWidth()));
                }
                if (ratios != null) {
                    mAspectRatioRange = POSITIVE_RATIONALS.intersect(ratios);
                }
                if (frameRates != null) {
                    mFrameRateRange = FRAME_RATE_RANGE.intersect(frameRates);
                }
                if (bitRates != null) {
                    // only allow bitrate override if unsupported profiles were encountered
                    if ((mParent.mError & ERROR_UNSUPPORTED) != 0) {
                        mBitrateRange = BITRATE_RANGE.intersect(bitRates);
                    } else {
                        mBitrateRange = mBitrateRange.intersect(bitRates);
                    }
                }
            } else {
                // no unsupported profile/levels, so restrict values to known limits
                if (widths != null) {
                    mWidthRange = mWidthRange.intersect(widths);
                }
                if (heights != null) {
                    mHeightRange = mHeightRange.intersect(heights);
                }
                if (counts != null) {
                    mBlockCountRange = mBlockCountRange.intersect(
                            Utils.factorRange(counts, mBlockWidth * mBlockHeight
                                    / blockSize.getWidth() / blockSize.getHeight()));
                }
                if (blockRates != null) {
                    mBlocksPerSecondRange = mBlocksPerSecondRange.intersect(
                            Utils.factorRange(blockRates, mBlockWidth * mBlockHeight
                                    / blockSize.getWidth() / blockSize.getHeight()));
                }
                if (blockRatios != null) {
                    mBlockAspectRatioRange = mBlockAspectRatioRange.intersect(
                            Utils.scaleRange(blockRatios,
                                    mBlockHeight / blockSize.getHeight(),
                                    mBlockWidth / blockSize.getWidth()));
                }
                if (ratios != null) {
                    mAspectRatioRange = mAspectRatioRange.intersect(ratios);
                }
                if (frameRates != null) {
                    mFrameRateRange = mFrameRateRange.intersect(frameRates);
                }
                if (bitRates != null) {
                    mBitrateRange = mBitrateRange.intersect(bitRates);
                }
            }
            // recompute derived width/height limits from the updated block limits
            updateLimits();
        }
+
+        /**
+         * Restricts the stored limits to those of a codec whose minimum block
+         * size is {@code blockWidth x blockHeight} (both powers of two), with
+         * the supplied count/rate/aspect-ratio limits expressed in units of
+         * that block size.  Afterwards, all member limits are kept in units of
+         * max(current block size, new block size).
+         */
+        private void applyBlockLimits(
+                int blockWidth, int blockHeight,
+                Range<Integer> counts, Range<Long> rates, Range<Rational> ratios) {
+            checkPowerOfTwo(blockWidth, "blockWidth must be a power of two");
+            checkPowerOfTwo(blockHeight, "blockHeight must be a power of two");
+
+            // common block size is the larger of the two (still a power of two)
+            final int newBlockWidth = Math.max(blockWidth, mBlockWidth);
+            final int newBlockHeight = Math.max(blockHeight, mBlockHeight);
+
+            // factor will always be a power-of-2
+            int factor =
+                newBlockWidth * newBlockHeight / mBlockWidth / mBlockHeight;
+            if (factor != 1) {
+                // rescale the stored limits from the old block size to the new one
+                mBlockCountRange = Utils.factorRange(mBlockCountRange, factor);
+                mBlocksPerSecondRange = Utils.factorRange(
+                        mBlocksPerSecondRange, factor);
+                mBlockAspectRatioRange = Utils.scaleRange(
+                        mBlockAspectRatioRange,
+                        newBlockHeight / mBlockHeight,
+                        newBlockWidth / mBlockWidth);
+                mHorizontalBlockRange = Utils.factorRange(
+                        mHorizontalBlockRange, newBlockWidth / mBlockWidth);
+                mVerticalBlockRange = Utils.factorRange(
+                        mVerticalBlockRange, newBlockHeight / mBlockHeight);
+            }
+            // likewise rescale the supplied limits to the common block size
+            factor = newBlockWidth * newBlockHeight / blockWidth / blockHeight;
+            if (factor != 1) {
+                counts = Utils.factorRange(counts, factor);
+                rates = Utils.factorRange(rates, factor);
+                ratios = Utils.scaleRange(
+                        ratios, newBlockHeight / blockHeight,
+                        newBlockWidth / blockWidth);
+            }
+            mBlockCountRange = mBlockCountRange.intersect(counts);
+            mBlocksPerSecondRange = mBlocksPerSecondRange.intersect(rates);
+            mBlockAspectRatioRange = mBlockAspectRatioRange.intersect(ratios);
+            mBlockWidth = newBlockWidth;
+            mBlockHeight = newBlockHeight;
+        }
+
+        /**
+         * Applies width/height alignment requirements (both powers of two).
+         * The code maintains the invariant that alignment never exceeds the
+         * block size; if a larger alignment is requested, the block size is
+         * grown first via {@link #applyBlockLimits}.
+         */
+        private void applyAlignment(int widthAlignment, int heightAlignment) {
+            checkPowerOfTwo(widthAlignment, "widthAlignment must be a power of two");
+            checkPowerOfTwo(heightAlignment, "heightAlignment must be a power of two");
+
+            if (widthAlignment > mBlockWidth || heightAlignment > mBlockHeight) {
+                // maintain assumption that 0 < alignment <= block-size
+                applyBlockLimits(
+                        Math.max(widthAlignment, mBlockWidth),
+                        Math.max(heightAlignment, mBlockHeight),
+                        POSITIVE_INTEGERS, POSITIVE_LONGS, POSITIVE_RATIONALS);
+            }
+
+            // alignments only ever grow (remain powers of two)
+            mWidthAlignment = Math.max(widthAlignment, mWidthAlignment);
+            mHeightAlignment = Math.max(heightAlignment, mHeightAlignment);
+
+            // snap the supported size ranges to the (possibly new) alignment
+            mWidthRange = Utils.alignRange(mWidthRange, mWidthAlignment);
+            mHeightRange = Utils.alignRange(mHeightRange, mHeightAlignment);
+        }
+
+        /**
+         * Propagates the stored limits between the pixel-, block- and
+         * rate-domains so that all ranges are mutually consistent: each range
+         * is intersected with the bounds implied by the others.  The order of
+         * the intersections below is significant.
+         */
+        private void updateLimits() {
+            // pixels -> blocks <- counts
+            mHorizontalBlockRange = mHorizontalBlockRange.intersect(
+                    Utils.factorRange(mWidthRange, mBlockWidth));
+            mHorizontalBlockRange = mHorizontalBlockRange.intersect(
+                    Range.create(
+                            mBlockCountRange.getLower() / mVerticalBlockRange.getUpper(),
+                            mBlockCountRange.getUpper() / mVerticalBlockRange.getLower()));
+            mVerticalBlockRange = mVerticalBlockRange.intersect(
+                    Utils.factorRange(mHeightRange, mBlockHeight));
+            mVerticalBlockRange = mVerticalBlockRange.intersect(
+                    Range.create(
+                            mBlockCountRange.getLower() / mHorizontalBlockRange.getUpper(),
+                            mBlockCountRange.getUpper() / mHorizontalBlockRange.getLower()));
+            mBlockCountRange = mBlockCountRange.intersect(
+                    Range.create(
+                            mHorizontalBlockRange.getLower()
+                                    * mVerticalBlockRange.getLower(),
+                            mHorizontalBlockRange.getUpper()
+                                    * mVerticalBlockRange.getUpper()));
+            mBlockAspectRatioRange = mBlockAspectRatioRange.intersect(
+                    new Rational(
+                            mHorizontalBlockRange.getLower(), mVerticalBlockRange.getUpper()),
+                    new Rational(
+                            mHorizontalBlockRange.getUpper(), mVerticalBlockRange.getLower()));
+
+            // blocks -> pixels
+            mWidthRange = mWidthRange.intersect(
+                    (mHorizontalBlockRange.getLower() - 1) * mBlockWidth + mWidthAlignment,
+                    mHorizontalBlockRange.getUpper() * mBlockWidth);
+            mHeightRange = mHeightRange.intersect(
+                    (mVerticalBlockRange.getLower() - 1) * mBlockHeight + mHeightAlignment,
+                    mVerticalBlockRange.getUpper() * mBlockHeight);
+            mAspectRatioRange = mAspectRatioRange.intersect(
+                    new Rational(mWidthRange.getLower(), mHeightRange.getUpper()),
+                    new Rational(mWidthRange.getUpper(), mHeightRange.getLower()));
+
+            mSmallerDimensionUpperLimit = Math.min(
+                    mSmallerDimensionUpperLimit,
+                    Math.min(mWidthRange.getUpper(), mHeightRange.getUpper()));
+
+            // blocks -> rate
+            mBlocksPerSecondRange = mBlocksPerSecondRange.intersect(
+                    mBlockCountRange.getLower() * (long)mFrameRateRange.getLower(),
+                    mBlockCountRange.getUpper() * (long)mFrameRateRange.getUpper());
+            mFrameRateRange = mFrameRateRange.intersect(
+                    (int)(mBlocksPerSecondRange.getLower()
+                            / mBlockCountRange.getUpper()),
+                    (int)(mBlocksPerSecondRange.getUpper()
+                            / (double)mBlockCountRange.getLower()));
+        }
+
+        /**
+         * Convenience overload that applies macroblock limits with the
+         * minimum horizontal and vertical block counts fixed at 1.
+         */
+        private void applyMacroBlockLimits(
+                int maxHorizontalBlocks, int maxVerticalBlocks,
+                int maxBlocks, long maxBlocksPerSecond,
+                int blockWidth, int blockHeight,
+                int widthAlignment, int heightAlignment) {
+            applyMacroBlockLimits(
+                    1 /* minHorizontalBlocks */, 1 /* minVerticalBlocks */,
+                    maxHorizontalBlocks, maxVerticalBlocks,
+                    maxBlocks, maxBlocksPerSecond,
+                    blockWidth, blockHeight, widthAlignment, heightAlignment);
+        }
+
+        /**
+         * Intersects the stored limits with those of a codec whose natural
+         * block size is {@code blockWidth x blockHeight}.  The supplied
+         * min/max block counts and rates are expressed in that block size and
+         * are converted to the object's (possibly larger) common block size.
+         */
+        private void applyMacroBlockLimits(
+                int minHorizontalBlocks, int minVerticalBlocks,
+                int maxHorizontalBlocks, int maxVerticalBlocks,
+                int maxBlocks, long maxBlocksPerSecond,
+                int blockWidth, int blockHeight,
+                int widthAlignment, int heightAlignment) {
+            applyAlignment(widthAlignment, heightAlignment);
+            applyBlockLimits(
+                    blockWidth, blockHeight, Range.create(1, maxBlocks),
+                    Range.create(1L, maxBlocksPerSecond),
+                    Range.create(
+                            new Rational(1, maxVerticalBlocks),
+                            new Rational(maxHorizontalBlocks, 1)));
+            // after applyBlockLimits, mBlockWidth/mBlockHeight are at least
+            // blockWidth/blockHeight, so these divisions are >= 1; round the
+            // minimum up (divUp) and the maximum down
+            mHorizontalBlockRange =
+                    mHorizontalBlockRange.intersect(
+                            Utils.divUp(minHorizontalBlocks, (mBlockWidth / blockWidth)),
+                            maxHorizontalBlocks / (mBlockWidth / blockWidth));
+            mVerticalBlockRange =
+                    mVerticalBlockRange.intersect(
+                            Utils.divUp(minVerticalBlocks, (mBlockHeight / blockHeight)),
+                            maxVerticalBlocks / (mBlockHeight / blockHeight));
+        }
+
+        /**
+         * Derives frame size, block rate and bitrate limits from the codec's
+         * advertised profile/level pairs, using the per-level limits tables of
+         * each video standard (AVC, MPEG-2, MPEG-4, H.263, VP8, VP9, HEVC).
+         * Unknown or unsupported profiles/levels are recorded in
+         * {@code mParent.mError} so callers can relax limits accordingly.
+         */
+        private void applyLevelLimits() {
+            long maxBlocksPerSecond = 0;
+            int maxBlocks = 0;
+            int maxBps = 0;
+            int maxDPBBlocks = 0;
+
+            int errors = ERROR_NONE_SUPPORTED;
+            CodecProfileLevel[] profileLevels = mParent.profileLevels;
+            String mime = mParent.getMimeType();
+
+            if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_VIDEO_AVC)) {
+                maxBlocks = 99;
+                maxBlocksPerSecond = 1485;
+                maxBps = 64000;
+                maxDPBBlocks = 396;
+                for (CodecProfileLevel profileLevel: profileLevels) {
+                    // per-level limits: macroblocks/sec, frame size in MBs,
+                    // bitrate (scaled below by profile) and DPB size in MBs
+                    int MBPS = 0, FS = 0, BR = 0, DPB = 0;
+                    boolean supported = true;
+                    switch (profileLevel.level) {
+                        case CodecProfileLevel.AVCLevel1:
+                            MBPS = 1485; FS = 99; BR = 64; DPB = 396; break;
+                        case CodecProfileLevel.AVCLevel1b:
+                            MBPS = 1485; FS = 99; BR = 128; DPB = 396; break;
+                        case CodecProfileLevel.AVCLevel11:
+                            MBPS = 3000; FS = 396; BR = 192; DPB = 900; break;
+                        case CodecProfileLevel.AVCLevel12:
+                            MBPS = 6000; FS = 396; BR = 384; DPB = 2376; break;
+                        case CodecProfileLevel.AVCLevel13:
+                            MBPS = 11880; FS = 396; BR = 768; DPB = 2376; break;
+                        case CodecProfileLevel.AVCLevel2:
+                            MBPS = 11880; FS = 396; BR = 2000; DPB = 2376; break;
+                        case CodecProfileLevel.AVCLevel21:
+                            MBPS = 19800; FS = 792; BR = 4000; DPB = 4752; break;
+                        case CodecProfileLevel.AVCLevel22:
+                            MBPS = 20250; FS = 1620; BR = 4000; DPB = 8100; break;
+                        case CodecProfileLevel.AVCLevel3:
+                            MBPS = 40500; FS = 1620; BR = 10000; DPB = 8100; break;
+                        case CodecProfileLevel.AVCLevel31:
+                            MBPS = 108000; FS = 3600; BR = 14000; DPB = 18000; break;
+                        case CodecProfileLevel.AVCLevel32:
+                            MBPS = 216000; FS = 5120; BR = 20000; DPB = 20480; break;
+                        case CodecProfileLevel.AVCLevel4:
+                            MBPS = 245760; FS = 8192; BR = 20000; DPB = 32768; break;
+                        case CodecProfileLevel.AVCLevel41:
+                            MBPS = 245760; FS = 8192; BR = 50000; DPB = 32768; break;
+                        case CodecProfileLevel.AVCLevel42:
+                            MBPS = 522240; FS = 8704; BR = 50000; DPB = 34816; break;
+                        case CodecProfileLevel.AVCLevel5:
+                            MBPS = 589824; FS = 22080; BR = 135000; DPB = 110400; break;
+                        case CodecProfileLevel.AVCLevel51:
+                            MBPS = 983040; FS = 36864; BR = 240000; DPB = 184320; break;
+                        case CodecProfileLevel.AVCLevel52:
+                            MBPS = 2073600; FS = 36864; BR = 240000; DPB = 184320; break;
+                        default:
+                            Log.w(TAG, "Unrecognized level "
+                                    + profileLevel.level + " for " + mime);
+                            errors |= ERROR_UNRECOGNIZED;
+                    }
+                    // profile determines the bitrate multiplier (BR is given
+                    // in units of 1000/1250/3000 bps depending on profile)
+                    switch (profileLevel.profile) {
+                        case CodecProfileLevel.AVCProfileConstrainedHigh:
+                        case CodecProfileLevel.AVCProfileHigh:
+                            BR *= 1250; break;
+                        case CodecProfileLevel.AVCProfileHigh10:
+                            BR *= 3000; break;
+                        case CodecProfileLevel.AVCProfileExtended:
+                        case CodecProfileLevel.AVCProfileHigh422:
+                        case CodecProfileLevel.AVCProfileHigh444:
+                            Log.w(TAG, "Unsupported profile "
+                                    + profileLevel.profile + " for " + mime);
+                            errors |= ERROR_UNSUPPORTED;
+                            supported = false;
+                            // fall through - treat as base profile
+                        case CodecProfileLevel.AVCProfileConstrainedBaseline:
+                        case CodecProfileLevel.AVCProfileBaseline:
+                        case CodecProfileLevel.AVCProfileMain:
+                            BR *= 1000; break;
+                        default:
+                            Log.w(TAG, "Unrecognized profile "
+                                    + profileLevel.profile + " for " + mime);
+                            errors |= ERROR_UNRECOGNIZED;
+                            BR *= 1000;
+                    }
+                    if (supported) {
+                        errors &= ~ERROR_NONE_SUPPORTED;
+                    }
+                    maxBlocksPerSecond = Math.max(MBPS, maxBlocksPerSecond);
+                    maxBlocks = Math.max(FS, maxBlocks);
+                    maxBps = Math.max(BR, maxBps);
+                    maxDPBBlocks = Math.max(maxDPBBlocks, DPB);
+                }
+
+                // H.264 A.3.1: picture width/height in MBs <= sqrt(8 * MaxFS)
+                int maxLengthInBlocks = (int)(Math.sqrt(maxBlocks * 8));
+                applyMacroBlockLimits(
+                        maxLengthInBlocks, maxLengthInBlocks,
+                        maxBlocks, maxBlocksPerSecond,
+                        16 /* blockWidth */, 16 /* blockHeight */,
+                        1 /* widthAlignment */, 1 /* heightAlignment */);
+            } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_VIDEO_MPEG2)) {
+                int maxWidth = 11, maxHeight = 9, maxRate = 15;
+                maxBlocks = 99;
+                maxBlocksPerSecond = 1485;
+                maxBps = 64000;
+                for (CodecProfileLevel profileLevel: profileLevels) {
+                    // W/H are max dimensions in MBs; FR is max frame rate
+                    int MBPS = 0, FS = 0, BR = 0, FR = 0, W = 0, H = 0;
+                    boolean supported = true;
+                    switch (profileLevel.profile) {
+                        case CodecProfileLevel.MPEG2ProfileSimple:
+                            switch (profileLevel.level) {
+                                case CodecProfileLevel.MPEG2LevelML:
+                                    FR = 30; W = 45; H = 36; MBPS = 40500; FS = 1620; BR = 15000; break;
+                                default:
+                                    Log.w(TAG, "Unrecognized profile/level "
+                                            + profileLevel.profile + "/"
+                                            + profileLevel.level + " for " + mime);
+                                    errors |= ERROR_UNRECOGNIZED;
+                            }
+                            break;
+                        case CodecProfileLevel.MPEG2ProfileMain:
+                            switch (profileLevel.level) {
+                                case CodecProfileLevel.MPEG2LevelLL:
+                                    FR = 30; W = 22; H = 18; MBPS = 11880; FS = 396; BR = 4000; break;
+                                case CodecProfileLevel.MPEG2LevelML:
+                                    FR = 30; W = 45; H = 36; MBPS = 40500; FS = 1620; BR = 15000; break;
+                                case CodecProfileLevel.MPEG2LevelH14:
+                                    FR = 60; W = 90; H = 68; MBPS = 183600; FS = 6120; BR = 60000; break;
+                                case CodecProfileLevel.MPEG2LevelHL:
+                                    FR = 60; W = 120; H = 68; MBPS = 244800; FS = 8160; BR = 80000; break;
+                                case CodecProfileLevel.MPEG2LevelHP:
+                                    FR = 60; W = 120; H = 68; MBPS = 489600; FS = 8160; BR = 80000; break;
+                                default:
+                                    Log.w(TAG, "Unrecognized profile/level "
+                                            + profileLevel.profile + "/"
+                                            + profileLevel.level + " for " + mime);
+                                    errors |= ERROR_UNRECOGNIZED;
+                            }
+                            break;
+                        case CodecProfileLevel.MPEG2Profile422:
+                        case CodecProfileLevel.MPEG2ProfileSNR:
+                        case CodecProfileLevel.MPEG2ProfileSpatial:
+                        case CodecProfileLevel.MPEG2ProfileHigh:
+                            Log.i(TAG, "Unsupported profile "
+                                    + profileLevel.profile + " for " + mime);
+                            errors |= ERROR_UNSUPPORTED;
+                            supported = false;
+                            break;
+                        default:
+                            Log.w(TAG, "Unrecognized profile "
+                                    + profileLevel.profile + " for " + mime);
+                            errors |= ERROR_UNRECOGNIZED;
+                    }
+                    if (supported) {
+                        errors &= ~ERROR_NONE_SUPPORTED;
+                    }
+                    maxBlocksPerSecond = Math.max(MBPS, maxBlocksPerSecond);
+                    maxBlocks = Math.max(FS, maxBlocks);
+                    maxBps = Math.max(BR * 1000, maxBps);
+                    maxWidth = Math.max(W, maxWidth);
+                    maxHeight = Math.max(H, maxHeight);
+                    maxRate = Math.max(FR, maxRate);
+                }
+                applyMacroBlockLimits(maxWidth, maxHeight,
+                        maxBlocks, maxBlocksPerSecond,
+                        16 /* blockWidth */, 16 /* blockHeight */,
+                        1 /* widthAlignment */, 1 /* heightAlignment */);
+                mFrameRateRange = mFrameRateRange.intersect(12, maxRate);
+            } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_VIDEO_MPEG4)) {
+                int maxWidth = 11, maxHeight = 9, maxRate = 15;
+                maxBlocks = 99;
+                maxBlocksPerSecond = 1485;
+                maxBps = 64000;
+                for (CodecProfileLevel profileLevel: profileLevels) {
+                    int MBPS = 0, FS = 0, BR = 0, FR = 0, W = 0, H = 0;
+                    boolean strict = false; // true: W, H and FR are individual max limits
+                    boolean supported = true;
+                    switch (profileLevel.profile) {
+                        case CodecProfileLevel.MPEG4ProfileSimple:
+                            switch (profileLevel.level) {
+                                case CodecProfileLevel.MPEG4Level0:
+                                    strict = true;
+                                    FR = 15; W = 11; H = 9; MBPS = 1485; FS = 99; BR = 64; break;
+                                case CodecProfileLevel.MPEG4Level1:
+                                    FR = 30; W = 11; H = 9; MBPS = 1485; FS = 99; BR = 64; break;
+                                case CodecProfileLevel.MPEG4Level0b:
+                                    strict = true;
+                                    FR = 15; W = 11; H = 9; MBPS = 1485; FS = 99; BR = 128; break;
+                                case CodecProfileLevel.MPEG4Level2:
+                                    FR = 30; W = 22; H = 18; MBPS = 5940; FS = 396; BR = 128; break;
+                                case CodecProfileLevel.MPEG4Level3:
+                                    FR = 30; W = 22; H = 18; MBPS = 11880; FS = 396; BR = 384; break;
+                                case CodecProfileLevel.MPEG4Level4a:
+                                    FR = 30; W = 40; H = 30; MBPS = 36000; FS = 1200; BR = 4000; break;
+                                case CodecProfileLevel.MPEG4Level5:
+                                    FR = 30; W = 45; H = 36; MBPS = 40500; FS = 1620; BR = 8000; break;
+                                case CodecProfileLevel.MPEG4Level6:
+                                    FR = 30; W = 80; H = 45; MBPS = 108000; FS = 3600; BR = 12000; break;
+                                default:
+                                    Log.w(TAG, "Unrecognized profile/level "
+                                            + profileLevel.profile + "/"
+                                            + profileLevel.level + " for " + mime);
+                                    errors |= ERROR_UNRECOGNIZED;
+                            }
+                            break;
+                        case CodecProfileLevel.MPEG4ProfileAdvancedSimple:
+                            switch (profileLevel.level) {
+                                case CodecProfileLevel.MPEG4Level0:
+                                case CodecProfileLevel.MPEG4Level1:
+                                    FR = 30; W = 11; H = 9; MBPS = 2970; FS = 99; BR = 128; break;
+                                case CodecProfileLevel.MPEG4Level2:
+                                    FR = 30; W = 22; H = 18; MBPS = 5940; FS = 396; BR = 384; break;
+                                case CodecProfileLevel.MPEG4Level3:
+                                    FR = 30; W = 22; H = 18; MBPS = 11880; FS = 396; BR = 768; break;
+                                case CodecProfileLevel.MPEG4Level3b:
+                                    FR = 30; W = 22; H = 18; MBPS = 11880; FS = 396; BR = 1500; break;
+                                case CodecProfileLevel.MPEG4Level4:
+                                    FR = 30; W = 44; H = 36; MBPS = 23760; FS = 792; BR = 3000; break;
+                                case CodecProfileLevel.MPEG4Level5:
+                                    FR = 30; W = 45; H = 36; MBPS = 48600; FS = 1620; BR = 8000; break;
+                                default:
+                                    Log.w(TAG, "Unrecognized profile/level "
+                                            + profileLevel.profile + "/"
+                                            + profileLevel.level + " for " + mime);
+                                    errors |= ERROR_UNRECOGNIZED;
+                            }
+                            break;
+                        case CodecProfileLevel.MPEG4ProfileMain: // 2-4
+                        case CodecProfileLevel.MPEG4ProfileNbit: // 2
+                        case CodecProfileLevel.MPEG4ProfileAdvancedRealTime: // 1-4
+                        case CodecProfileLevel.MPEG4ProfileCoreScalable: // 1-3
+                        case CodecProfileLevel.MPEG4ProfileAdvancedCoding: // 1-4
+                        case CodecProfileLevel.MPEG4ProfileCore: // 1-2
+                        case CodecProfileLevel.MPEG4ProfileAdvancedCore: // 1-4
+                        case CodecProfileLevel.MPEG4ProfileSimpleScalable: // 0-2
+                        case CodecProfileLevel.MPEG4ProfileHybrid: // 1-2
+
+                        // Studio profiles are not supported by our codecs.
+
+                        // Only profiles that can decode simple object types are considered.
+                        // The following profiles are not able to.
+                        case CodecProfileLevel.MPEG4ProfileBasicAnimated: // 1-2
+                        case CodecProfileLevel.MPEG4ProfileScalableTexture: // 1
+                        case CodecProfileLevel.MPEG4ProfileSimpleFace: // 1-2
+                        case CodecProfileLevel.MPEG4ProfileAdvancedScalable: // 1-3
+                        case CodecProfileLevel.MPEG4ProfileSimpleFBA: // 1-2
+                            Log.i(TAG, "Unsupported profile "
+                                    + profileLevel.profile + " for " + mime);
+                            errors |= ERROR_UNSUPPORTED;
+                            supported = false;
+                            break;
+                        default:
+                            Log.w(TAG, "Unrecognized profile "
+                                    + profileLevel.profile + " for " + mime);
+                            errors |= ERROR_UNRECOGNIZED;
+                    }
+                    if (supported) {
+                        errors &= ~ERROR_NONE_SUPPORTED;
+                    }
+                    maxBlocksPerSecond = Math.max(MBPS, maxBlocksPerSecond);
+                    maxBlocks = Math.max(FS, maxBlocks);
+                    maxBps = Math.max(BR * 1000, maxBps);
+                    if (strict) {
+                        maxWidth = Math.max(W, maxWidth);
+                        maxHeight = Math.max(H, maxHeight);
+                        maxRate = Math.max(FR, maxRate);
+                    } else {
+                        // assuming max 60 fps frame rate and 1:2 aspect ratio
+                        int maxDim = (int)Math.sqrt(FS * 2);
+                        maxWidth = Math.max(maxDim, maxWidth);
+                        maxHeight = Math.max(maxDim, maxHeight);
+                        maxRate = Math.max(Math.max(FR, 60), maxRate);
+                    }
+                }
+                applyMacroBlockLimits(maxWidth, maxHeight,
+                        maxBlocks, maxBlocksPerSecond,
+                        16 /* blockWidth */, 16 /* blockHeight */,
+                        1 /* widthAlignment */, 1 /* heightAlignment */);
+                mFrameRateRange = mFrameRateRange.intersect(12, maxRate);
+            } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_VIDEO_H263)) {
+                int maxWidth = 11, maxHeight = 9, maxRate = 15;
+                int minWidth = maxWidth, minHeight = maxHeight;
+                int minAlignment = 16;
+                maxBlocks = 99;
+                maxBlocksPerSecond = 1485;
+                maxBps = 64000;
+                for (CodecProfileLevel profileLevel: profileLevels) {
+                    int MBPS = 0, BR = 0, FR = 0, W = 0, H = 0, minW = minWidth, minH = minHeight;
+                    boolean strict = false; // true: support only sQCIF, QCIF (maybe CIF)
+                    switch (profileLevel.level) {
+                        case CodecProfileLevel.H263Level10:
+                            strict = true; // only supports sQCIF & QCIF
+                            FR = 15; W = 11; H = 9; BR = 1; MBPS = W * H * FR; break;
+                        case CodecProfileLevel.H263Level20:
+                            strict = true; // only supports sQCIF, QCIF & CIF
+                            FR = 30; W = 22; H = 18; BR = 2; MBPS = W * H * 15; break;
+                        case CodecProfileLevel.H263Level30:
+                            strict = true; // only supports sQCIF, QCIF & CIF
+                            FR = 30; W = 22; H = 18; BR = 6; MBPS = W * H * FR; break;
+                        case CodecProfileLevel.H263Level40:
+                            strict = true; // only supports sQCIF, QCIF & CIF
+                            FR = 30; W = 22; H = 18; BR = 32; MBPS = W * H * FR; break;
+                        case CodecProfileLevel.H263Level45:
+                            // only implies level 10 support
+                            strict = profileLevel.profile == CodecProfileLevel.H263ProfileBaseline
+                                    || profileLevel.profile ==
+                                            CodecProfileLevel.H263ProfileBackwardCompatible;
+                            if (!strict) {
+                                minW = 1; minH = 1; minAlignment = 4;
+                            }
+                            FR = 15; W = 11; H = 9; BR = 2; MBPS = W * H * FR; break;
+                        case CodecProfileLevel.H263Level50:
+                            // only supports 50fps for H > 15
+                            minW = 1; minH = 1; minAlignment = 4;
+                            FR = 60; W = 22; H = 18; BR = 64; MBPS = W * H * 50; break;
+                        case CodecProfileLevel.H263Level60:
+                            // only supports 50fps for H > 15
+                            minW = 1; minH = 1; minAlignment = 4;
+                            FR = 60; W = 45; H = 18; BR = 128; MBPS = W * H * 50; break;
+                        case CodecProfileLevel.H263Level70:
+                            // only supports 50fps for H > 30
+                            minW = 1; minH = 1; minAlignment = 4;
+                            FR = 60; W = 45; H = 36; BR = 256; MBPS = W * H * 50; break;
+                        default:
+                            Log.w(TAG, "Unrecognized profile/level " + profileLevel.profile
+                                    + "/" + profileLevel.level + " for " + mime);
+                            errors |= ERROR_UNRECOGNIZED;
+                    }
+                    switch (profileLevel.profile) {
+                        case CodecProfileLevel.H263ProfileBackwardCompatible:
+                        case CodecProfileLevel.H263ProfileBaseline:
+                        case CodecProfileLevel.H263ProfileH320Coding:
+                        case CodecProfileLevel.H263ProfileHighCompression:
+                        case CodecProfileLevel.H263ProfileHighLatency:
+                        case CodecProfileLevel.H263ProfileInterlace:
+                        case CodecProfileLevel.H263ProfileInternet:
+                        case CodecProfileLevel.H263ProfileISWV2:
+                        case CodecProfileLevel.H263ProfileISWV3:
+                            break;
+                        default:
+                            Log.w(TAG, "Unrecognized profile "
+                                    + profileLevel.profile + " for " + mime);
+                            errors |= ERROR_UNRECOGNIZED;
+                    }
+                    if (strict) {
+                        // Strict levels define sub-QCIF min size and enumerated sizes. We cannot
+                        // express support for "only sQCIF & QCIF (& CIF)" using VideoCapabilities
+                        // but we can express "only QCIF (& CIF)", so set minimum size at QCIF.
+                        // minW = 8; minH = 6;
+                        minW = 11; minH = 9;
+                    } else {
+                        // any support for non-strict levels (including unrecognized profiles or
+                        // levels) allow custom frame size support beyond supported limits
+                        // (other than bitrate)
+                        mAllowMbOverride = true;
+                    }
+                    errors &= ~ERROR_NONE_SUPPORTED;
+                    maxBlocksPerSecond = Math.max(MBPS, maxBlocksPerSecond);
+                    maxBlocks = Math.max(W * H, maxBlocks);
+                    maxBps = Math.max(BR * 64000, maxBps);
+                    maxWidth = Math.max(W, maxWidth);
+                    maxHeight = Math.max(H, maxHeight);
+                    maxRate = Math.max(FR, maxRate);
+                    minWidth = Math.min(minW, minWidth);
+                    minHeight = Math.min(minH, minHeight);
+                }
+                // unless we encountered custom frame size support, limit size to QCIF and CIF
+                // using aspect ratio.
+                if (!mAllowMbOverride) {
+                    mBlockAspectRatioRange =
+                            Range.create(new Rational(11, 9), new Rational(11, 9));
+                }
+                applyMacroBlockLimits(
+                        minWidth, minHeight,
+                        maxWidth, maxHeight,
+                        maxBlocks, maxBlocksPerSecond,
+                        16 /* blockWidth */, 16 /* blockHeight */,
+                        minAlignment /* widthAlignment */, minAlignment /* heightAlignment */);
+                mFrameRateRange = Range.create(1, maxRate);
+            } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_VIDEO_VP8)) {
+                maxBlocks = Integer.MAX_VALUE;
+                maxBlocksPerSecond = Integer.MAX_VALUE;
+
+                // TODO: set to 100Mbps for now, need a number for VP8
+                maxBps = 100000000;
+
+                // profile levels are not indicative for VPx, but verify
+                // them nonetheless
+                for (CodecProfileLevel profileLevel: profileLevels) {
+                    switch (profileLevel.level) {
+                        case CodecProfileLevel.VP8Level_Version0:
+                        case CodecProfileLevel.VP8Level_Version1:
+                        case CodecProfileLevel.VP8Level_Version2:
+                        case CodecProfileLevel.VP8Level_Version3:
+                            break;
+                        default:
+                            Log.w(TAG, "Unrecognized level "
+                                    + profileLevel.level + " for " + mime);
+                            errors |= ERROR_UNRECOGNIZED;
+                    }
+                    switch (profileLevel.profile) {
+                        case CodecProfileLevel.VP8ProfileMain:
+                            break;
+                        default:
+                            Log.w(TAG, "Unrecognized profile "
+                                    + profileLevel.profile + " for " + mime);
+                            errors |= ERROR_UNRECOGNIZED;
+                    }
+                    errors &= ~ERROR_NONE_SUPPORTED;
+                }
+
+                final int blockSize = 16;
+                applyMacroBlockLimits(Short.MAX_VALUE, Short.MAX_VALUE,
+                        maxBlocks, maxBlocksPerSecond, blockSize, blockSize,
+                        1 /* widthAlignment */, 1 /* heightAlignment */);
+            } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_VIDEO_VP9)) {
+                maxBlocksPerSecond = 829440;
+                maxBlocks = 36864;
+                maxBps = 200000;
+                int maxDim = 512;
+
+                for (CodecProfileLevel profileLevel: profileLevels) {
+                    long SR = 0; // luma sample rate
+                    int FS = 0; // luma picture size
+                    int BR = 0; // bit rate kbps
+                    int D = 0; // luma dimension
+                    switch (profileLevel.level) {
+                        case CodecProfileLevel.VP9Level1:
+                            SR = 829440; FS = 36864; BR = 200; D = 512; break;
+                        case CodecProfileLevel.VP9Level11:
+                            SR = 2764800; FS = 73728; BR = 800; D = 768; break;
+                        case CodecProfileLevel.VP9Level2:
+                            SR = 4608000; FS = 122880; BR = 1800; D = 960; break;
+                        case CodecProfileLevel.VP9Level21:
+                            SR = 9216000; FS = 245760; BR = 3600; D = 1344; break;
+                        case CodecProfileLevel.VP9Level3:
+                            SR = 20736000; FS = 552960; BR = 7200; D = 2048; break;
+                        case CodecProfileLevel.VP9Level31:
+                            SR = 36864000; FS = 983040; BR = 12000; D = 2752; break;
+                        case CodecProfileLevel.VP9Level4:
+                            SR = 83558400; FS = 2228224; BR = 18000; D = 4160; break;
+                        case CodecProfileLevel.VP9Level41:
+                            SR = 160432128; FS = 2228224; BR = 30000; D = 4160; break;
+                        case CodecProfileLevel.VP9Level5:
+                            SR = 311951360; FS = 8912896; BR = 60000; D = 8384; break;
+                        case CodecProfileLevel.VP9Level51:
+                            SR = 588251136; FS = 8912896; BR = 120000; D = 8384; break;
+                        case CodecProfileLevel.VP9Level52:
+                            SR = 1176502272; FS = 8912896; BR = 180000; D = 8384; break;
+                        case CodecProfileLevel.VP9Level6:
+                            SR = 1176502272; FS = 35651584; BR = 180000; D = 16832; break;
+                        case CodecProfileLevel.VP9Level61:
+                            SR = 2353004544L; FS = 35651584; BR = 240000; D = 16832; break;
+                        case CodecProfileLevel.VP9Level62:
+                            SR = 4706009088L; FS = 35651584; BR = 480000; D = 16832; break;
+                        default:
+                            Log.w(TAG, "Unrecognized level "
+                                    + profileLevel.level + " for " + mime);
+                            errors |= ERROR_UNRECOGNIZED;
+                    }
+                    switch (profileLevel.profile) {
+                        case CodecProfileLevel.VP9Profile0:
+                        case CodecProfileLevel.VP9Profile1:
+                        case CodecProfileLevel.VP9Profile2:
+                        case CodecProfileLevel.VP9Profile3:
+                        case CodecProfileLevel.VP9Profile2HDR:
+                        case CodecProfileLevel.VP9Profile3HDR:
+                            break;
+                        default:
+                            Log.w(TAG, "Unrecognized profile "
+                                    + profileLevel.profile + " for " + mime);
+                            errors |= ERROR_UNRECOGNIZED;
+                    }
+                    errors &= ~ERROR_NONE_SUPPORTED;
+                    maxBlocksPerSecond = Math.max(SR, maxBlocksPerSecond);
+                    maxBlocks = Math.max(FS, maxBlocks);
+                    maxBps = Math.max(BR * 1000, maxBps);
+                    maxDim = Math.max(D, maxDim);
+                }
+
+                // convert the sample-based VP9 limits to 8x8 blocks
+                final int blockSize = 8;
+                int maxLengthInBlocks = Utils.divUp(maxDim, blockSize);
+                maxBlocks = Utils.divUp(maxBlocks, blockSize * blockSize);
+                maxBlocksPerSecond = Utils.divUp(maxBlocksPerSecond, blockSize * blockSize);
+
+                applyMacroBlockLimits(
+                        maxLengthInBlocks, maxLengthInBlocks,
+                        maxBlocks, maxBlocksPerSecond,
+                        blockSize, blockSize,
+                        1 /* widthAlignment */, 1 /* heightAlignment */);
+            } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_VIDEO_HEVC)) {
+                // CTBs are at least 8x8 so use 8x8 block size
+                maxBlocks = 36864 >> 6; // 192x192 pixels == 576 8x8 blocks
+                maxBlocksPerSecond = maxBlocks * 15;
+                maxBps = 128000;
+                for (CodecProfileLevel profileLevel: profileLevels) {
+                    double FR = 0;
+                    int FS = 0;
+                    int BR = 0;
+                    switch (profileLevel.level) {
+                        /* The HEVC spec talks only in a very convoluted manner about the
+                           existence of levels 1-3.1 for High tier, which could also be
+                           understood as 'decoders and encoders should treat these levels
+                           as if they were Main tier', so we do that. */
+                        case CodecProfileLevel.HEVCMainTierLevel1:
+                        case CodecProfileLevel.HEVCHighTierLevel1:
+                            FR = 15; FS = 36864; BR = 128; break;
+                        case CodecProfileLevel.HEVCMainTierLevel2:
+                        case CodecProfileLevel.HEVCHighTierLevel2:
+                            FR = 30; FS = 122880; BR = 1500; break;
+                        case CodecProfileLevel.HEVCMainTierLevel21:
+                        case CodecProfileLevel.HEVCHighTierLevel21:
+                            FR = 30; FS = 245760; BR = 3000; break;
+                        case CodecProfileLevel.HEVCMainTierLevel3:
+                        case CodecProfileLevel.HEVCHighTierLevel3:
+                            FR = 30; FS = 552960; BR = 6000; break;
+                        case CodecProfileLevel.HEVCMainTierLevel31:
+                        case CodecProfileLevel.HEVCHighTierLevel31:
+                            FR = 33.75; FS = 983040; BR = 10000; break;
+                        case CodecProfileLevel.HEVCMainTierLevel4:
+                            FR = 30; FS = 2228224; BR = 12000; break;
+                        case CodecProfileLevel.HEVCHighTierLevel4:
+                            FR = 30; FS = 2228224; BR = 30000; break;
+                        case CodecProfileLevel.HEVCMainTierLevel41:
+                            FR = 60; FS = 2228224; BR = 20000; break;
+                        case CodecProfileLevel.HEVCHighTierLevel41:
+                            FR = 60; FS = 2228224; BR = 50000; break;
+                        case CodecProfileLevel.HEVCMainTierLevel5:
+                            FR = 30; FS = 8912896; BR = 25000; break;
+                        case CodecProfileLevel.HEVCHighTierLevel5:
+                            FR = 30; FS = 8912896; BR = 100000; break;
+                        case CodecProfileLevel.HEVCMainTierLevel51:
+                            FR = 60; FS = 8912896; BR = 40000; break;
+                        case CodecProfileLevel.HEVCHighTierLevel51:
+                            FR = 60; FS = 8912896; BR = 160000; break;
+                        case CodecProfileLevel.HEVCMainTierLevel52:
+                            FR = 120; FS = 8912896; BR = 60000; break;
+                        case CodecProfileLevel.HEVCHighTierLevel52:
+                            FR = 120; FS = 8912896; BR = 240000; break;
+                        case CodecProfileLevel.HEVCMainTierLevel6:
+                            FR = 30; FS = 35651584; BR = 60000; break;
+                        case CodecProfileLevel.HEVCHighTierLevel6:
+                            FR = 30; FS = 35651584; BR = 240000; break;
+                        case CodecProfileLevel.HEVCMainTierLevel61:
+                            FR = 60; FS = 35651584; BR = 120000; break;
+                        case CodecProfileLevel.HEVCHighTierLevel61:
+                            FR = 60; FS = 35651584; BR = 480000; break;
+                        case CodecProfileLevel.HEVCMainTierLevel62:
+                            FR = 120; FS = 35651584; BR = 240000; break;
+                        case CodecProfileLevel.HEVCHighTierLevel62:
+                            FR = 120; FS = 35651584; BR = 800000; break;
+                        default:
+                            Log.w(TAG, "Unrecognized level "
+                                    + profileLevel.level + " for " + mime);
+                            errors |= ERROR_UNRECOGNIZED;
+                    }
+                    switch (profileLevel.profile) {
+                        case CodecProfileLevel.HEVCProfileMain:
+                        case CodecProfileLevel.HEVCProfileMain10:
+                        case CodecProfileLevel.HEVCProfileMain10HDR10:
+                            break;
+                        default:
+                            Log.w(TAG, "Unrecognized profile "
+                                    + profileLevel.profile + " for " + mime);
+                            errors |= ERROR_UNRECOGNIZED;
+                    }
+
+                    /* DPB logic:
+                       if      (width * height <= FS / 4)    DPB = 16;
+                       else if (width * height <= FS / 2)    DPB = 12;
+                       else if (width * height <= FS * 0.75) DPB = 8;
+                       else                                  DPB = 6;
+                    */
+
+                    FS >>= 6; // convert pixels to blocks
+                    errors &= ~ERROR_NONE_SUPPORTED;
+                    maxBlocksPerSecond = Math.max((int)(FR * FS), maxBlocksPerSecond);
+                    maxBlocks = Math.max(FS, maxBlocks);
+                    maxBps = Math.max(BR * 1000, maxBps);
+                }
+
+                int maxLengthInBlocks = (int)(Math.sqrt(maxBlocks * 8));
+                applyMacroBlockLimits(
+                        maxLengthInBlocks, maxLengthInBlocks,
+                        maxBlocks, maxBlocksPerSecond,
+                        8 /* blockWidth */, 8 /* blockHeight */,
+                        1 /* widthAlignment */, 1 /* heightAlignment */);
+            } else {
+                Log.w(TAG, "Unsupported mime " + mime);
+                // using minimal bitrate here. should be overridden by
+                // info from media_codecs.xml
+                maxBps = 64000;
+                errors |= ERROR_UNSUPPORTED;
+            }
+            // record the derived bitrate limit and any profile/level errors
+            mBitrateRange = Range.create(1, maxBps);
+            mParent.mError |= errors;
+        }
+ }
+
+ /**
+ * A class that supports querying the encoding capabilities of a codec.
+ */
+ public static final class EncoderCapabilities {
+        /**
+         * Returns the supported range of quality values.
+         *
+         * @return the closed range of supported encoder quality values
+         * @hide
+         */
+        public Range<Integer> getQualityRange() {
+            return mQualityRange;
+        }
+
+        /**
+         * Returns the supported range of encoder complexity values.
+         * <p>
+         * Some codecs may support multiple complexity levels, where higher
+         * complexity values use more encoder tools (e.g. perform more
+         * intensive calculations) to improve the quality or the compression
+         * ratio. Use a lower value to save power and/or time.
+         *
+         * @return the closed range of supported complexity values
+         */
+        public Range<Integer> getComplexityRange() {
+            return mComplexityRange;
+        }
+
+        /** Constant quality mode */
+        public static final int BITRATE_MODE_CQ = 0;
+        /** Variable bitrate mode */
+        public static final int BITRATE_MODE_VBR = 1;
+        /** Constant bitrate mode */
+        public static final int BITRATE_MODE_CBR = 2;
+
+        // Known bitrate-control modes, mapping feature names to mode values.
+        // NOTE(review): the third Feature argument appears to mark the mode
+        // assumed by default (VBR) -- confirm against Feature's definition.
+        private static final Feature[] bitrates = new Feature[] {
+            new Feature("VBR", BITRATE_MODE_VBR, true),
+            new Feature("CBR", BITRATE_MODE_CBR, false),
+            new Feature("CQ", BITRATE_MODE_CQ, false)
+        };
+
+ private static int parseBitrateMode(String mode) {
+ for (Feature feat: bitrates) {
+ if (feat.mName.equalsIgnoreCase(mode)) {
+ return feat.mValue;
+ }
+ }
+ return 0;
+ }
+
+ /**
+ * Query whether a bitrate mode is supported.
+ */
+ public boolean isBitrateModeSupported(int mode) {
+ for (Feature feat: bitrates) {
+ if (mode == feat.mValue) {
+ return (mBitControl & (1 << mode)) != 0;
+ }
+ }
+ return false;
+ }
+
+        // quality/complexity ranges advertised by the codec (default 0..0)
+        private Range<Integer> mQualityRange;
+        private Range<Integer> mComplexityRange;
+        // capabilities object this encoder-capability set belongs to
+        private CodecCapabilities mParent;
+
+        /* no public constructor */
+        private EncoderCapabilities() { }
+
+        /** @hide */
+        public static EncoderCapabilities create(
+                MediaFormat info, CodecCapabilities parent) {
+            // factory: build an empty instance, then populate it from the info
+            final EncoderCapabilities result = new EncoderCapabilities();
+            result.init(info, parent);
+            return result;
+        }
+
+        /** @hide */
+        public void init(MediaFormat info, CodecCapabilities parent) {
+            // no support for complexity or quality yet
+            mParent = parent;
+            mComplexityRange = Range.create(0, 0);
+            mQualityRange = Range.create(0, 0);
+            // assume VBR is supported until the codec info says otherwise
+            mBitControl = (1 << BITRATE_MODE_VBR);
+
+            // derive mime-specific defaults first, then let the codec's own
+            // info override them
+            applyLevelLimits();
+            parseFromInfo(info);
+        }
+
+ private void applyLevelLimits() {
+ String mime = mParent.getMimeType();
+ if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_FLAC)) {
+ mComplexityRange = Range.create(0, 8);
+ mBitControl = (1 << BITRATE_MODE_CQ);
+ } else if (mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_AMR_NB)
+ || mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_AMR_WB)
+ || mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_G711_ALAW)
+ || mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_G711_MLAW)
+ || mime.equalsIgnoreCase(MediaFormat.MIMETYPE_AUDIO_MSGSM)) {
+ mBitControl = (1 << BITRATE_MODE_CBR);
+ }
+ }
+
        // Bit mask of supported bitrate-control modes: bit n set means
        // BITRATE_MODE_* value n is supported (see isBitrateModeSupported).
        private int mBitControl;
        // Codec-declared defaults; null when the codec publishes none.
        private Integer mDefaultComplexity;
        private Integer mDefaultQuality;
        // Codec-published description of the quality scale (may be null).
        private String mQualityScale;
+
+ private void parseFromInfo(MediaFormat info) {
+ Map<String, Object> map = info.getMap();
+
+ if (info.containsKey("complexity-range")) {
+ mComplexityRange = Utils
+ .parseIntRange(info.getString("complexity-range"), mComplexityRange);
+ // TODO should we limit this to level limits?
+ }
+ if (info.containsKey("quality-range")) {
+ mQualityRange = Utils
+ .parseIntRange(info.getString("quality-range"), mQualityRange);
+ }
+ if (info.containsKey("feature-bitrate-control")) {
+ for (String mode: info.getString("feature-bitrate-control").split(",")) {
+ mBitControl |= parseBitrateMode(mode);
+ }
+ }
+
+ try {
+ mDefaultComplexity = Integer.parseInt((String)map.get("complexity-default"));
+ } catch (NumberFormatException e) { }
+
+ try {
+ mDefaultQuality = Integer.parseInt((String)map.get("quality-default"));
+ } catch (NumberFormatException e) { }
+
+ mQualityScale = (String)map.get("quality-scale");
+ }
+
+ private boolean supports(
+ Integer complexity, Integer quality, Integer profile) {
+ boolean ok = true;
+ if (ok && complexity != null) {
+ ok = mComplexityRange.contains(complexity);
+ }
+ if (ok && quality != null) {
+ ok = mQualityRange.contains(quality);
+ }
+ if (ok && profile != null) {
+ for (CodecProfileLevel pl: mParent.profileLevels) {
+ if (pl.profile == profile) {
+ profile = null;
+ break;
+ }
+ }
+ ok = profile == null;
+ }
+ return ok;
+ }
+
        /** @hide */
        public void setDefaultFormat(MediaFormat format) {
            // don't list trivial quality/complexity as default for now
            // (a trivial range has equal bounds, i.e. the codec published no
            // real range in parseFromInfo)
            if (!mQualityRange.getUpper().equals(mQualityRange.getLower())
                    && mDefaultQuality != null) {
                format.setInteger(MediaFormat.KEY_QUALITY, mDefaultQuality);
            }
            if (!mComplexityRange.getUpper().equals(mComplexityRange.getLower())
                    && mDefaultComplexity != null) {
                format.setInteger(MediaFormat.KEY_COMPLEXITY, mDefaultComplexity);
            }
            // bitrates are listed in order of preference; pick the first one
            // whose bit is set in the supported-modes mask
            for (Feature feat: bitrates) {
                if ((mBitControl & (1 << feat.mValue)) != 0) {
                    format.setInteger(MediaFormat.KEY_BITRATE_MODE, feat.mValue);
                    break;
                }
            }
        }
+
        /**
         * Returns whether this encoder can be configured with {@code format}:
         * checks the requested bitrate mode, complexity (including FLAC's
         * compression-level alias), profile (including AAC's profile alias)
         * and quality against the encoder's capabilities.
         * @throws IllegalArgumentException if the format sets both a generic
         *         key and its codec-specific alias to conflicting values.
         * @hide
         */
        public boolean supportsFormat(MediaFormat format) {
            final Map<String, Object> map = format.getMap();
            final String mime = mParent.getMimeType();

            Integer mode = (Integer)map.get(MediaFormat.KEY_BITRATE_MODE);
            if (mode != null && !isBitrateModeSupported(mode)) {
                return false;
            }

            Integer complexity = (Integer)map.get(MediaFormat.KEY_COMPLEXITY);
            if (MediaFormat.MIMETYPE_AUDIO_FLAC.equalsIgnoreCase(mime)) {
                // FLAC expresses complexity through its own compression-level
                // key; accept either key, but reject conflicting values.
                Integer flacComplexity =
                    (Integer)map.get(MediaFormat.KEY_FLAC_COMPRESSION_LEVEL);
                if (complexity == null) {
                    complexity = flacComplexity;
                } else if (flacComplexity != null && !complexity.equals(flacComplexity)) {
                    throw new IllegalArgumentException(
                            "conflicting values for complexity and " +
                            "flac-compression-level");
                }
            }

            // other audio parameters
            Integer profile = (Integer)map.get(MediaFormat.KEY_PROFILE);
            if (MediaFormat.MIMETYPE_AUDIO_AAC.equalsIgnoreCase(mime)) {
                // AAC has a legacy aac-profile key mirroring the generic one.
                Integer aacProfile = (Integer)map.get(MediaFormat.KEY_AAC_PROFILE);
                if (profile == null) {
                    profile = aacProfile;
                } else if (aacProfile != null && !aacProfile.equals(profile)) {
                    throw new IllegalArgumentException(
                            "conflicting values for profile and aac-profile");
                }
            }

            Integer quality = (Integer)map.get(MediaFormat.KEY_QUALITY);

            return supports(complexity, quality, profile);
        }
+ };
+
    /**
     * Encapsulates the profiles available for a codec component.
     * <p>You can get a set of {@link MediaCodecInfo.CodecProfileLevel} objects for a given
     * {@link MediaCodecInfo} object from the
     * {@link MediaCodecInfo.CodecCapabilities#profileLevels} field.
     */
    public static final class CodecProfileLevel {
        // The constants below mirror the OpenMAX IL enums named in each
        // section comment. Most are power-of-two flags; some enums (e.g. the
        // MPEG-2 profile/level values and the AAC object types) use plain
        // sequential values instead.

        // from OMX_VIDEO_AVCPROFILETYPE
        public static final int AVCProfileBaseline = 0x01;
        public static final int AVCProfileMain     = 0x02;
        public static final int AVCProfileExtended = 0x04;
        public static final int AVCProfileHigh     = 0x08;
        public static final int AVCProfileHigh10   = 0x10;
        public static final int AVCProfileHigh422  = 0x20;
        public static final int AVCProfileHigh444  = 0x40;
        public static final int AVCProfileConstrainedBaseline = 0x10000;
        public static final int AVCProfileConstrainedHigh     = 0x80000;

        // from OMX_VIDEO_AVCLEVELTYPE
        public static final int AVCLevel1       = 0x01;
        public static final int AVCLevel1b      = 0x02;
        public static final int AVCLevel11      = 0x04;
        public static final int AVCLevel12      = 0x08;
        public static final int AVCLevel13      = 0x10;
        public static final int AVCLevel2       = 0x20;
        public static final int AVCLevel21      = 0x40;
        public static final int AVCLevel22      = 0x80;
        public static final int AVCLevel3       = 0x100;
        public static final int AVCLevel31      = 0x200;
        public static final int AVCLevel32      = 0x400;
        public static final int AVCLevel4       = 0x800;
        public static final int AVCLevel41      = 0x1000;
        public static final int AVCLevel42      = 0x2000;
        public static final int AVCLevel5       = 0x4000;
        public static final int AVCLevel51      = 0x8000;
        public static final int AVCLevel52      = 0x10000;

        // from OMX_VIDEO_H263PROFILETYPE
        public static final int H263ProfileBaseline             = 0x01;
        public static final int H263ProfileH320Coding           = 0x02;
        public static final int H263ProfileBackwardCompatible   = 0x04;
        public static final int H263ProfileISWV2                = 0x08;
        public static final int H263ProfileISWV3                = 0x10;
        public static final int H263ProfileHighCompression      = 0x20;
        public static final int H263ProfileInternet             = 0x40;
        public static final int H263ProfileInterlace            = 0x80;
        public static final int H263ProfileHighLatency          = 0x100;

        // from OMX_VIDEO_H263LEVELTYPE
        public static final int H263Level10      = 0x01;
        public static final int H263Level20      = 0x02;
        public static final int H263Level30      = 0x04;
        public static final int H263Level40      = 0x08;
        public static final int H263Level45      = 0x10;
        public static final int H263Level50      = 0x20;
        public static final int H263Level60      = 0x40;
        public static final int H263Level70      = 0x80;

        // from OMX_VIDEO_MPEG4PROFILETYPE
        public static final int MPEG4ProfileSimple              = 0x01;
        public static final int MPEG4ProfileSimpleScalable      = 0x02;
        public static final int MPEG4ProfileCore                = 0x04;
        public static final int MPEG4ProfileMain                = 0x08;
        public static final int MPEG4ProfileNbit                = 0x10;
        public static final int MPEG4ProfileScalableTexture     = 0x20;
        public static final int MPEG4ProfileSimpleFace          = 0x40;
        public static final int MPEG4ProfileSimpleFBA           = 0x80;
        public static final int MPEG4ProfileBasicAnimated       = 0x100;
        public static final int MPEG4ProfileHybrid              = 0x200;
        public static final int MPEG4ProfileAdvancedRealTime    = 0x400;
        public static final int MPEG4ProfileCoreScalable        = 0x800;
        public static final int MPEG4ProfileAdvancedCoding      = 0x1000;
        public static final int MPEG4ProfileAdvancedCore        = 0x2000;
        public static final int MPEG4ProfileAdvancedScalable    = 0x4000;
        public static final int MPEG4ProfileAdvancedSimple      = 0x8000;

        // from OMX_VIDEO_MPEG4LEVELTYPE
        // (note: MPEG4Level3b is a combination value, 0x10 | 0x08)
        public static final int MPEG4Level0      = 0x01;
        public static final int MPEG4Level0b     = 0x02;
        public static final int MPEG4Level1      = 0x04;
        public static final int MPEG4Level2      = 0x08;
        public static final int MPEG4Level3      = 0x10;
        public static final int MPEG4Level3b     = 0x18;
        public static final int MPEG4Level4      = 0x20;
        public static final int MPEG4Level4a     = 0x40;
        public static final int MPEG4Level5      = 0x80;
        public static final int MPEG4Level6      = 0x100;

        // from OMX_VIDEO_MPEG2PROFILETYPE (sequential values, not flags)
        public static final int MPEG2ProfileSimple              = 0x00;
        public static final int MPEG2ProfileMain                = 0x01;
        public static final int MPEG2Profile422                 = 0x02;
        public static final int MPEG2ProfileSNR                 = 0x03;
        public static final int MPEG2ProfileSpatial             = 0x04;
        public static final int MPEG2ProfileHigh                = 0x05;

        // from OMX_VIDEO_MPEG2LEVELTYPE (sequential values, not flags)
        public static final int MPEG2LevelLL     = 0x00;
        public static final int MPEG2LevelML     = 0x01;
        public static final int MPEG2LevelH14    = 0x02;
        public static final int MPEG2LevelHL     = 0x03;
        public static final int MPEG2LevelHP     = 0x04;

        // from OMX_AUDIO_AACPROFILETYPE (MPEG-4 audio object type numbers)
        public static final int AACObjectMain       = 1;
        public static final int AACObjectLC         = 2;
        public static final int AACObjectSSR        = 3;
        public static final int AACObjectLTP        = 4;
        public static final int AACObjectHE         = 5;
        public static final int AACObjectScalable   = 6;
        public static final int AACObjectERLC       = 17;
        public static final int AACObjectERScalable = 20;
        public static final int AACObjectLD         = 23;
        public static final int AACObjectHE_PS      = 29;
        public static final int AACObjectELD        = 39;

        // from OMX_VIDEO_VP8LEVELTYPE
        public static final int VP8Level_Version0 = 0x01;
        public static final int VP8Level_Version1 = 0x02;
        public static final int VP8Level_Version2 = 0x04;
        public static final int VP8Level_Version3 = 0x08;

        // from OMX_VIDEO_VP8PROFILETYPE
        public static final int VP8ProfileMain = 0x01;

        // from OMX_VIDEO_VP9PROFILETYPE
        public static final int VP9Profile0 = 0x01;
        public static final int VP9Profile1 = 0x02;
        public static final int VP9Profile2 = 0x04;
        public static final int VP9Profile3 = 0x08;
        // HDR profiles also support passing HDR metadata
        public static final int VP9Profile2HDR = 0x1000;
        public static final int VP9Profile3HDR = 0x2000;

        // from OMX_VIDEO_VP9LEVELTYPE
        public static final int VP9Level1  = 0x1;
        public static final int VP9Level11 = 0x2;
        public static final int VP9Level2  = 0x4;
        public static final int VP9Level21 = 0x8;
        public static final int VP9Level3  = 0x10;
        public static final int VP9Level31 = 0x20;
        public static final int VP9Level4  = 0x40;
        public static final int VP9Level41 = 0x80;
        public static final int VP9Level5  = 0x100;
        public static final int VP9Level51 = 0x200;
        public static final int VP9Level52 = 0x400;
        public static final int VP9Level6  = 0x800;
        public static final int VP9Level61 = 0x1000;
        public static final int VP9Level62 = 0x2000;

        // from OMX_VIDEO_HEVCPROFILETYPE
        public static final int HEVCProfileMain        = 0x01;
        public static final int HEVCProfileMain10      = 0x02;
        public static final int HEVCProfileMain10HDR10 = 0x1000;

        // from OMX_VIDEO_HEVCLEVELTYPE
        // (main- and high-tier flags for each numeric level alternate)
        public static final int HEVCMainTierLevel1  = 0x1;
        public static final int HEVCHighTierLevel1  = 0x2;
        public static final int HEVCMainTierLevel2  = 0x4;
        public static final int HEVCHighTierLevel2  = 0x8;
        public static final int HEVCMainTierLevel21 = 0x10;
        public static final int HEVCHighTierLevel21 = 0x20;
        public static final int HEVCMainTierLevel3  = 0x40;
        public static final int HEVCHighTierLevel3  = 0x80;
        public static final int HEVCMainTierLevel31 = 0x100;
        public static final int HEVCHighTierLevel31 = 0x200;
        public static final int HEVCMainTierLevel4  = 0x400;
        public static final int HEVCHighTierLevel4  = 0x800;
        public static final int HEVCMainTierLevel41 = 0x1000;
        public static final int HEVCHighTierLevel41 = 0x2000;
        public static final int HEVCMainTierLevel5  = 0x4000;
        public static final int HEVCHighTierLevel5  = 0x8000;
        public static final int HEVCMainTierLevel51 = 0x10000;
        public static final int HEVCHighTierLevel51 = 0x20000;
        public static final int HEVCMainTierLevel52 = 0x40000;
        public static final int HEVCHighTierLevel52 = 0x80000;
        public static final int HEVCMainTierLevel6  = 0x100000;
        public static final int HEVCHighTierLevel6  = 0x200000;
        public static final int HEVCMainTierLevel61 = 0x400000;
        public static final int HEVCHighTierLevel61 = 0x800000;
        public static final int HEVCMainTierLevel62 = 0x1000000;
        public static final int HEVCHighTierLevel62 = 0x2000000;

        // Mask of all HEVC high-tier level flags.
        private static final int HEVCHighTierLevels =
            HEVCHighTierLevel1 | HEVCHighTierLevel2 | HEVCHighTierLevel21 | HEVCHighTierLevel3 |
            HEVCHighTierLevel31 | HEVCHighTierLevel4 | HEVCHighTierLevel41 | HEVCHighTierLevel5 |
            HEVCHighTierLevel51 | HEVCHighTierLevel52 | HEVCHighTierLevel6 | HEVCHighTierLevel61 |
            HEVCHighTierLevel62;

        // from OMX_VIDEO_DOLBYVISIONPROFILETYPE
        public static final int DolbyVisionProfileDvavPer = 0x1;
        public static final int DolbyVisionProfileDvavPen = 0x2;
        public static final int DolbyVisionProfileDvheDer = 0x4;
        public static final int DolbyVisionProfileDvheDen = 0x8;
        public static final int DolbyVisionProfileDvheDtr = 0x10;
        public static final int DolbyVisionProfileDvheStn = 0x20;
        public static final int DolbyVisionProfileDvheDth = 0x40;
        public static final int DolbyVisionProfileDvheDtb = 0x80;
        public static final int DolbyVisionProfileDvheSt  = 0x100;
        public static final int DolbyVisionProfileDvavSe  = 0x200;

        // from OMX_VIDEO_DOLBYVISIONLEVELTYPE
        public static final int DolbyVisionLevelHd24  = 0x1;
        public static final int DolbyVisionLevelHd30  = 0x2;
        public static final int DolbyVisionLevelFhd24 = 0x4;
        public static final int DolbyVisionLevelFhd30 = 0x8;
        public static final int DolbyVisionLevelFhd60 = 0x10;
        public static final int DolbyVisionLevelUhd24 = 0x20;
        public static final int DolbyVisionLevelUhd30 = 0x40;
        public static final int DolbyVisionLevelUhd48 = 0x80;
        public static final int DolbyVisionLevelUhd60 = 0x100;

        /**
         * Defined in the OpenMAX IL specs, depending on the type of media
         * this can be OMX_VIDEO_AVCPROFILETYPE, OMX_VIDEO_H263PROFILETYPE,
         * OMX_VIDEO_MPEG4PROFILETYPE, OMX_VIDEO_VP8PROFILETYPE or OMX_VIDEO_VP9PROFILETYPE.
         * (Other media types use the corresponding *PROFILETYPE enums whose
         * values are declared above, e.g. HEVC, MPEG-2, Dolby Vision or AAC.)
         */
        public int profile;

        /**
         * Defined in the OpenMAX IL specs, depending on the type of media
         * this can be OMX_VIDEO_AVCLEVELTYPE, OMX_VIDEO_H263LEVELTYPE
         * OMX_VIDEO_MPEG4LEVELTYPE, OMX_VIDEO_VP8LEVELTYPE or OMX_VIDEO_VP9LEVELTYPE.
         * (Other media types use the corresponding *LEVELTYPE enums whose
         * values are declared above.)
         *
         * Note that VP9 decoder on platforms before {@link android.os.Build.VERSION_CODES#N} may
         * not advertise a profile level support. For those VP9 decoders, please use
         * {@link VideoCapabilities} to determine the codec capabilities.
         */
        public int level;
    };
+
+ /**
+ * Enumerates the capabilities of the codec component. Since a single
+ * component can support data of a variety of types, the type has to be
+ * specified to yield a meaningful result.
+ * @param type The MIME type to query
+ */
+ public final CodecCapabilities getCapabilitiesForType(
+ String type) {
+ CodecCapabilities caps = mCaps.get(type);
+ if (caps == null) {
+ throw new IllegalArgumentException("codec does not support type");
+ }
+ // clone writable object
+ return caps.dup();
+ }
+
+ /** @hide */
+ public MediaCodecInfo makeRegular() {
+ ArrayList<CodecCapabilities> caps = new ArrayList<CodecCapabilities>();
+ for (CodecCapabilities c: mCaps.values()) {
+ if (c.isRegular()) {
+ caps.add(c);
+ }
+ }
+ if (caps.size() == 0) {
+ return null;
+ } else if (caps.size() == mCaps.size()) {
+ return this;
+ }
+
+ return new MediaCodecInfo(
+ mName, mIsEncoder,
+ caps.toArray(new CodecCapabilities[caps.size()]));
+ }
+}
diff --git a/android/media/MediaCodecList.java b/android/media/MediaCodecList.java
new file mode 100644
index 00000000..3cb4cbe9
--- /dev/null
+++ b/android/media/MediaCodecList.java
@@ -0,0 +1,258 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.util.Log;
+
+import android.media.MediaCodecInfo;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Map;
+
+/**
+ * Allows you to enumerate available codecs, each specified as a {@link MediaCodecInfo} object,
+ * find a codec supporting a given format and query the capabilities
+ * of a given codec.
+ * <p>See {@link MediaCodecInfo} for sample usage.
+ */
+final public class MediaCodecList {
+ private static final String TAG = "MediaCodecList";
+
+ /**
+ * Count the number of available (regular) codecs.
+ *
+ * @deprecated Use {@link #getCodecInfos} instead.
+ *
+ * @see #REGULAR_CODECS
+ */
+ public static final int getCodecCount() {
+ initCodecList();
+ return sRegularCodecInfos.length;
+ }
+
+ private static native final int native_getCodecCount();
+
+ /**
+ * Return the {@link MediaCodecInfo} object for the codec at
+ * the given {@code index} in the regular list.
+ *
+ * @deprecated Use {@link #getCodecInfos} instead.
+ *
+ * @see #REGULAR_CODECS
+ */
+ public static final MediaCodecInfo getCodecInfoAt(int index) {
+ initCodecList();
+ if (index < 0 || index > sRegularCodecInfos.length) {
+ throw new IllegalArgumentException();
+ }
+ return sRegularCodecInfos[index];
+ }
+
+ /* package private */ static final Map<String, Object> getGlobalSettings() {
+ synchronized (sInitLock) {
+ if (sGlobalSettings == null) {
+ sGlobalSettings = native_getGlobalSettings();
+ }
+ }
+ return sGlobalSettings;
+ }
+
+ private static Object sInitLock = new Object();
+ private static MediaCodecInfo[] sAllCodecInfos;
+ private static MediaCodecInfo[] sRegularCodecInfos;
+ private static Map<String, Object> sGlobalSettings;
+
+ private static final void initCodecList() {
+ synchronized (sInitLock) {
+ if (sRegularCodecInfos == null) {
+ int count = native_getCodecCount();
+ ArrayList<MediaCodecInfo> regulars = new ArrayList<MediaCodecInfo>();
+ ArrayList<MediaCodecInfo> all = new ArrayList<MediaCodecInfo>();
+ for (int index = 0; index < count; index++) {
+ try {
+ MediaCodecInfo info = getNewCodecInfoAt(index);
+ all.add(info);
+ info = info.makeRegular();
+ if (info != null) {
+ regulars.add(info);
+ }
+ } catch (Exception e) {
+ Log.e(TAG, "Could not get codec capabilities", e);
+ }
+ }
+ sRegularCodecInfos =
+ regulars.toArray(new MediaCodecInfo[regulars.size()]);
+ sAllCodecInfos =
+ all.toArray(new MediaCodecInfo[all.size()]);
+ }
+ }
+ }
+
+ private static MediaCodecInfo getNewCodecInfoAt(int index) {
+ String[] supportedTypes = getSupportedTypes(index);
+ MediaCodecInfo.CodecCapabilities[] caps =
+ new MediaCodecInfo.CodecCapabilities[supportedTypes.length];
+ int typeIx = 0;
+ for (String type: supportedTypes) {
+ caps[typeIx++] = getCodecCapabilities(index, type);
+ }
+ return new MediaCodecInfo(
+ getCodecName(index), isEncoder(index), caps);
+ }
+
+ /* package private */ static native final String getCodecName(int index);
+
+ /* package private */ static native final boolean isEncoder(int index);
+
+ /* package private */ static native final String[] getSupportedTypes(int index);
+
+ /* package private */ static native final MediaCodecInfo.CodecCapabilities
+ getCodecCapabilities(int index, String type);
+
+ /* package private */ static native final Map<String, Object> native_getGlobalSettings();
+
+ /* package private */ static native final int findCodecByName(String codec);
+
+ /** @hide */
+ public static MediaCodecInfo getInfoFor(String codec) {
+ initCodecList();
+ return sAllCodecInfos[findCodecByName(codec)];
+ }
+
+ private static native final void native_init();
+
+ /**
+ * Use in {@link #MediaCodecList} to enumerate only codecs that are suitable
+ * for regular (buffer-to-buffer) decoding or encoding.
+ *
+ * <em>NOTE:</em> These are the codecs that are returned prior to API 21,
+ * using the now deprecated static methods.
+ */
+ public static final int REGULAR_CODECS = 0;
+
+ /**
+ * Use in {@link #MediaCodecList} to enumerate all codecs, even ones that are
+ * not suitable for regular (buffer-to-buffer) decoding or encoding. These
+ * include codecs, for example, that only work with special input or output
+ * surfaces, such as secure-only or tunneled-only codecs.
+ *
+ * @see MediaCodecInfo.CodecCapabilities#isFormatSupported
+ * @see MediaCodecInfo.CodecCapabilities#FEATURE_SecurePlayback
+ * @see MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback
+ */
+ public static final int ALL_CODECS = 1;
+
+ private MediaCodecList() {
+ this(REGULAR_CODECS);
+ }
+
+ private MediaCodecInfo[] mCodecInfos;
+
+ /**
+ * Create a list of media-codecs of a specific kind.
+ * @param kind Either {@code REGULAR_CODECS} or {@code ALL_CODECS}.
+ */
+ public MediaCodecList(int kind) {
+ initCodecList();
+ if (kind == REGULAR_CODECS) {
+ mCodecInfos = sRegularCodecInfos;
+ } else {
+ mCodecInfos = sAllCodecInfos;
+ }
+ }
+
+ /**
+ * Returns the list of {@link MediaCodecInfo} objects for the list
+ * of media-codecs.
+ */
+ public final MediaCodecInfo[] getCodecInfos() {
+ return Arrays.copyOf(mCodecInfos, mCodecInfos.length);
+ }
+
+ static {
+ System.loadLibrary("media_jni");
+ native_init();
+
+ // mediaserver is not yet alive here
+ }
+
+ /**
+ * Find a decoder supporting a given {@link MediaFormat} in the list
+ * of media-codecs.
+ *
+ * <p class=note>
+ * <strong>Note:</strong> On {@link android.os.Build.VERSION_CODES#LOLLIPOP},
+ * {@code format} must not contain a {@linkplain MediaFormat#KEY_FRAME_RATE
+ * frame rate}. Use
+ * <code class=prettyprint>format.setString(MediaFormat.KEY_FRAME_RATE, null)</code>
+ * to clear any existing frame rate setting in the format.
+ *
+ * @see MediaCodecList.CodecCapabilities.isFormatSupported for format keys
+ * considered per android versions when evaluating suitable codecs.
+ *
+ * @param format A decoder media format with optional feature directives.
+ * @throws IllegalArgumentException if format is not a valid media format.
+ * @throws NullPointerException if format is null.
+ * @return the name of a decoder that supports the given format and feature
+ * requests, or {@code null} if no such codec has been found.
+ */
+ public final String findDecoderForFormat(MediaFormat format) {
+ return findCodecForFormat(false /* encoder */, format);
+ }
+
+ /**
+ * Find an encoder supporting a given {@link MediaFormat} in the list
+ * of media-codecs.
+ *
+ * <p class=note>
+ * <strong>Note:</strong> On {@link android.os.Build.VERSION_CODES#LOLLIPOP},
+ * {@code format} must not contain a {@linkplain MediaFormat#KEY_FRAME_RATE
+ * frame rate}. Use
+ * <code class=prettyprint>format.setString(MediaFormat.KEY_FRAME_RATE, null)</code>
+ * to clear any existing frame rate setting in the format.
+ *
+ * @see MediaCodecList.CodecCapabilities.isFormatSupported for format keys
+ * considered per android versions when evaluating suitable codecs.
+ *
+ * @param format An encoder media format with optional feature directives.
+ * @throws IllegalArgumentException if format is not a valid media format.
+ * @throws NullPointerException if format is null.
+ * @return the name of an encoder that supports the given format and feature
+ * requests, or {@code null} if no such codec has been found.
+ */
+ public final String findEncoderForFormat(MediaFormat format) {
+ return findCodecForFormat(true /* encoder */, format);
+ }
+
+ private String findCodecForFormat(boolean encoder, MediaFormat format) {
+ String mime = format.getString(MediaFormat.KEY_MIME);
+ for (MediaCodecInfo info: mCodecInfos) {
+ if (info.isEncoder() != encoder) {
+ continue;
+ }
+ try {
+ MediaCodecInfo.CodecCapabilities caps = info.getCapabilitiesForType(mime);
+ if (caps != null && caps.isFormatSupported(format)) {
+ return info.getName();
+ }
+ } catch (IllegalArgumentException e) {
+ // type is not supported
+ }
+ }
+ return null;
+ }
+}
diff --git a/android/media/MediaCrypto.java b/android/media/MediaCrypto.java
new file mode 100644
index 00000000..474d8b9e
--- /dev/null
+++ b/android/media/MediaCrypto.java
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.NonNull;
+import android.media.MediaCryptoException;
+import java.util.UUID;
+
+/**
+ * MediaCrypto class can be used in conjunction with {@link android.media.MediaCodec}
+ * to decode encrypted media data.
+ *
+ * Crypto schemes are assigned 16 byte UUIDs,
+ * the method {@link #isCryptoSchemeSupported} can be used to query if a given
+ * scheme is supported on the device.
+ *
+ */
+public final class MediaCrypto {
+ /**
+ * Query if the given scheme identified by its UUID is supported on
+ * this device.
+ * @param uuid The UUID of the crypto scheme.
+ */
+ public static final boolean isCryptoSchemeSupported(@NonNull UUID uuid) {
+ return isCryptoSchemeSupportedNative(getByteArrayFromUUID(uuid));
+ }
+
+ @NonNull
+ private static final byte[] getByteArrayFromUUID(@NonNull UUID uuid) {
+ long msb = uuid.getMostSignificantBits();
+ long lsb = uuid.getLeastSignificantBits();
+
+ byte[] uuidBytes = new byte[16];
+ for (int i = 0; i < 8; ++i) {
+ uuidBytes[i] = (byte)(msb >>> (8 * (7 - i)));
+ uuidBytes[8 + i] = (byte)(lsb >>> (8 * (7 - i)));
+ }
+
+ return uuidBytes;
+ }
+
+ private static final native boolean isCryptoSchemeSupportedNative(@NonNull byte[] uuid);
+
+ /**
+ * Instantiate a MediaCrypto object using opaque, crypto scheme specific
+ * data.
+ * @param uuid The UUID of the crypto scheme.
+ * @param initData Opaque initialization data specific to the crypto scheme.
+ */
+ public MediaCrypto(@NonNull UUID uuid, @NonNull byte[] initData) throws MediaCryptoException {
+ native_setup(getByteArrayFromUUID(uuid), initData);
+ }
+
+ /**
+ * Query if the crypto scheme requires the use of a secure decoder
+ * to decode data of the given mime type.
+ * @param mime The mime type of the media data
+ */
+ public final native boolean requiresSecureDecoderComponent(@NonNull String mime);
+
+ /**
+ * Associate a MediaDrm session with this MediaCrypto instance. The
+ * MediaDrm session is used to securely load decryption keys for a
+ * crypto scheme. The crypto keys loaded through the MediaDrm session
+ * may be selected for use during the decryption operation performed
+ * by {@link android.media.MediaCodec#queueSecureInputBuffer} by specifying
+ * their key ids in the {@link android.media.MediaCodec.CryptoInfo#key} field.
+ * @param sessionId the MediaDrm sessionId to associate with this
+ * MediaCrypto instance
+ * @throws MediaCryptoException on failure to set the sessionId
+ */
+ public final native void setMediaDrmSession(@NonNull byte[] sessionId)
+ throws MediaCryptoException;
+
+ @Override
+ protected void finalize() {
+ native_finalize();
+ }
+
+ public native final void release();
+ private static native final void native_init();
+
+ private native final void native_setup(@NonNull byte[] uuid, @NonNull byte[] initData)
+ throws MediaCryptoException;
+
+ private native final void native_finalize();
+
+ static {
+ System.loadLibrary("media_jni");
+ native_init();
+ }
+
+ private long mNativeContext;
+}
diff --git a/android/media/MediaCryptoException.java b/android/media/MediaCryptoException.java
new file mode 100644
index 00000000..32ddf473
--- /dev/null
+++ b/android/media/MediaCryptoException.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.Nullable;
+
/**
 * Exception thrown if MediaCrypto object could not be instantiated or
 * if unable to perform an operation on the MediaCrypto object.
 */
public final class MediaCryptoException extends Exception {
    /**
     * @param detailMessage human-readable reason for the failure; may be null.
     */
    public MediaCryptoException(@Nullable String detailMessage) {
        super(detailMessage);
    }
}
diff --git a/android/media/MediaDataSource.java b/android/media/MediaDataSource.java
new file mode 100644
index 00000000..948da0b9
--- /dev/null
+++ b/android/media/MediaDataSource.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media;
+
+import java.io.Closeable;
+import java.io.IOException;
+
/**
 * For supplying media data to the framework. Implement this if your app has
 * special requirements for the way media data is obtained.
 *
 * <p class="note">Methods of this interface may be called on multiple different
 * threads. There will be a thread synchronization point between each call to ensure that
 * modifications to the state of your MediaDataSource are visible to future calls. This means
 * you don't need to do your own synchronization unless you're modifying the
 * MediaDataSource from another thread while it's being used by the framework.</p>
 */
public abstract class MediaDataSource implements Closeable {
    /**
     * Called to request data from the given position.
     *
     * Implementations should write up to {@code size} bytes into
     * {@code buffer}, and return the number of bytes written.
     *
     * Return {@code 0} if size is zero (thus no bytes are read).
     *
     * Return {@code -1} to indicate that end of stream is reached.
     *
     * @param position the position in the data source to read from.
     * @param buffer the buffer to read the data into.
     * @param offset the offset within buffer to read the data into.
     * @param size the number of bytes to read.
     * @throws IOException on fatal errors.
     * @return the number of bytes read, or -1 if the end of stream was
     *         reached.
     */
    public abstract int readAt(long position, byte[] buffer, int offset, int size)
            throws IOException;

    /**
     * Called to get the size of the data source.
     *
     * @throws IOException on fatal errors
     * @return the size of data source in bytes, or -1 if the size is unknown.
     */
    public abstract long getSize() throws IOException;
}
diff --git a/android/media/MediaDescrambler.java b/android/media/MediaDescrambler.java
new file mode 100644
index 00000000..40c837b1
--- /dev/null
+++ b/android/media/MediaDescrambler.java
@@ -0,0 +1,217 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.NonNull;
+import android.hardware.cas.V1_0.*;
+import android.media.MediaCasException.UnsupportedCasException;
+import android.os.IHwBinder;
+import android.os.RemoteException;
+import android.os.ServiceSpecificException;
+import android.util.Log;
+
+import java.nio.ByteBuffer;
+
+/**
+ * MediaDescrambler class can be used in conjunction with {@link android.media.MediaCodec}
+ * and {@link android.media.MediaExtractor} to decode media data scrambled by conditional
+ * access (CA) systems such as those in the ISO/IEC13818-1.
+ *
+ * A MediaDescrambler object is initialized from a session opened by a MediaCas object,
+ * and can be used to descramble media streams scrambled with that session's keys.
+ *
+ * Scrambling schemes are identified by 16-bit unsigned integer as in CA_system_id.
+ *
+ */
+public final class MediaDescrambler implements AutoCloseable {
+    private static final String TAG = "MediaDescrambler";
+    // Proxy to the remote descrambler service; null once this instance is
+    // closed or invalidated after a binder failure.
+    private IDescramblerBase mIDescrambler;
+
+    // Throws IllegalStateException if this instance has been closed or invalidated.
+    private final void validateInternalStates() {
+        if (mIDescrambler == null) {
+            throw new IllegalStateException();
+        }
+    }
+
+    // Drops the remote reference (presumably dead after a RemoteException) and
+    // reports the failure to the caller as an IllegalStateException.
+    private final void cleanupAndRethrowIllegalState() {
+        mIDescrambler = null;
+        throw new IllegalStateException();
+    }
+
+    /**
+     * Instantiate a MediaDescrambler.
+     *
+     * @param CA_system_id The system id of the scrambling scheme.
+     *
+     * @throws UnsupportedCasException if the scrambling scheme is not supported.
+     */
+    public MediaDescrambler(int CA_system_id) throws UnsupportedCasException {
+        try {
+            mIDescrambler = MediaCas.getService().createDescrambler(CA_system_id);
+        } catch(Exception e) {
+            Log.e(TAG, "Failed to create descrambler: " + e);
+            mIDescrambler = null;
+        } finally {
+            // Both the exception path and a null return from createDescrambler
+            // are surfaced uniformly as UnsupportedCasException.
+            if (mIDescrambler == null) {
+                throw new UnsupportedCasException("Unsupported CA_system_id " + CA_system_id);
+            }
+        }
+        native_setup(mIDescrambler.asBinder());
+    }
+
+    // Package-private accessor for framework classes that need the raw binder
+    // of the underlying descrambler service.
+    IHwBinder getBinder() {
+        validateInternalStates();
+
+        return mIDescrambler.asBinder();
+    }
+
+    /**
+     * Query if the scrambling scheme requires the use of a secure decoder
+     * to decode data of the given mime type.
+     *
+     * @param mime The mime type of the media data
+     *
+     * @return true if a secure decoder is required for this mime type.
+     *
+     * @throws IllegalStateException if the descrambler instance is not valid.
+     */
+    public final boolean requiresSecureDecoderComponent(@NonNull String mime) {
+        validateInternalStates();
+
+        try {
+            return mIDescrambler.requiresSecureDecoderComponent(mime);
+        } catch (RemoteException e) {
+            cleanupAndRethrowIllegalState();
+        }
+        // Unreachable: cleanupAndRethrowIllegalState() always throws.
+        // Kept (fail-safe "true") to satisfy the compiler.
+        return true;
+    }
+
+    /**
+     * Associate a MediaCas session with this MediaDescrambler instance.
+     * The MediaCas session is used to securely load decryption keys for
+     * the descrambler. The crypto keys loaded through the MediaCas session
+     * may be selected for use during the descrambling operation performed
+     * by {@link android.media.MediaExtractor or @link
+     * android.media.MediaCodec#queueSecureInputBuffer} by specifying even
+     * or odd key in the {@link android.media.MediaCodec.CryptoInfo#key} field.
+     *
+     * @param session the MediaCas session to associate with this
+     * MediaDescrambler instance.
+     *
+     * @throws IllegalStateException if the descrambler instance is not valid.
+     * @throws MediaCasStateException for CAS-specific state exceptions.
+     */
+    public final void setMediaCasSession(@NonNull MediaCas.Session session) {
+        validateInternalStates();
+
+        try {
+            MediaCasStateException.throwExceptionIfNeeded(
+                    mIDescrambler.setMediaCasSession(session.mSessionId));
+        } catch (RemoteException e) {
+            cleanupAndRethrowIllegalState();
+        }
+    }
+
+    /**
+     * Descramble a ByteBuffer of data described by a
+     * {@link android.media.MediaCodec.CryptoInfo} structure.
+     *
+     * @param srcBuf ByteBuffer containing the scrambled data, which starts at
+     * srcBuf.position().
+     * @param dstBuf ByteBuffer to hold the descrambled data, which starts at
+     * dstBuf.position().
+     * @param cryptoInfo a {@link android.media.MediaCodec.CryptoInfo} structure
+     * describing the subsamples contained in src.
+     *
+     * @return number of bytes that have been successfully descrambled, with negative
+     * values indicating errors.
+     *
+     * @throws IllegalStateException if the descrambler instance is not valid.
+     * @throws MediaCasStateException for CAS-specific state exceptions.
+     */
+    public final int descramble(
+            @NonNull ByteBuffer srcBuf, @NonNull ByteBuffer dstBuf,
+            @NonNull MediaCodec.CryptoInfo cryptoInfo) {
+        validateInternalStates();
+
+        // Validate CryptoInfo in Java before crossing into native code.
+        if (cryptoInfo.numSubSamples <= 0) {
+            throw new IllegalArgumentException(
+                    "Invalid CryptoInfo: invalid numSubSamples=" + cryptoInfo.numSubSamples);
+        } else if (cryptoInfo.numBytesOfClearData == null
+                && cryptoInfo.numBytesOfEncryptedData == null) {
+            throw new IllegalArgumentException(
+                    "Invalid CryptoInfo: clearData and encryptedData size arrays are both null!");
+        } else if (cryptoInfo.numBytesOfClearData != null
+                && cryptoInfo.numBytesOfClearData.length < cryptoInfo.numSubSamples) {
+            throw new IllegalArgumentException(
+                    "Invalid CryptoInfo: numBytesOfClearData is too small!");
+        } else if (cryptoInfo.numBytesOfEncryptedData != null
+                && cryptoInfo.numBytesOfEncryptedData.length < cryptoInfo.numSubSamples) {
+            throw new IllegalArgumentException(
+                    "Invalid CryptoInfo: numBytesOfEncryptedData is too small!");
+        } else if (cryptoInfo.key == null || cryptoInfo.key.length != 16) {
+            throw new IllegalArgumentException(
+                    "Invalid CryptoInfo: key array is invalid!");
+        }
+
+        try {
+            // Only key[0] is passed down: it selects the even/odd descrambling key
+            // (see setMediaCasSession docs above).
+            return native_descramble(
+                    cryptoInfo.key[0],
+                    cryptoInfo.numSubSamples,
+                    cryptoInfo.numBytesOfClearData,
+                    cryptoInfo.numBytesOfEncryptedData,
+                    srcBuf, srcBuf.position(), srcBuf.limit(),
+                    dstBuf, dstBuf.position(), dstBuf.limit());
+        } catch (ServiceSpecificException e) {
+            MediaCasStateException.throwExceptionIfNeeded(e.errorCode, e.getMessage());
+        } catch (RemoteException e) {
+            cleanupAndRethrowIllegalState();
+        }
+        // NOTE(review): reached only if throwExceptionIfNeeded decided not to throw
+        // for the ServiceSpecificException; -1 is returned as a generic error.
+        return -1;
+    }
+
+    @Override
+    public void close() {
+        if (mIDescrambler != null) {
+            try {
+                mIDescrambler.release();
+            } catch (RemoteException e) {
+                // Ignored: the remote process is presumably gone, nothing to release.
+            } finally {
+                mIDescrambler = null;
+            }
+        }
+        native_release();
+    }
+
+    @Override
+    protected void finalize() {
+        // Safety net: release native resources if the app forgot to call close().
+        close();
+    }
+
+    // JNI entry points registered by libmedia_jni.
+    private static native final void native_init();
+    // (sic: "decramblerBinder" — typo in the original parameter name.)
+    private native final void native_setup(@NonNull IHwBinder decramblerBinder);
+    private native final void native_release();
+    private native final int native_descramble(
+            byte key, int numSubSamples, int[] numBytesOfClearData, int[] numBytesOfEncryptedData,
+            @NonNull ByteBuffer srcBuf, int srcOffset, int srcLimit,
+            ByteBuffer dstBuf, int dstOffset, int dstLimit) throws RemoteException;
+
+    static {
+        System.loadLibrary("media_jni");
+        native_init();
+    }
+
+    // Pointer to the native peer; read/written by native code only.
+    private long mNativeContext;
+}
diff --git a/android/media/MediaDescription.java b/android/media/MediaDescription.java
new file mode 100644
index 00000000..e6aea99e
--- /dev/null
+++ b/android/media/MediaDescription.java
@@ -0,0 +1,382 @@
+package android.media;
+
+import android.annotation.Nullable;
+import android.graphics.Bitmap;
+import android.media.browse.MediaBrowser;
+import android.net.Uri;
+import android.os.Bundle;
+import android.os.Parcel;
+import android.os.Parcelable;
+
+/**
+ * A simple set of metadata for a media item suitable for display. This can be
+ * created using the Builder or retrieved from existing metadata using
+ * {@link MediaMetadata#getDescription()}.
+ */
+public class MediaDescription implements Parcelable {
+    /**
+     * A unique persistent id for the content or null.
+     */
+    private final String mMediaId;
+    /**
+     * A primary title suitable for display or null.
+     */
+    private final CharSequence mTitle;
+    /**
+     * A subtitle suitable for display or null.
+     */
+    private final CharSequence mSubtitle;
+    /**
+     * A description suitable for display or null.
+     */
+    private final CharSequence mDescription;
+    /**
+     * A bitmap icon suitable for display or null.
+     */
+    private final Bitmap mIcon;
+    /**
+     * A Uri for an icon suitable for display or null.
+     */
+    private final Uri mIconUri;
+    /**
+     * Extras for opaque use by apps/system.
+     */
+    private final Bundle mExtras;
+    /**
+     * A Uri to identify this content.
+     */
+    private final Uri mMediaUri;
+
+    /**
+     * Used as a long extra field to indicate the bluetooth folder type of the media item as
+     * specified in the section 6.10.2.2 of the Bluetooth AVRCP 1.5. This is valid only for
+     * {@link MediaBrowser.MediaItem} with {@link MediaBrowser.MediaItem#FLAG_BROWSABLE}. The value
+     * should be one of the following:
+     * <ul>
+     * <li>{@link #BT_FOLDER_TYPE_MIXED}</li>
+     * <li>{@link #BT_FOLDER_TYPE_TITLES}</li>
+     * <li>{@link #BT_FOLDER_TYPE_ALBUMS}</li>
+     * <li>{@link #BT_FOLDER_TYPE_ARTISTS}</li>
+     * <li>{@link #BT_FOLDER_TYPE_GENRES}</li>
+     * <li>{@link #BT_FOLDER_TYPE_PLAYLISTS}</li>
+     * <li>{@link #BT_FOLDER_TYPE_YEARS}</li>
+     * </ul>
+     *
+     * @see #getExtras()
+     */
+    public static final String EXTRA_BT_FOLDER_TYPE = "android.media.extra.BT_FOLDER_TYPE";
+
+    /**
+     * The type of folder that is unknown or contains media elements of mixed types as specified in
+     * the section 6.10.2.2 of the Bluetooth AVRCP 1.5.
+     */
+    public static final long BT_FOLDER_TYPE_MIXED = 0;
+
+    /**
+     * The type of folder that contains media elements only as specified in the section 6.10.2.2 of
+     * the Bluetooth AVRCP 1.5.
+     */
+    public static final long BT_FOLDER_TYPE_TITLES = 1;
+
+    /**
+     * The type of folder that contains folders categorized by album as specified in the section
+     * 6.10.2.2 of the Bluetooth AVRCP 1.5.
+     */
+    public static final long BT_FOLDER_TYPE_ALBUMS = 2;
+
+    /**
+     * The type of folder that contains folders categorized by artist as specified in the section
+     * 6.10.2.2 of the Bluetooth AVRCP 1.5.
+     */
+    public static final long BT_FOLDER_TYPE_ARTISTS = 3;
+
+    /**
+     * The type of folder that contains folders categorized by genre as specified in the section
+     * 6.10.2.2 of the Bluetooth AVRCP 1.5.
+     */
+    public static final long BT_FOLDER_TYPE_GENRES = 4;
+
+    /**
+     * The type of folder that contains folders categorized by playlist as specified in the section
+     * 6.10.2.2 of the Bluetooth AVRCP 1.5.
+     */
+    public static final long BT_FOLDER_TYPE_PLAYLISTS = 5;
+
+    /**
+     * The type of folder that contains folders categorized by year as specified in the section
+     * 6.10.2.2 of the Bluetooth AVRCP 1.5.
+     */
+    public static final long BT_FOLDER_TYPE_YEARS = 6;
+
+    private MediaDescription(String mediaId, CharSequence title, CharSequence subtitle,
+            CharSequence description, Bitmap icon, Uri iconUri, Bundle extras, Uri mediaUri) {
+        mMediaId = mediaId;
+        mTitle = title;
+        mSubtitle = subtitle;
+        mDescription = description;
+        mIcon = icon;
+        mIconUri = iconUri;
+        mExtras = extras;
+        mMediaUri = mediaUri;
+    }
+
+    private MediaDescription(Parcel in) {
+        mMediaId = in.readString();
+        mTitle = in.readCharSequence();
+        mSubtitle = in.readCharSequence();
+        mDescription = in.readCharSequence();
+        mIcon = in.readParcelable(null);
+        mIconUri = in.readParcelable(null);
+        mExtras = in.readBundle();
+        mMediaUri = in.readParcelable(null);
+    }
+
+    /**
+     * Returns the media id or null. See
+     * {@link MediaMetadata#METADATA_KEY_MEDIA_ID}.
+     */
+    public @Nullable String getMediaId() {
+        return mMediaId;
+    }
+
+    /**
+     * Returns a title suitable for display or null.
+     *
+     * @return A title or null.
+     */
+    public @Nullable CharSequence getTitle() {
+        return mTitle;
+    }
+
+    /**
+     * Returns a subtitle suitable for display or null.
+     *
+     * @return A subtitle or null.
+     */
+    public @Nullable CharSequence getSubtitle() {
+        return mSubtitle;
+    }
+
+    /**
+     * Returns a description suitable for display or null.
+     *
+     * @return A description or null.
+     */
+    public @Nullable CharSequence getDescription() {
+        return mDescription;
+    }
+
+    /**
+     * Returns a bitmap icon suitable for display or null.
+     *
+     * @return An icon or null.
+     */
+    public @Nullable Bitmap getIconBitmap() {
+        return mIcon;
+    }
+
+    /**
+     * Returns a Uri for an icon suitable for display or null.
+     *
+     * @return An icon uri or null.
+     */
+    public @Nullable Uri getIconUri() {
+        return mIconUri;
+    }
+
+    /**
+     * Returns any extras that were added to the description.
+     *
+     * @return A bundle of extras or null.
+     */
+    public @Nullable Bundle getExtras() {
+        return mExtras;
+    }
+
+    /**
+     * Returns a Uri representing this content or null.
+     *
+     * @return A media Uri or null.
+     */
+    public @Nullable Uri getMediaUri() {
+        return mMediaUri;
+    }
+
+    @Override
+    public int describeContents() {
+        return 0;
+    }
+
+    @Override
+    public void writeToParcel(Parcel dest, int flags) {
+        dest.writeString(mMediaId);
+        dest.writeCharSequence(mTitle);
+        dest.writeCharSequence(mSubtitle);
+        dest.writeCharSequence(mDescription);
+        dest.writeParcelable(mIcon, flags);
+        dest.writeParcelable(mIconUri, flags);
+        dest.writeBundle(mExtras);
+        dest.writeParcelable(mMediaUri, flags);
+    }
+
+    /**
+     * Note: only the title, subtitle and description are compared, via their
+     * string values (so a null field compares equal to the literal "null").
+     */
+    @Override
+    public boolean equals(Object o) {
+        if (o == null) {
+            return false;
+        }
+
+        if (!(o instanceof MediaDescription)){
+            return false;
+        }
+
+        final MediaDescription d = (MediaDescription) o;
+
+        if (!String.valueOf(mTitle).equals(String.valueOf(d.mTitle))) {
+            return false;
+        }
+
+        if (!String.valueOf(mSubtitle).equals(String.valueOf(d.mSubtitle))) {
+            return false;
+        }
+
+        if (!String.valueOf(mDescription).equals(String.valueOf(d.mDescription))) {
+            return false;
+        }
+
+        return true;
+    }
+
+    /**
+     * Computed from the same fields (and the same String.valueOf conversion) as
+     * {@link #equals}, so that equal descriptions always share a hash code.
+     */
+    @Override
+    public int hashCode() {
+        int result = String.valueOf(mTitle).hashCode();
+        result = 31 * result + String.valueOf(mSubtitle).hashCode();
+        result = 31 * result + String.valueOf(mDescription).hashCode();
+        return result;
+    }
+
+    @Override
+    public String toString() {
+        return mTitle + ", " + mSubtitle + ", " + mDescription;
+    }
+
+    public static final Parcelable.Creator<MediaDescription> CREATOR =
+            new Parcelable.Creator<MediaDescription>() {
+                @Override
+                public MediaDescription createFromParcel(Parcel in) {
+                    return new MediaDescription(in);
+                }
+
+                @Override
+                public MediaDescription[] newArray(int size) {
+                    return new MediaDescription[size];
+                }
+            };
+
+    /**
+     * Builder for {@link MediaDescription} objects.
+     */
+    public static class Builder {
+        private String mMediaId;
+        private CharSequence mTitle;
+        private CharSequence mSubtitle;
+        private CharSequence mDescription;
+        private Bitmap mIcon;
+        private Uri mIconUri;
+        private Bundle mExtras;
+        private Uri mMediaUri;
+
+        /**
+         * Creates an initially empty builder.
+         */
+        public Builder() {
+        }
+
+        /**
+         * Sets the media id.
+         *
+         * @param mediaId The unique id for the item or null.
+         * @return this
+         */
+        public Builder setMediaId(@Nullable String mediaId) {
+            mMediaId = mediaId;
+            return this;
+        }
+
+        /**
+         * Sets the title.
+         *
+         * @param title A title suitable for display to the user or null.
+         * @return this
+         */
+        public Builder setTitle(@Nullable CharSequence title) {
+            mTitle = title;
+            return this;
+        }
+
+        /**
+         * Sets the subtitle.
+         *
+         * @param subtitle A subtitle suitable for display to the user or null.
+         * @return this
+         */
+        public Builder setSubtitle(@Nullable CharSequence subtitle) {
+            mSubtitle = subtitle;
+            return this;
+        }
+
+        /**
+         * Sets the description.
+         *
+         * @param description A description suitable for display to the user or
+         * null.
+         * @return this
+         */
+        public Builder setDescription(@Nullable CharSequence description) {
+            mDescription = description;
+            return this;
+        }
+
+        /**
+         * Sets the icon.
+         *
+         * @param icon A {@link Bitmap} icon suitable for display to the user or
+         * null.
+         * @return this
+         */
+        public Builder setIconBitmap(@Nullable Bitmap icon) {
+            mIcon = icon;
+            return this;
+        }
+
+        /**
+         * Sets the icon uri.
+         *
+         * @param iconUri A {@link Uri} for an icon suitable for display to the
+         * user or null.
+         * @return this
+         */
+        public Builder setIconUri(@Nullable Uri iconUri) {
+            mIconUri = iconUri;
+            return this;
+        }
+
+        /**
+         * Sets a bundle of extras.
+         *
+         * @param extras The extras to include with this description or null.
+         * @return this
+         */
+        public Builder setExtras(@Nullable Bundle extras) {
+            mExtras = extras;
+            return this;
+        }
+
+        /**
+         * Sets the media uri.
+         *
+         * @param mediaUri The content's {@link Uri} for the item or null.
+         * @return this
+         */
+        public Builder setMediaUri(@Nullable Uri mediaUri) {
+            mMediaUri = mediaUri;
+            return this;
+        }
+
+        /**
+         * Builds the {@link MediaDescription} from the fields set so far.
+         */
+        public MediaDescription build() {
+            return new MediaDescription(mMediaId, mTitle, mSubtitle, mDescription, mIcon, mIconUri,
+                    mExtras, mMediaUri);
+        }
+    }
+}
diff --git a/android/media/MediaDrm.java b/android/media/MediaDrm.java
new file mode 100644
index 00000000..88b1c5ff
--- /dev/null
+++ b/android/media/MediaDrm.java
@@ -0,0 +1,1330 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.ref.WeakReference;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.UUID;
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.annotation.StringDef;
+import android.annotation.SystemApi;
+import android.app.ActivityThread;
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Message;
+import android.os.Parcel;
+import android.util.Log;
+
+/**
+ * MediaDrm can be used to obtain keys for decrypting protected media streams, in
+ * conjunction with {@link android.media.MediaCrypto}. The MediaDrm APIs
+ * are designed to support the ISO/IEC 23001-7: Common Encryption standard, but
+ * may also be used to implement other encryption schemes.
+ * <p>
+ * Encrypted content is prepared using an encryption server and stored in a content
+ * library. The encrypted content is streamed or downloaded from the content library to
+ * client devices via content servers. Licenses to view the content are obtained from
+ * a License Server.
+ * <p>
+ * <p><img src="../../../images/mediadrm_overview.png"
+ * alt="MediaDrm Overview diagram"
+ * border="0" /></p>
+ * <p>
+ * Keys are requested from the license server using a key request. The key
+ * response is delivered to the client app, which provides the response to the
+ * MediaDrm API.
+ * <p>
+ * A Provisioning server may be required to distribute device-unique credentials to
+ * the devices.
+ * <p>
+ * Enforcing requirements related to the number of devices that may play content
+ * simultaneously can be performed either through key renewal or using the secure
+ * stop methods.
+ * <p>
+ * The following sequence diagram shows the interactions between the objects
+ * involved while playing back encrypted content:
+ * <p>
+ * <p><img src="../../../images/mediadrm_decryption_sequence.png"
+ * alt="MediaDrm Overview diagram"
+ * border="0" /></p>
+ * <p>
+ * The app first constructs {@link android.media.MediaExtractor} and
+ * {@link android.media.MediaCodec} objects. It accesses the DRM-scheme-identifying UUID,
+ * typically from metadata in the content, and uses this UUID to construct an instance
+ * of a MediaDrm object that is able to support the DRM scheme required by the content.
+ * Crypto schemes are assigned 16 byte UUIDs. The method {@link #isCryptoSchemeSupported}
+ * can be used to query if a given scheme is supported on the device.
+ * <p>
+ * The app calls {@link #openSession} to generate a sessionId that will uniquely identify
+ * the session in subsequent interactions. The app next uses the MediaDrm object to
+ * obtain a key request message and send it to the license server, then provide
+ * the server's response to the MediaDrm object.
+ * <p>
+ * Once the app has a sessionId, it can construct a MediaCrypto object from the UUID and
+ * sessionId. The MediaCrypto object is registered with the MediaCodec in the
+ * {@link MediaCodec.#configure} method to enable the codec to decrypt content.
+ * <p>
+ * When the app has constructed {@link android.media.MediaExtractor},
+ * {@link android.media.MediaCodec} and {@link android.media.MediaCrypto} objects,
+ * it proceeds to pull samples from the extractor and queue them into the decoder. For
+ * encrypted content, the samples returned from the extractor remain encrypted, they
+ * are only decrypted when the samples are delivered to the decoder.
+ * <p>
+ * MediaDrm methods throw {@link android.media.MediaDrm.MediaDrmStateException}
+ * when a method is called on a MediaDrm object that has had an unrecoverable failure
+ * in the DRM plugin or security hardware.
+ * {@link android.media.MediaDrm.MediaDrmStateException} extends
+ * {@link java.lang.IllegalStateException} with the addition of a developer-readable
+ * diagnostic information string associated with the exception.
+ * <p>
+ * In the event of a mediaserver process crash or restart while a MediaDrm object
+ * is active, MediaDrm methods may throw {@link android.media.MediaDrmResetException}.
+ * To recover, the app must release the MediaDrm object, then create and initialize
+ * a new one.
+ * <p>
+ * As {@link android.media.MediaDrmResetException} and
+ * {@link android.media.MediaDrm.MediaDrmStateException} both extend
+ * {@link java.lang.IllegalStateException}, they should be in an earlier catch()
+ * block than {@link java.lang.IllegalStateException} if handled separately.
+ * <p>
+ * <a name="Callbacks"></a>
+ * <h3>Callbacks</h3>
+ * <p>Applications should register for informational events in order
+ * to be informed of key state updates during playback or streaming.
+ * Registration for these events is done via a call to
+ * {@link #setOnEventListener}. In order to receive the respective
+ * callback associated with this listener, applications are required to create
+ * MediaDrm objects on a thread with its own Looper running (main UI
+ * thread by default has a Looper running).
+ */
+public final class MediaDrm {
+
+ private static final String TAG = "MediaDrm";
+
+ private static final String PERMISSION = android.Manifest.permission.ACCESS_DRM_CERTIFICATES;
+
+ private EventHandler mEventHandler;
+ private EventHandler mOnKeyStatusChangeEventHandler;
+ private EventHandler mOnExpirationUpdateEventHandler;
+ private OnEventListener mOnEventListener;
+ private OnKeyStatusChangeListener mOnKeyStatusChangeListener;
+ private OnExpirationUpdateListener mOnExpirationUpdateListener;
+
+ private long mNativeContext;
+
+ /**
+ * Specify no certificate type
+ *
+ * @hide - not part of the public API at this time
+ */
+ public static final int CERTIFICATE_TYPE_NONE = 0;
+
+ /**
+ * Specify X.509 certificate type
+ *
+ * @hide - not part of the public API at this time
+ */
+ public static final int CERTIFICATE_TYPE_X509 = 1;
+
+ /** @hide */
+ @IntDef({
+ CERTIFICATE_TYPE_NONE,
+ CERTIFICATE_TYPE_X509,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface CertificateType {}
+
+    /**
+     * Query if the given scheme identified by its UUID is supported on
+     * this device.
+     * @param uuid The UUID of the crypto scheme.
+     * @return true if the crypto scheme is supported on this device.
+     */
+    public static final boolean isCryptoSchemeSupported(@NonNull UUID uuid) {
+        return isCryptoSchemeSupportedNative(getByteArrayFromUUID(uuid), null);
+    }
+
+    /**
+     * Query if the given scheme identified by its UUID is supported on
+     * this device, and whether the drm plugin is able to handle the
+     * media container format specified by mimeType.
+     * @param uuid The UUID of the crypto scheme.
+     * @param mimeType The MIME type of the media container, e.g. "video/mp4"
+     * or "video/webm"
+     * @return true if the scheme is supported and the plugin can handle the
+     * given container format.
+     */
+    public static final boolean isCryptoSchemeSupported(
+            @NonNull UUID uuid, @NonNull String mimeType) {
+        return isCryptoSchemeSupportedNative(getByteArrayFromUUID(uuid), mimeType);
+    }
+
+    /**
+     * Serializes a UUID into its canonical 16-byte big-endian form: the most
+     * significant long fills bytes 0-7, the least significant long bytes 8-15.
+     */
+    private static final byte[] getByteArrayFromUUID(@NonNull UUID uuid) {
+        final long msb = uuid.getMostSignificantBits();
+        final long lsb = uuid.getLeastSignificantBits();
+
+        final byte[] out = new byte[16];
+        // Walk from the highest byte (shift 56) down to the lowest (shift 0).
+        for (int i = 0, shift = 56; shift >= 0; ++i, shift -= 8) {
+            out[i] = (byte) (msb >>> shift);
+            out[i + 8] = (byte) (lsb >>> shift);
+        }
+
+        return out;
+    }
+
+ private static final native boolean isCryptoSchemeSupportedNative(
+ @NonNull byte[] uuid, @Nullable String mimeType);
+
+ /**
+ * Instantiate a MediaDrm object
+ *
+ * @param uuid The UUID of the crypto scheme.
+ *
+ * @throws UnsupportedSchemeException if the device does not support the
+ * specified scheme UUID
+ */
+    public MediaDrm(@NonNull UUID uuid) throws UnsupportedSchemeException {
+        Looper looper;
+        // Prefer the calling thread's looper for event delivery; fall back to
+        // the main looper, and run with no event handler if neither exists.
+        if ((looper = Looper.myLooper()) != null) {
+            mEventHandler = new EventHandler(this, looper);
+        } else if ((looper = Looper.getMainLooper()) != null) {
+            mEventHandler = new EventHandler(this, looper);
+        } else {
+            mEventHandler = null;
+        }
+
+        /* Native setup requires a weak reference to our object.
+         * It's easier to create it here than in C++.
+         */
+        native_setup(new WeakReference<MediaDrm>(this),
+                getByteArrayFromUUID(uuid), ActivityThread.currentOpPackageName());
+    }
+
+ /**
+ * Thrown when an unrecoverable failure occurs during a MediaDrm operation.
+ * Extends java.lang.IllegalStateException with the addition of an error
+ * code that may be useful in diagnosing the failure.
+ */
+ public static final class MediaDrmStateException extends java.lang.IllegalStateException {
+ private final int mErrorCode;
+ private final String mDiagnosticInfo;
+
+ /**
+ * @hide
+ */
+ public MediaDrmStateException(int errorCode, @Nullable String detailMessage) {
+ super(detailMessage);
+ mErrorCode = errorCode;
+
+ // TODO get this from DRM session
+ final String sign = errorCode < 0 ? "neg_" : "";
+ mDiagnosticInfo =
+ "android.media.MediaDrm.error_" + sign + Math.abs(errorCode);
+
+ }
+
+ /**
+ * Retrieve the associated error code
+ *
+ * @hide
+ */
+ public int getErrorCode() {
+ return mErrorCode;
+ }
+
+ /**
+ * Retrieve a developer-readable diagnostic information string
+ * associated with the exception. Do not show this to end-users,
+ * since this string will not be localized or generally comprehensible
+ * to end-users.
+ */
+ @NonNull
+ public String getDiagnosticInfo() {
+ return mDiagnosticInfo;
+ }
+ }
+
+ /**
+ * Register a callback to be invoked when a session expiration update
+ * occurs. The app's OnExpirationUpdateListener will be notified
+ * when the expiration time of the keys in the session have changed.
+ * @param listener the callback that will be run, or {@code null} to unregister the
+ * previously registered callback.
+ * @param handler the handler on which the listener should be invoked, or
+ * {@code null} if the listener should be invoked on the calling thread's looper.
+ */
+    public void setOnExpirationUpdateListener(
+            @Nullable OnExpirationUpdateListener listener, @Nullable Handler handler) {
+        if (listener != null) {
+            // Deliver on the supplied handler's looper when given, otherwise on
+            // the calling thread's looper (if it has one).
+            final Looper looper = (handler == null) ? Looper.myLooper() : handler.getLooper();
+            if (looper != null
+                    && (mEventHandler == null || mEventHandler.getLooper() != looper)) {
+                mEventHandler = new EventHandler(this, looper);
+            }
+        }
+        mOnExpirationUpdateListener = listener;
+    }
+
+ /**
+ * Interface definition for a callback to be invoked when a drm session
+ * expiration update occurs
+ */
+    public interface OnExpirationUpdateListener
+    {
+        /**
+         * Called when a session expiration update occurs, to inform the app
+         * about the change in expiration time
+         *
+         * @param md the MediaDrm object on which the event occurred
+         * @param sessionId the DRM session ID on which the event occurred
+         * @param expirationTime the new expiration time for the keys in the session.
+         * The time is in milliseconds, relative to the Unix epoch. A value of
+         * 0 indicates that the keys never expire.
+         */
+        void onExpirationUpdate(
+                @NonNull MediaDrm md, @NonNull byte[] sessionId, long expirationTime);
+    }
+
+ /**
+ * Register a callback to be invoked when the state of keys in a session
+ * change, e.g. when a license update occurs or when a license expires.
+ *
+ * @param listener the callback that will be run when key status changes, or
+ * {@code null} to unregister the previously registered callback.
+ * @param handler the handler on which the listener should be invoked, or
+ * null if the listener should be invoked on the calling thread's looper.
+ */
+    public void setOnKeyStatusChangeListener(
+            @Nullable OnKeyStatusChangeListener listener, @Nullable Handler handler) {
+        if (listener != null) {
+            // Choose the handler's looper when provided, else the caller's looper.
+            final Looper looper = (handler == null) ? Looper.myLooper() : handler.getLooper();
+            if (looper != null
+                    && (mEventHandler == null || mEventHandler.getLooper() != looper)) {
+                mEventHandler = new EventHandler(this, looper);
+            }
+        }
+        mOnKeyStatusChangeListener = listener;
+    }
+
+ /**
+ * Interface definition for a callback to be invoked when the keys in a drm
+ * session change states.
+ */
+    public interface OnKeyStatusChangeListener
+    {
+        /**
+         * Called when the keys in a session change status, such as when the license
+         * is renewed or expires.
+         *
+         * @param md the MediaDrm object on which the event occurred
+         * @param sessionId the DRM session ID on which the event occurred
+         * @param keyInformation a list of {@link MediaDrm.KeyStatus}
+         * instances indicating the status for each key in the session
+         * @param hasNewUsableKey indicates if a usable key has been added,
+         * which may trigger an attempt to resume playback on the media stream
+         * if it is currently blocked waiting for a key.
+         */
+        void onKeyStatusChange(
+                @NonNull MediaDrm md, @NonNull byte[] sessionId,
+                @NonNull List<KeyStatus> keyInformation,
+                boolean hasNewUsableKey);
+    }
+
+ /**
+ * Defines the status of a key.
+ * A KeyStatus for each key in a session is provided to the
+ * {@link OnKeyStatusChangeListener#onKeyStatusChange}
+ * listener.
+ */
+    public static final class KeyStatus {
+        // Opaque key identifier supplied by the DRM plugin.
+        private final byte[] mKeyId;
+        // One of the STATUS_* codes below.
+        private final int mStatusCode;
+
+        /**
+         * The key is currently usable to decrypt media data
+         */
+        public static final int STATUS_USABLE = 0;
+
+        /**
+         * The key is no longer usable to decrypt media data because its
+         * expiration time has passed.
+         */
+        public static final int STATUS_EXPIRED = 1;
+
+        /**
+         * The key is not currently usable to decrypt media data because its
+         * output requirements cannot currently be met.
+         */
+        public static final int STATUS_OUTPUT_NOT_ALLOWED = 2;
+
+        /**
+         * The status of the key is not yet known and is being determined.
+         * The status will be updated with the actual status when it has
+         * been determined.
+         */
+        public static final int STATUS_PENDING = 3;
+
+        /**
+         * The key is not currently usable to decrypt media data because of an
+         * internal error in processing unrelated to input parameters. This error
+         * is not actionable by an app.
+         */
+        public static final int STATUS_INTERNAL_ERROR = 4;
+
+        /** @hide */
+        @IntDef({
+            STATUS_USABLE,
+            STATUS_EXPIRED,
+            STATUS_OUTPUT_NOT_ALLOWED,
+            STATUS_PENDING,
+            STATUS_INTERNAL_ERROR,
+        })
+        @Retention(RetentionPolicy.SOURCE)
+        public @interface KeyStatusCode {}
+
+        // Package-private: instances are presumably created by the framework
+        // when key status updates arrive — not by apps.
+        KeyStatus(@NonNull byte[] keyId, @KeyStatusCode int statusCode) {
+            mKeyId = keyId;
+            mStatusCode = statusCode;
+        }
+
+        /**
+         * Returns the status code for the key
+         * @return one of {@link #STATUS_USABLE}, {@link #STATUS_EXPIRED},
+         * {@link #STATUS_OUTPUT_NOT_ALLOWED}, {@link #STATUS_PENDING}
+         * or {@link #STATUS_INTERNAL_ERROR}.
+         */
+        @KeyStatusCode
+        public int getStatusCode() { return mStatusCode; }
+
+        /**
+         * Returns the id for the key
+         */
+        @NonNull
+        public byte[] getKeyId() { return mKeyId; }
+    }
+
+    /**
+     * Register a callback to be invoked when an event occurs
+     *
+     * @param listener the callback that will be run. Use {@code null} to
+     * stop receiving event callbacks.
+     */
+    public void setOnEventListener(@Nullable OnEventListener listener)
+    {
+        // NOTE(review): unsynchronized write read from the handler thread --
+        // presumably registration is expected before events start arriving; confirm.
+        mOnEventListener = listener;
+    }
+
+    /**
+     * Interface definition for a callback to be invoked when a drm event
+     * occurs
+     */
+    public interface OnEventListener
+    {
+        /**
+         * Called when an event occurs that requires the app to be notified
+         *
+         * @param md the MediaDrm object on which the event occurred
+         * @param sessionId the DRM session ID on which the event occurred,
+         * or {@code null} if there is no session ID associated with the event.
+         * @param event indicates the event type
+         * @param extra a secondary error code
+         * @param data optional byte array of data that may be associated with the event
+         */
+        void onEvent(
+            @NonNull MediaDrm md, @Nullable byte[] sessionId,
+            @DrmEvent int event, int extra,
+            @Nullable byte[] data);
+    }
+
+    /**
+     * This event type indicates that the app needs to request a certificate from
+     * the provisioning server. The request message data is obtained using
+     * {@link #getProvisionRequest}
+     *
+     * @deprecated Handle provisioning via {@link android.media.NotProvisionedException}
+     * instead.
+     */
+    public static final int EVENT_PROVISION_REQUIRED = 1;
+
+    /**
+     * This event type indicates that the app needs to request keys from a license
+     * server. The request message data is obtained using {@link #getKeyRequest}.
+     */
+    public static final int EVENT_KEY_REQUIRED = 2;
+
+    /**
+     * This event type indicates that the licensed usage duration for keys in a session
+     * has expired. The keys are no longer valid.
+     * @deprecated Use {@link OnKeyStatusChangeListener#onKeyStatusChange}
+     * and check for {@link MediaDrm.KeyStatus#STATUS_EXPIRED} in the {@link MediaDrm.KeyStatus}
+     * instead.
+     */
+    public static final int EVENT_KEY_EXPIRED = 3;
+
+    /**
+     * This event may indicate some specific vendor-defined condition, see your
+     * DRM provider documentation for details
+     */
+    public static final int EVENT_VENDOR_DEFINED = 4;
+
+    /**
+     * This event indicates that a session opened by the app has been reclaimed by the resource
+     * manager.
+     */
+    public static final int EVENT_SESSION_RECLAIMED = 5;
+
+    /** @hide */
+    @IntDef({
+        EVENT_PROVISION_REQUIRED,
+        EVENT_KEY_REQUIRED,
+        EVENT_KEY_EXPIRED,
+        EVENT_VENDOR_DEFINED,
+        EVENT_SESSION_RECLAIMED,
+    })
+    @Retention(RetentionPolicy.SOURCE)
+    public @interface DrmEvent {}
+
+    // Message "what" codes posted from native code via postEventFromNative()
+    // and dispatched by EventHandler.handleMessage().
+    private static final int DRM_EVENT = 200;
+    private static final int EXPIRATION_UPDATE = 201;
+    private static final int KEY_STATUS_CHANGE = 202;
+
+    // Dispatches messages posted from native code (see postEventFromNative) to
+    // the app-registered listeners on this handler's looper thread.
+    private class EventHandler extends Handler
+    {
+        private MediaDrm mMediaDrm;
+
+        public EventHandler(@NonNull MediaDrm md, @NonNull Looper looper) {
+            super(looper);
+            mMediaDrm = md;
+        }
+
+        @Override
+        public void handleMessage(@NonNull Message msg) {
+            // Native context is 0 once the native side is torn down; any
+            // still-queued events are dropped.
+            if (mMediaDrm.mNativeContext == 0) {
+                Log.w(TAG, "MediaDrm went away with unhandled events");
+                return;
+            }
+            switch(msg.what) {
+
+            case DRM_EVENT:
+                if (mOnEventListener != null) {
+                    if (msg.obj != null && msg.obj instanceof Parcel) {
+                        Parcel parcel = (Parcel)msg.obj;
+                        // An empty session ID from native is delivered to the
+                        // listener as null (no session associated with the event).
+                        byte[] sessionId = parcel.createByteArray();
+                        if (sessionId.length == 0) {
+                            sessionId = null;
+                        }
+                        byte[] data = parcel.createByteArray();
+                        if (data.length == 0) {
+                            data = null;
+                        }
+
+                        Log.i(TAG, "Drm event (" + msg.arg1 + "," + msg.arg2 + ")");
+                        mOnEventListener.onEvent(mMediaDrm, sessionId, msg.arg1, msg.arg2, data);
+                    }
+                }
+                return;
+
+            case KEY_STATUS_CHANGE:
+                if (mOnKeyStatusChangeListener != null) {
+                    if (msg.obj != null && msg.obj instanceof Parcel) {
+                        Parcel parcel = (Parcel)msg.obj;
+                        byte[] sessionId = parcel.createByteArray();
+                        // A session ID is required here; parcels with an empty
+                        // one are silently ignored.
+                        if (sessionId.length > 0) {
+                            List<KeyStatus> keyStatusList = keyStatusListFromParcel(parcel);
+                            boolean hasNewUsableKey = (parcel.readInt() != 0);
+
+                            Log.i(TAG, "Drm key status changed");
+                            mOnKeyStatusChangeListener.onKeyStatusChange(mMediaDrm, sessionId,
+                                    keyStatusList, hasNewUsableKey);
+                        }
+                    }
+                }
+                return;
+
+            case EXPIRATION_UPDATE:
+                if (mOnExpirationUpdateListener != null) {
+                    if (msg.obj != null && msg.obj instanceof Parcel) {
+                        Parcel parcel = (Parcel)msg.obj;
+                        byte[] sessionId = parcel.createByteArray();
+                        if (sessionId.length > 0) {
+                            long expirationTime = parcel.readLong();
+
+                            Log.i(TAG, "Drm key expiration update: " + expirationTime);
+                            mOnExpirationUpdateListener.onExpirationUpdate(mMediaDrm, sessionId,
+                                    expirationTime);
+                        }
+                    }
+                }
+                return;
+
+            default:
+                Log.e(TAG, "Unknown message type " + msg.what);
+                return;
+            }
+        }
+    }
+
+    /**
+     * Parse a list of KeyStatus objects from an event parcel.
+     *
+     * @param parcel parcel positioned at the serialized list: an element
+     *     count followed by (key ID byte array, status code int) pairs
+     * @return the parsed list; empty when the count is zero or negative
+     */
+    @NonNull
+    private List<KeyStatus> keyStatusListFromParcel(@NonNull Parcel parcel) {
+        int nelems = parcel.readInt();
+        // Use the parameterized type (the original raw "new ArrayList(nelems)"
+        // compiled with an unchecked warning) and guard against a negative
+        // element count, which would make the ArrayList constructor throw.
+        List<KeyStatus> keyStatusList = new ArrayList<>(Math.max(nelems, 0));
+        while (nelems-- > 0) {
+            byte[] keyId = parcel.createByteArray();
+            int keyStatusCode = parcel.readInt();
+            keyStatusList.add(new KeyStatus(keyId, keyStatusCode));
+        }
+        return keyStatusList;
+    }
+
+    /**
+     * This method is called from native code when an event occurs. This method
+     * just uses the EventHandler system to post the event back to the main app thread.
+     * We use a weak reference to the original MediaDrm object so that the native
+     * code is safe from the object disappearing from underneath it. (This is
+     * the cookie passed to native_setup().)
+     */
+    private static void postEventFromNative(@NonNull Object mediadrm_ref,
+            int what, int eventType, int extra, @Nullable Object obj)
+    {
+        MediaDrm md = (MediaDrm)((WeakReference<MediaDrm>)mediadrm_ref).get();
+        // The MediaDrm may already have been garbage collected; drop the event.
+        if (md == null) {
+            return;
+        }
+        if (md.mEventHandler != null) {
+            Message m = md.mEventHandler.obtainMessage(what, eventType, extra, obj);
+            md.mEventHandler.sendMessage(m);
+        }
+    }
+
+    /**
+     * Open a new session with the MediaDrm object. A session ID is returned.
+     *
+     * @throws NotProvisionedException if provisioning is needed
+     * @throws ResourceBusyException if required resources are in use
+     */
+    @NonNull
+    public native byte[] openSession() throws NotProvisionedException,
+            ResourceBusyException;
+
+    /**
+     * Close a session on the MediaDrm object that was previously opened
+     * with {@link #openSession}.
+     */
+    public native void closeSession(@NonNull byte[] sessionId);
+
+    /**
+     * This key request type specifies that the keys will be for online use, they will
+     * not be saved to the device for subsequent use when the device is not connected
+     * to a network.
+     */
+    public static final int KEY_TYPE_STREAMING = 1;
+
+    /**
+     * This key request type specifies that the keys will be for offline use, they
+     * will be saved to the device for use when the device is not connected to a network.
+     */
+    public static final int KEY_TYPE_OFFLINE = 2;
+
+    /**
+     * This key request type specifies that previously saved offline keys should be released.
+     */
+    public static final int KEY_TYPE_RELEASE = 3;
+
+    /** @hide */
+    @IntDef({
+        KEY_TYPE_STREAMING,
+        KEY_TYPE_OFFLINE,
+        KEY_TYPE_RELEASE,
+    })
+    @Retention(RetentionPolicy.SOURCE)
+    public @interface KeyType {}
+
+    /**
+     * Contains the opaque data an app uses to request keys from a license server
+     */
+    public static final class KeyRequest {
+        // All fields are initialized from JNI after construction.
+        private byte[] mData;
+        private String mDefaultUrl;
+        private int mRequestType;
+
+        /**
+         * Key request type is initial license request
+         */
+        public static final int REQUEST_TYPE_INITIAL = 0;
+
+        /**
+         * Key request type is license renewal
+         */
+        public static final int REQUEST_TYPE_RENEWAL = 1;
+
+        /**
+         * Key request type is license release
+         */
+        public static final int REQUEST_TYPE_RELEASE = 2;
+
+        /** @hide */
+        @IntDef({
+            REQUEST_TYPE_INITIAL,
+            REQUEST_TYPE_RENEWAL,
+            REQUEST_TYPE_RELEASE,
+        })
+        @Retention(RetentionPolicy.SOURCE)
+        public @interface RequestType {}
+
+        KeyRequest() {}
+
+        /**
+         * Get the opaque message data
+         */
+        @NonNull
+        public byte[] getData() {
+            if (mData == null) {
+                // this should never happen as mData is initialized in
+                // JNI after construction of the KeyRequest object. The check
+                // is needed here to guarantee @NonNull annotation.
+                throw new RuntimeException("KeyRequest is not initialized");
+            }
+            return mData;
+        }
+
+        /**
+         * Get the default URL to use when sending the key request message to a
+         * server, if known. The app may prefer to use a different license
+         * server URL from other sources.
+         * This method returns an empty string if the default URL is not known.
+         */
+        @NonNull
+        public String getDefaultUrl() {
+            if (mDefaultUrl == null) {
+                // this should never happen as mDefaultUrl is initialized in
+                // JNI after construction of the KeyRequest object. The check
+                // is needed here to guarantee @NonNull annotation.
+                throw new RuntimeException("KeyRequest is not initialized");
+            }
+            return mDefaultUrl;
+        }
+
+        /**
+         * Get the type of the request
+         * @return one of {@link #REQUEST_TYPE_INITIAL},
+         * {@link #REQUEST_TYPE_RENEWAL} or {@link #REQUEST_TYPE_RELEASE}
+         */
+        @RequestType
+        public int getRequestType() { return mRequestType; }
+    };
+
+    /**
+     * A key request/response exchange occurs between the app and a license server
+     * to obtain or release keys used to decrypt encrypted content.
+     * <p>
+     * getKeyRequest() is used to obtain an opaque key request byte array that is
+     * delivered to the license server. The opaque key request byte array is returned
+     * in KeyRequest.data. The recommended URL to deliver the key request to is
+     * returned in KeyRequest.defaultUrl.
+     * <p>
+     * After the app has received the key request response from the server,
+     * it should deliver the response to the DRM engine plugin using the method
+     * {@link #provideKeyResponse}.
+     *
+     * @param scope may be a sessionId or a keySetId, depending on the specified keyType.
+     * When the keyType is KEY_TYPE_STREAMING or KEY_TYPE_OFFLINE,
+     * scope should be set to the sessionId the keys will be provided to. When the keyType
+     * is KEY_TYPE_RELEASE, scope should be set to the keySetId of the keys
+     * being released. Releasing keys from a device invalidates them for all sessions.
+     * @param init container-specific data, its meaning is interpreted based on the
+     * mime type provided in the mimeType parameter. It could contain, for example,
+     * the content ID, key ID or other data obtained from the content metadata that is
+     * required in generating the key request. May be null when keyType is
+     * KEY_TYPE_RELEASE or if the request is a renewal, i.e. not the first key
+     * request for the session.
+     * @param mimeType identifies the mime type of the content. May be null if the
+     * keyType is KEY_TYPE_RELEASE or if the request is a renewal, i.e. not the
+     * first key request for the session.
+     * @param keyType specifies the type of the request. The request may be to acquire
+     * keys for streaming or offline content, or to release previously acquired
+     * keys, which are identified by a keySetId.
+     * @param optionalParameters are included in the key request message to
+     * allow a client application to provide additional message parameters to the server.
+     * This may be {@code null} if no additional parameters are to be sent.
+     * @throws NotProvisionedException if reprovisioning is needed, due to a
+     * problem with the certificate
+     */
+    @NonNull
+    public native KeyRequest getKeyRequest(
+            @NonNull byte[] scope, @Nullable byte[] init,
+            @Nullable String mimeType, @KeyType int keyType,
+            @Nullable HashMap<String, String> optionalParameters)
+            throws NotProvisionedException;
+
+
+    /**
+     * A key response is received from the license server by the app, then it is
+     * provided to the DRM engine plugin using provideKeyResponse. When the
+     * response is for an offline key request, a keySetId is returned that can be
+     * used to later restore the keys to a new session with the method
+     * {@link #restoreKeys}.
+     * When the response is for a streaming or release request, an empty byte array
+     * is returned.
+     *
+     * @param scope may be a sessionId or keySetId depending on the type of the
+     * response. Scope should be set to the sessionId when the response is for either
+     * streaming or offline key requests. Scope should be set to the keySetId when
+     * the response is for a release request.
+     * @param response the byte array response from the server
+     * @return If the response is for an offline request, the keySetId for the offline
+     * keys will be returned. If the response is for a streaming or release request
+     * an empty byte array will be returned.
+     *
+     * @throws NotProvisionedException if the response indicates that
+     * reprovisioning is required
+     * @throws DeniedByServerException if the response indicates that the
+     * server rejected the request
+     */
+    // NOTE(review): annotated @Nullable although the doc above promises an
+    // empty byte array for streaming/release responses -- confirm which
+    // contract the native implementation actually honors.
+    @Nullable
+    public native byte[] provideKeyResponse(
+            @NonNull byte[] scope, @NonNull byte[] response)
+            throws NotProvisionedException, DeniedByServerException;
+
+
+    /**
+     * Restore persisted offline keys into a new session. keySetId identifies the
+     * keys to load, obtained from a prior call to {@link #provideKeyResponse}.
+     *
+     * @param sessionId the session ID for the DRM session
+     * @param keySetId identifies the saved key set to restore
+     */
+    public native void restoreKeys(@NonNull byte[] sessionId, @NonNull byte[] keySetId);
+
+    /**
+     * Remove the current keys from a session.
+     *
+     * @param sessionId the session ID for the DRM session
+     */
+    public native void removeKeys(@NonNull byte[] sessionId);
+
+    /**
+     * Request an informative description of the key status for the session. The status is
+     * in the form of {name, value} pairs. Since DRM license policies vary by vendor,
+     * the specific status field names are determined by each DRM vendor. Refer to your
+     * DRM provider documentation for definitions of the field names for a particular
+     * DRM engine plugin.
+     *
+     * @param sessionId the session ID for the DRM session
+     * @return a map of {name, value} pairs describing the key status
+     */
+    @NonNull
+    public native HashMap<String, String> queryKeyStatus(@NonNull byte[] sessionId);
+
+    /**
+     * Contains the opaque data an app uses to request a certificate from a provisioning
+     * server
+     */
+    public static final class ProvisionRequest {
+        ProvisionRequest() {}
+
+        /**
+         * Get the opaque message data
+         */
+        @NonNull
+        public byte[] getData() {
+            if (mData == null) {
+                // this should never happen as mData is initialized in
+                // JNI after construction of the ProvisionRequest object. The check
+                // is needed here to guarantee @NonNull annotation.
+                throw new RuntimeException("ProvisionRequest is not initialized");
+            }
+            return mData;
+        }
+
+        /**
+         * Get the default URL to use when sending the provision request
+         * message to a server, if known. The app may prefer to use a different
+         * provisioning server URL obtained from other sources.
+         * This method returns an empty string if the default URL is not known.
+         */
+        @NonNull
+        public String getDefaultUrl() {
+            if (mDefaultUrl == null) {
+                // this should never happen as mDefaultUrl is initialized in
+                // JNI after construction of the ProvisionRequest object. The check
+                // is needed here to guarantee @NonNull annotation.
+                throw new RuntimeException("ProvisionRequest is not initialized");
+            }
+            return mDefaultUrl;
+        }
+
+        // Initialized from JNI after construction.
+        private byte[] mData;
+        private String mDefaultUrl;
+    }
+
+    /**
+     * A provision request/response exchange occurs between the app and a provisioning
+     * server to retrieve a device certificate. If provisioning is required, the
+     * EVENT_PROVISION_REQUIRED event will be sent to the event handler.
+     * getProvisionRequest is used to obtain the opaque provision request byte array that
+     * should be delivered to the provisioning server. The provision request byte array
+     * is returned in ProvisionRequest.data. The recommended URL to deliver the provision
+     * request to is returned in ProvisionRequest.defaultUrl.
+     */
+    @NonNull
+    public ProvisionRequest getProvisionRequest() {
+        return getProvisionRequestNative(CERTIFICATE_TYPE_NONE, "");
+    }
+
+    @NonNull
+    private native ProvisionRequest getProvisionRequestNative(int certType,
+            @NonNull String certAuthority);
+
+    /**
+     * After a provision response is received by the app, it is provided to the DRM
+     * engine plugin using this method.
+     *
+     * @param response the opaque provisioning response byte array to provide to the
+     * DRM engine plugin.
+     *
+     * @throws DeniedByServerException if the response indicates that the
+     * server rejected the request
+     */
+    public void provideProvisionResponse(@NonNull byte[] response)
+            throws DeniedByServerException {
+        // The returned Certificate is discarded here; it is only needed by the
+        // hidden provideCertificateResponse() path, which shares this native call.
+        provideProvisionResponseNative(response);
+    }
+
+    @NonNull
+    /* could there be a valid response with 0-sized certificate or key? */
+    private native Certificate provideProvisionResponseNative(@NonNull byte[] response)
+            throws DeniedByServerException;
+
+    /**
+     * A means of enforcing limits on the number of concurrent streams per subscriber
+     * across devices is provided via SecureStop. This is achieved by securely
+     * monitoring the lifetime of sessions.
+     * <p>
+     * Information from the server related to the current playback session is written
+     * to persistent storage on the device when each MediaCrypto object is created.
+     * <p>
+     * In the normal case, playback will be completed, the session destroyed and the
+     * Secure Stops will be queried. The app queries secure stops and forwards the
+     * secure stop message to the server which verifies the signature and notifies the
+     * server side database that the session destruction has been confirmed. The persisted
+     * record on the client is only removed after positive confirmation that the server
+     * received the message using releaseSecureStops().
+     *
+     * @return the list of opaque secure stop messages
+     */
+    @NonNull
+    public native List<byte[]> getSecureStops();
+
+    /**
+     * Access secure stop by secure stop ID.
+     *
+     * @param ssid - The secure stop ID provided by the license server.
+     * @return the opaque secure stop message for the given ID
+     */
+    @NonNull
+    public native byte[] getSecureStop(@NonNull byte[] ssid);
+
+    /**
+     * Process the SecureStop server response message ssRelease. After authenticating
+     * the message, remove the SecureStops identified in the response.
+     *
+     * @param ssRelease the server response indicating which secure stops to release
+     */
+    public native void releaseSecureStops(@NonNull byte[] ssRelease);
+
+    /**
+     * Remove all secure stops without requiring interaction with the server.
+     */
+    public native void releaseAllSecureStops();
+
+    /**
+     * String property name: identifies the maker of the DRM engine plugin
+     */
+    public static final String PROPERTY_VENDOR = "vendor";
+
+    /**
+     * String property name: identifies the version of the DRM engine plugin
+     */
+    public static final String PROPERTY_VERSION = "version";
+
+    /**
+     * String property name: describes the DRM engine plugin
+     */
+    public static final String PROPERTY_DESCRIPTION = "description";
+
+    /**
+     * String property name: a comma-separated list of cipher and mac algorithms
+     * supported by CryptoSession. The list may be empty if the DRM engine
+     * plugin does not support CryptoSession operations.
+     */
+    public static final String PROPERTY_ALGORITHMS = "algorithms";
+
+    /** @hide */
+    @StringDef({
+        PROPERTY_VENDOR,
+        PROPERTY_VERSION,
+        PROPERTY_DESCRIPTION,
+        PROPERTY_ALGORITHMS,
+    })
+    @Retention(RetentionPolicy.SOURCE)
+    public @interface StringProperty {}
+
+    /**
+     * Read a DRM engine plugin String property value, given the property name string.
+     * <p>
+     * Standard fields names are:
+     * {@link #PROPERTY_VENDOR}, {@link #PROPERTY_VERSION},
+     * {@link #PROPERTY_DESCRIPTION}, {@link #PROPERTY_ALGORITHMS}
+     */
+    /* FIXME this throws IllegalStateException for invalid property names */
+    @NonNull
+    public native String getPropertyString(@NonNull @StringProperty String propertyName);
+
+    /**
+     * Byte array property name: the device unique identifier is established during
+     * device provisioning and provides a means of uniquely identifying each device.
+     */
+    /* FIXME getPropertyByteArray throws IllegalStateException for invalid property names */
+    public static final String PROPERTY_DEVICE_UNIQUE_ID = "deviceUniqueId";
+
+    /** @hide */
+    @StringDef({
+        PROPERTY_DEVICE_UNIQUE_ID,
+    })
+    @Retention(RetentionPolicy.SOURCE)
+    public @interface ArrayProperty {}
+
+    /**
+     * Read a DRM engine plugin byte array property value, given the property name string.
+     * <p>
+     * Standard fields names are {@link #PROPERTY_DEVICE_UNIQUE_ID}
+     */
+    @NonNull
+    public native byte[] getPropertyByteArray(@ArrayProperty String propertyName);
+
+    /**
+     * Set a DRM engine plugin String property value.
+     */
+    public native void setPropertyString(
+            String propertyName, @NonNull String value);
+
+    /**
+     * Set a DRM engine plugin byte array property value.
+     */
+    public native void setPropertyByteArray(
+            String propertyName, @NonNull byte[] value);
+
+    // Native helpers backing the CryptoSession encrypt/decrypt/sign/verify
+    // operations defined below.
+    private static final native void setCipherAlgorithmNative(
+            @NonNull MediaDrm drm, @NonNull byte[] sessionId, @NonNull String algorithm);
+
+    private static final native void setMacAlgorithmNative(
+            @NonNull MediaDrm drm, @NonNull byte[] sessionId, @NonNull String algorithm);
+
+    @NonNull
+    private static final native byte[] encryptNative(
+            @NonNull MediaDrm drm, @NonNull byte[] sessionId,
+            @NonNull byte[] keyId, @NonNull byte[] input, @NonNull byte[] iv);
+
+    @NonNull
+    private static final native byte[] decryptNative(
+            @NonNull MediaDrm drm, @NonNull byte[] sessionId,
+            @NonNull byte[] keyId, @NonNull byte[] input, @NonNull byte[] iv);
+
+    @NonNull
+    private static final native byte[] signNative(
+            @NonNull MediaDrm drm, @NonNull byte[] sessionId,
+            @NonNull byte[] keyId, @NonNull byte[] message);
+
+    private static final native boolean verifyNative(
+            @NonNull MediaDrm drm, @NonNull byte[] sessionId,
+            @NonNull byte[] keyId, @NonNull byte[] message, @NonNull byte[] signature);
+
+    /**
+     * In addition to supporting decryption of DASH Common Encrypted Media, the
+     * MediaDrm APIs provide the ability to securely deliver session keys from
+     * an operator's session key server to a client device, based on the factory-installed
+     * root of trust, and then perform encrypt, decrypt, sign and verify operations
+     * with the session key on arbitrary user data.
+     * <p>
+     * The CryptoSession class implements generic encrypt/decrypt/sign/verify methods
+     * based on the established session keys. These keys are exchanged using the
+     * getKeyRequest/provideKeyResponse methods.
+     * <p>
+     * Applications of this capability could include securing various types of
+     * purchased or private content, such as applications, books and other media,
+     * photos or media delivery protocols.
+     * <p>
+     * Operators can create session key servers that are functionally similar to a
+     * license key server, except that instead of receiving license key requests and
+     * providing encrypted content keys which are used specifically to decrypt A/V media
+     * content, the session key server receives session key requests and provides
+     * encrypted session keys which can be used for general purpose crypto operations.
+     * <p>
+     * A CryptoSession is obtained using {@link #getCryptoSession}
+     */
+    public final class CryptoSession {
+        private byte[] mSessionId;
+
+        CryptoSession(@NonNull byte[] sessionId,
+                      @NonNull String cipherAlgorithm,
+                      @NonNull String macAlgorithm)
+        {
+            mSessionId = sessionId;
+            // Install the requested algorithms on the native side for this session.
+            setCipherAlgorithmNative(MediaDrm.this, sessionId, cipherAlgorithm);
+            setMacAlgorithmNative(MediaDrm.this, sessionId, macAlgorithm);
+        }
+
+        /**
+         * Encrypt data using the CryptoSession's cipher algorithm
+         *
+         * @param keyid specifies which key to use
+         * @param input the data to encrypt
+         * @param iv the initialization vector to use for the cipher
+         */
+        @NonNull
+        public byte[] encrypt(
+                @NonNull byte[] keyid, @NonNull byte[] input, @NonNull byte[] iv) {
+            return encryptNative(MediaDrm.this, mSessionId, keyid, input, iv);
+        }
+
+        /**
+         * Decrypt data using the CryptoSession's cipher algorithm
+         *
+         * @param keyid specifies which key to use
+         * @param input the data to decrypt
+         * @param iv the initialization vector to use for the cipher
+         */
+        @NonNull
+        public byte[] decrypt(
+                @NonNull byte[] keyid, @NonNull byte[] input, @NonNull byte[] iv) {
+            return decryptNative(MediaDrm.this, mSessionId, keyid, input, iv);
+        }
+
+        /**
+         * Sign data using the CryptoSession's mac algorithm.
+         *
+         * @param keyid specifies which key to use
+         * @param message the data for which a signature is to be computed
+         */
+        @NonNull
+        public byte[] sign(@NonNull byte[] keyid, @NonNull byte[] message) {
+            return signNative(MediaDrm.this, mSessionId, keyid, message);
+        }
+
+        /**
+         * Verify a signature using the CryptoSession's mac algorithm. Return true
+         * if the signatures match, false if they do not.
+         *
+         * @param keyid specifies which key to use
+         * @param message the data to verify
+         * @param signature the reference signature which will be compared with the
+         * computed signature
+         */
+        public boolean verify(
+                @NonNull byte[] keyid, @NonNull byte[] message, @NonNull byte[] signature) {
+            return verifyNative(MediaDrm.this, mSessionId, keyid, message, signature);
+        }
+    };
+
+    /**
+     * Obtain a CryptoSession object which can be used to encrypt, decrypt,
+     * sign and verify messages or data using the session keys established
+     * for the session using methods {@link #getKeyRequest} and
+     * {@link #provideKeyResponse} using a session key server.
+     *
+     * @param sessionId the session ID for the session containing keys
+     * to be used for encrypt, decrypt, sign and/or verify
+     * @param cipherAlgorithm the algorithm to use for encryption and
+     * decryption ciphers. The algorithm string conforms to JCA Standard
+     * Names for Cipher Transforms and is case insensitive. For example
+     * "AES/CBC/NoPadding".
+     * @param macAlgorithm the algorithm to use for sign and verify
+     * The algorithm string conforms to JCA Standard Names for Mac
+     * Algorithms and is case insensitive. For example "HmacSHA256".
+     * <p>
+     * The list of supported algorithms for a DRM engine plugin can be obtained
+     * using the method {@link #getPropertyString} with the property name
+     * "algorithms".
+     */
+    public CryptoSession getCryptoSession(
+            @NonNull byte[] sessionId,
+            @NonNull String cipherAlgorithm, @NonNull String macAlgorithm)
+    {
+        // The CryptoSession constructor installs the algorithms via the
+        // native set*AlgorithmNative calls.
+        return new CryptoSession(sessionId, cipherAlgorithm, macAlgorithm);
+    }
+
+    /**
+     * Contains the opaque data an app uses to request a certificate from a provisioning
+     * server
+     *
+     * @hide - not part of the public API at this time
+     */
+    public static final class CertificateRequest {
+        // Stored and returned by reference; no defensive copies are made.
+        private byte[] mData;
+        private String mDefaultUrl;
+
+        CertificateRequest(@NonNull byte[] data, @NonNull String defaultUrl) {
+            mData = data;
+            mDefaultUrl = defaultUrl;
+        }
+
+        /**
+         * Get the opaque message data
+         */
+        @NonNull
+        public byte[] getData() { return mData; }
+
+        /**
+         * Get the default URL to use when sending the certificate request
+         * message to a server, if known. The app may prefer to use a different
+         * certificate server URL obtained from other sources.
+         */
+        @NonNull
+        public String getDefaultUrl() { return mDefaultUrl; }
+    }
+
+    /**
+     * Generate a certificate request, specifying the certificate type
+     * and authority. The response received should be passed to
+     * provideCertificateResponse.
+     *
+     * @param certType Specifies the certificate type.
+     *
+     * @param certAuthority is passed to the certificate server to specify
+     * the chain of authority.
+     *
+     * @return the opaque certificate request and its recommended server URL
+     *
+     * @hide - not part of the public API at this time
+     */
+    @NonNull
+    public CertificateRequest getCertificateRequest(
+            @CertificateType int certType, @NonNull String certAuthority)
+    {
+        // Certificate requests share the provisioning request path natively.
+        ProvisionRequest provisionRequest = getProvisionRequestNative(certType, certAuthority);
+        return new CertificateRequest(provisionRequest.getData(),
+                provisionRequest.getDefaultUrl());
+    }
+
+    /**
+     * Contains the wrapped private key and public certificate data associated
+     * with a certificate.
+     *
+     * @hide - not part of the public API at this time
+     */
+    public static final class Certificate {
+        Certificate() {}
+
+        /**
+         * Get the wrapped private key data
+         */
+        @NonNull
+        public byte[] getWrappedPrivateKey() {
+            if (mWrappedKey == null) {
+                // this should never happen as mWrappedKey is initialized in
+                // JNI after construction of the Certificate object. The check
+                // is needed here to guarantee @NonNull annotation.
+                throw new RuntimeException("Certificate is not initialized");
+            }
+            return mWrappedKey;
+        }
+
+        /**
+         * Get the PEM-encoded certificate chain
+         */
+        @NonNull
+        public byte[] getContent() {
+            if (mCertificateData == null) {
+                // this should never happen as mCertificateData is initialized in
+                // JNI after construction of the Certificate object. The check
+                // is needed here to guarantee @NonNull annotation.
+                throw new RuntimeException("Certificate is not initialized");
+            }
+            return mCertificateData;
+        }
+
+        // Initialized from JNI after construction.
+        private byte[] mWrappedKey;
+        private byte[] mCertificateData;
+    }
+
+
+    /**
+     * Process a response from the certificate server. The response
+     * is obtained from an HTTP Post to the url provided by getCertificateRequest.
+     * <p>
+     * The public X509 certificate chain and wrapped private key are returned
+     * in the returned Certificate object. The certificate chain is in PEM format.
+     * The wrapped private key should be stored in application private
+     * storage, and used when invoking the signRSA method.
+     *
+     * @param response the opaque certificate response byte array to provide to the
+     * DRM engine plugin.
+     *
+     * @throws DeniedByServerException if the response indicates that the
+     * server rejected the request
+     *
+     * @hide - not part of the public API at this time
+     */
+    @NonNull
+    public Certificate provideCertificateResponse(@NonNull byte[] response)
+            throws DeniedByServerException {
+        // Shares the provisioning-response native path with provideProvisionResponse().
+        return provideProvisionResponseNative(response);
+    }
+
+    @NonNull
+    private static final native byte[] signRSANative(
+            @NonNull MediaDrm drm, @NonNull byte[] sessionId,
+            @NonNull String algorithm, @NonNull byte[] wrappedKey, @NonNull byte[] message);
+
+    /**
+     * Sign data using an RSA key
+     *
+     * @param sessionId a sessionId obtained from openSession on the MediaDrm object
+     * @param algorithm the signing algorithm to use, e.g. "PKCS1-BlockType1"
+     * @param wrappedKey - the wrapped (encrypted) RSA private key obtained
+     * from provideCertificateResponse
+     * @param message the data for which a signature is to be computed
+     * @return the computed signature
+     *
+     * @hide - not part of the public API at this time
+     */
+    @NonNull
+    public byte[] signRSA(
+            @NonNull byte[] sessionId, @NonNull String algorithm,
+            @NonNull byte[] wrappedKey, @NonNull byte[] message) {
+        return signRSANative(this, sessionId, algorithm, wrappedKey, message);
+    }
+
+    @Override
+    protected void finalize() throws Throwable {
+        // Release native resources, then chain to super.finalize() even if the
+        // native cleanup throws, per the Object.finalize() contract for overrides.
+        try {
+            native_finalize();
+        } finally {
+            super.finalize();
+        }
+    }
+
+    // Releases native resources. NOTE(review): presumably clears mNativeContext
+    // (EventHandler drops queued events once it is 0) -- confirm against the JNI layer.
+    public native final void release();
+    private static native final void native_init();
+
+    private native final void native_setup(Object mediadrm_this, byte[] uuid,
+            String appPackageName);
+
+    private native final void native_finalize();
+
+    // Load the JNI library and perform one-time native initialization.
+    static {
+        System.loadLibrary("media_jni");
+        native_init();
+    }
+}
diff --git a/android/media/MediaDrmException.java b/android/media/MediaDrmException.java
new file mode 100644
index 00000000..d547574e
--- /dev/null
+++ b/android/media/MediaDrmException.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
/**
 * Base class for MediaDrm exceptions
 */
public class MediaDrmException extends Exception {
    /**
     * @param detailMessage a human-readable description of the failure,
     *        retrievable later via {@link #getMessage}
     */
    public MediaDrmException(String detailMessage) {
        super(detailMessage);
    }
}
diff --git a/android/media/MediaDrmResetException.java b/android/media/MediaDrmResetException.java
new file mode 100644
index 00000000..3b2da1e8
--- /dev/null
+++ b/android/media/MediaDrmResetException.java
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
/**
 * This exception is thrown when the MediaDrm instance has become unusable
 * due to a restart of the mediaserver process. To continue, the app must
 * release the MediaDrm object, then create and initialize a new one.
 *
 * <p>Extends {@link IllegalStateException} (unchecked), so existing code that
 * already catches IllegalStateException from MediaDrm calls also catches this.
 */
public class MediaDrmResetException extends IllegalStateException {
    /**
     * @param detailMessage a human-readable description of the condition
     */
    public MediaDrmResetException(String detailMessage) {
        super(detailMessage);
    }
}
diff --git a/android/media/MediaExtractor.java b/android/media/MediaExtractor.java
new file mode 100644
index 00000000..2c1b4b35
--- /dev/null
+++ b/android/media/MediaExtractor.java
@@ -0,0 +1,755 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.content.ContentResolver;
+import android.content.Context;
+import android.content.res.AssetFileDescriptor;
+import android.media.MediaCodec;
+import android.media.MediaFormat;
+import android.media.MediaHTTPService;
+import android.net.Uri;
+import android.os.IBinder;
+import android.os.IHwBinder;
+import android.os.PersistableBundle;
+
+import com.android.internal.util.Preconditions;
+
+import java.io.FileDescriptor;
+import java.io.IOException;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.UUID;
+
+/**
+ * MediaExtractor facilitates extraction of demuxed, typically encoded, media data
+ * from a data source.
+ * <p>It is generally used like this:
+ * <pre>
+ * MediaExtractor extractor = new MediaExtractor();
+ * extractor.setDataSource(...);
+ * int numTracks = extractor.getTrackCount();
+ * for (int i = 0; i &lt; numTracks; ++i) {
+ * MediaFormat format = extractor.getTrackFormat(i);
+ * String mime = format.getString(MediaFormat.KEY_MIME);
+ * if (weAreInterestedInThisTrack) {
+ * extractor.selectTrack(i);
+ * }
+ * }
+ * ByteBuffer inputBuffer = ByteBuffer.allocate(...)
+ * while (extractor.readSampleData(inputBuffer, ...) &gt;= 0) {
+ * int trackIndex = extractor.getSampleTrackIndex();
+ * long presentationTimeUs = extractor.getSampleTime();
+ * ...
+ * extractor.advance();
+ * }
+ *
+ * extractor.release();
+ * extractor = null;
+ * </pre>
+ *
+ * <p>This class requires the {@link android.Manifest.permission#INTERNET} permission
+ * when used with network-based content.
+ */
final public class MediaExtractor {
    public MediaExtractor() {
        // Creates and binds the native extractor peer.
        native_setup();
    }

    /**
     * Sets the data source (MediaDataSource) to use.
     *
     * @param dataSource the MediaDataSource for the media you want to extract from
     *
     * @throws IllegalArgumentException if dataSource is invalid.
     * @throws IOException if the data source cannot be read
     */
    public native final void setDataSource(@NonNull MediaDataSource dataSource)
            throws IOException;
+
    /**
     * Sets the data source as a content Uri.
     *
     * @param context the Context to use when resolving the Uri
     * @param uri the Content URI of the data you want to extract from.
     *
     * <p>When <code>uri</code> refers to a network file the
     * {@link android.Manifest.permission#INTERNET} permission is required.
     *
     * @param headers the headers to be sent together with the request for the data.
     *        This can be {@code null} if no specific headers are to be sent with the
     *        request.
     */
    public final void setDataSource(
            @NonNull Context context, @NonNull Uri uri, @Nullable Map<String, String> headers)
            throws IOException {
        String scheme = uri.getScheme();
        // Plain paths (no scheme, or file://) bypass the ContentResolver entirely.
        if (scheme == null || scheme.equals("file")) {
            setDataSource(uri.getPath());
            return;
        }

        AssetFileDescriptor fd = null;
        try {
            ContentResolver resolver = context.getContentResolver();
            fd = resolver.openAssetFileDescriptor(uri, "r");
            if (fd == null) {
                // NOTE(review): a null descriptor silently leaves this extractor
                // without a data source; the caller sees no error until a later call.
                return;
            }
            // Note: using getDeclaredLength so that our behavior is the same
            // as previous versions when the content provider is returning
            // a full file.
            if (fd.getDeclaredLength() < 0) {
                setDataSource(fd.getFileDescriptor());
            } else {
                setDataSource(
                        fd.getFileDescriptor(),
                        fd.getStartOffset(),
                        fd.getDeclaredLength());
            }
            return;
        } catch (SecurityException ex) {
            // Intentional fall-through: retry below, treating the Uri as a URL string.
        } catch (IOException ex) {
            // Intentional fall-through: retry below, treating the Uri as a URL string.
        } finally {
            if (fd != null) {
                fd.close();
            }
        }

        // Descriptor-based access failed; hand the raw URI (e.g. http(s)) to the
        // native layer together with the supplied request headers.
        setDataSource(uri.toString(), headers);
    }
+
+ /**
+ * Sets the data source (file-path or http URL) to use.
+ *
+ * @param path the path of the file, or the http URL
+ *
+ * <p>When <code>path</code> refers to a network file the
+ * {@link android.Manifest.permission#INTERNET} permission is required.
+ *
+ * @param headers the headers associated with the http request for the stream you want to play.
+ * This can be {@code null} if no specific headers are to be sent with the
+ * request.
+ */
+ public final void setDataSource(@NonNull String path, @Nullable Map<String, String> headers)
+ throws IOException {
+ String[] keys = null;
+ String[] values = null;
+
+ if (headers != null) {
+ keys = new String[headers.size()];
+ values = new String[headers.size()];
+
+ int i = 0;
+ for (Map.Entry<String, String> entry: headers.entrySet()) {
+ keys[i] = entry.getKey();
+ values[i] = entry.getValue();
+ ++i;
+ }
+ }
+
+ nativeSetDataSource(
+ MediaHTTPService.createHttpServiceBinderIfNecessary(path),
+ path,
+ keys,
+ values);
+ }
+
+ private native final void nativeSetDataSource(
+ @NonNull IBinder httpServiceBinder,
+ @NonNull String path,
+ @Nullable String[] keys,
+ @Nullable String[] values) throws IOException;
+
    /**
     * Sets the data source (file-path or http URL) to use.
     *
     * @param path the path of the file, or the http URL of the stream
     *
     * <p>When <code>path</code> refers to a local file, the file may actually be opened by a
     * process other than the calling application.  This implies that the pathname
     * should be an absolute path (as any other process runs with unspecified current working
     * directory), and that the pathname should reference a world-readable file.
     * As an alternative, the application could first open the file for reading,
     * and then use the file descriptor form {@link #setDataSource(FileDescriptor)}.
     *
     * <p>When <code>path</code> refers to a network file the
     * {@link android.Manifest.permission#INTERNET} permission is required.
     */
    public final void setDataSource(@NonNull String path) throws IOException {
        // No headers: delegate with null key/value arrays.
        nativeSetDataSource(
                MediaHTTPService.createHttpServiceBinderIfNecessary(path),
                path,
                null,
                null);
    }

    /**
     * Sets the data source (AssetFileDescriptor) to use. It is the caller's
     * responsibility to close the file descriptor. It is safe to do so as soon
     * as this call returns.
     *
     * @param afd the AssetFileDescriptor for the file you want to extract from.
     */
    public final void setDataSource(@NonNull AssetFileDescriptor afd)
            throws IOException, IllegalArgumentException, IllegalStateException {
        Preconditions.checkNotNull(afd);
        // Note: using getDeclaredLength so that our behavior is the same
        // as previous versions when the content provider is returning
        // a full file.
        if (afd.getDeclaredLength() < 0) {
            setDataSource(afd.getFileDescriptor());
        } else {
            setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getDeclaredLength());
        }
    }

    /**
     * Sets the data source (FileDescriptor) to use. It is the caller's responsibility
     * to close the file descriptor. It is safe to do so as soon as this call returns.
     *
     * @param fd the FileDescriptor for the file you want to extract from.
     */
    public final void setDataSource(@NonNull FileDescriptor fd) throws IOException {
        // 0x7ffffffffffffffL is a 60-bit "rest of the file" sentinel — note it is
        // NOT Long.MAX_VALUE. NOTE(review): presumably kept below Long.MAX_VALUE
        // so offset + length cannot overflow in native code — confirm before changing.
        setDataSource(fd, 0, 0x7ffffffffffffffL);
    }

    /**
     * Sets the data source (FileDescriptor) to use.  The FileDescriptor must be
     * seekable (N.B. a LocalSocket is not seekable). It is the caller's responsibility
     * to close the file descriptor. It is safe to do so as soon as this call returns.
     *
     * @param fd the FileDescriptor for the file you want to extract from.
     * @param offset the offset into the file where the data to be extracted starts, in bytes
     * @param length the length in bytes of the data to be extracted
     */
    public native final void setDataSource(
            @NonNull FileDescriptor fd, long offset, long length) throws IOException;
+
    /**
     * Sets the MediaCas instance to use. This should be called after a
     * successful setDataSource() if at least one track reports mime type
     * of {@link android.media.MediaFormat#MIMETYPE_AUDIO_SCRAMBLED}
     * or {@link android.media.MediaFormat#MIMETYPE_VIDEO_SCRAMBLED}.
     * Stream parsing will not proceed until a valid MediaCas object
     * is provided.
     *
     * @param mediaCas the MediaCas object to use.
     */
    public final void setMediaCas(@NonNull MediaCas mediaCas) {
        // Keep the Java-side reference for getCasInfo(); the native layer
        // only receives the CAS hardware binder.
        mMediaCas = mediaCas;
        nativeSetMediaCas(mediaCas.getBinder());
    }

    // JNI: hands the CAS hardware binder to the native extractor.
    private native final void nativeSetMediaCas(@NonNull IHwBinder casBinder);
+
    /**
     * Describes the conditional access system used to scramble a track.
     */
    public static final class CasInfo {
        // Immutable after construction.
        private final int mSystemId;
        private final MediaCas.Session mSession;

        /**
         * @param systemId the CA system id reported in the track format
         * @param session the CAS session resolved for the track, or {@code null}
         *        if none was available (e.g. setMediaCas() was not called)
         */
        CasInfo(int systemId, @Nullable MediaCas.Session session) {
            mSystemId = systemId;
            mSession = session;
        }

        /**
         * Retrieves the system id of the conditional access system.
         *
         * @return CA system id of the CAS used to scramble the track.
         */
        public int getSystemId() {
            return mSystemId;
        }

        /**
         * Retrieves the {@link MediaCas.Session} associated with a track. The
         * session is needed to initialize a descrambler in order to decode the
         * scrambled track.
         * <p>
         * @see MediaDescrambler#setMediaCasSession
         * <p>
         * @return a {@link MediaCas.Session} object associated with a track.
         */
        public MediaCas.Session getSession() {
            return mSession;
        }
    }
+
+ private ArrayList<Byte> toByteArray(@NonNull byte[] data) {
+ ArrayList<Byte> byteArray = new ArrayList<Byte>(data.length);
+ for (int i = 0; i < data.length; i++) {
+ byteArray.add(i, Byte.valueOf(data[i]));
+ }
+ return byteArray;
+ }
+
    /**
     * Retrieves the information about the conditional access system used to scramble
     * a track.
     *
     * @param index of the track.
     * @return a {@link CasInfo} object describing the conditional access system,
     *         or {@code null} if the track format carries no CA system id.
     */
    public CasInfo getCasInfo(int index) {
        Map<String, Object> formatMap = getTrackFormatNative(index);
        if (formatMap.containsKey(MediaFormat.KEY_CA_SYSTEM_ID)) {
            int systemId = ((Integer)formatMap.get(MediaFormat.KEY_CA_SYSTEM_ID)).intValue();
            MediaCas.Session session = null;
            // The session id is only resolvable once setMediaCas() has been called.
            if (mMediaCas != null && formatMap.containsKey(MediaFormat.KEY_CA_SESSION_ID)) {
                ByteBuffer buf = (ByteBuffer) formatMap.get(MediaFormat.KEY_CA_SESSION_ID);
                buf.rewind();
                // Copy the session id out of the (possibly direct) buffer.
                final byte[] sessionId = new byte[buf.remaining()];
                buf.get(sessionId);
                session = mMediaCas.createFromSessionId(toByteArray(sessionId));
            }
            return new CasInfo(systemId, session);
        }
        return null;
    }
+
+ @Override
+ protected void finalize() {
+ native_finalize();
+ }
+
    /**
     * Make sure you call this when you're done to free up any resources
     * instead of relying on the garbage collector to do this for you at
     * some point in the future.
     */
    public native final void release();

    /**
     * Count the number of tracks found in the data source.
     */
    public native final int getTrackCount();

    /**
     * Extract DRM initialization data if it exists
     *
     * @return DRM initialization data in the content, or {@code null}
     * if no recognizable DRM format is found;
     * @see DrmInitData
     */
    public DrmInitData getDrmInitData() {
        Map<String, Object> formatMap = getFileFormatNative();
        // NOTE(review): getFileFormatNative() is annotated @NonNull below, yet
        // its result is null-checked here — one of the two is wrong; confirm
        // against the JNI implementation.
        if (formatMap == null) {
            return null;
        }
        if (formatMap.containsKey("pssh")) {
            // MP4-style content: expose each PSSH scheme UUID as "cenc" init data.
            Map<UUID, byte[]> psshMap = getPsshInfo();
            final Map<UUID, DrmInitData.SchemeInitData> initDataMap =
                new HashMap<UUID, DrmInitData.SchemeInitData>();
            for (Map.Entry<UUID, byte[]> e: psshMap.entrySet()) {
                UUID uuid = e.getKey();
                byte[] data = e.getValue();
                initDataMap.put(uuid, new DrmInitData.SchemeInitData("cenc", data));
            }
            return new DrmInitData() {
                public SchemeInitData get(UUID schemeUuid) {
                    return initDataMap.get(schemeUuid);
                }
            };
        } else {
            // WebM-style content: the first track carrying a "crypto-key" wins;
            // the same "webm" init data is returned regardless of the UUID asked for.
            int numTracks = getTrackCount();
            for (int i = 0; i < numTracks; ++i) {
                Map<String, Object> trackFormatMap = getTrackFormatNative(i);
                if (!trackFormatMap.containsKey("crypto-key")) {
                    continue;
                }
                ByteBuffer buf = (ByteBuffer) trackFormatMap.get("crypto-key");
                buf.rewind();
                final byte[] data = new byte[buf.remaining()];
                buf.get(data);
                return new DrmInitData() {
                    public SchemeInitData get(UUID schemeUuid) {
                        return new DrmInitData.SchemeInitData("webm", data);
                    }
                };
            }
        }
        return null;
    }

    /**
     * Get the PSSH info if present.
     * @return a map of uuid-to-bytes, with the uuid specifying
     * the crypto scheme, and the bytes being the data specific to that scheme.
     * This can be {@code null} if the source does not contain PSSH info.
     */
    @Nullable
    public Map<UUID, byte[]> getPsshInfo() {
        Map<UUID, byte[]> psshMap = null;
        Map<String, Object> formatMap = getFileFormatNative();
        if (formatMap != null && formatMap.containsKey("pssh")) {
            ByteBuffer rawpssh = (ByteBuffer) formatMap.get("pssh");
            rawpssh.order(ByteOrder.nativeOrder());
            rawpssh.rewind();
            // NOTE(review): this removes "pssh" from the locally returned map only;
            // the intent (hiding the raw blob from other consumers?) is unclear.
            formatMap.remove("pssh");
            // parse the flat pssh bytebuffer into something more manageable
            psshMap = new HashMap<UUID, byte[]>();
            while (rawpssh.remaining() > 0) {
                // Each entry: 16-byte UUID in big-endian order, followed by a
                // native-order int length and that many bytes of scheme data.
                rawpssh.order(ByteOrder.BIG_ENDIAN);
                long msb = rawpssh.getLong();
                long lsb = rawpssh.getLong();
                UUID uuid = new UUID(msb, lsb);
                rawpssh.order(ByteOrder.nativeOrder());
                int datalen = rawpssh.getInt();
                byte [] psshdata = new byte[datalen];
                rawpssh.get(psshdata);
                psshMap.put(uuid, psshdata);
            }
        }
        return psshMap;
    }

    // JNI: container-level format map (e.g. carries "pssh" for MP4 sources).
    @NonNull
    private native Map<String, Object> getFileFormatNative();
+
+ /**
+ * Get the track format at the specified index.
+ *
+ * More detail on the representation can be found at {@link android.media.MediaCodec}
+ * <p>
+ * The following table summarizes support for format keys across android releases:
+ *
+ * <table style="width: 0%">
+ * <thead>
+ * <tr>
+ * <th rowspan=2>OS Version(s)</th>
+ *         <th colspan=3>{@code MediaFormat} keys used for</th>
+ * </tr><tr>
+ * <th>All Tracks</th>
+ * <th>Audio Tracks</th>
+ * <th>Video Tracks</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td>{@link android.os.Build.VERSION_CODES#JELLY_BEAN}</td>
+ * <td rowspan=8>{@link MediaFormat#KEY_MIME},<br>
+ * {@link MediaFormat#KEY_DURATION},<br>
+ * {@link MediaFormat#KEY_MAX_INPUT_SIZE}</td>
+ * <td rowspan=5>{@link MediaFormat#KEY_SAMPLE_RATE},<br>
+ * {@link MediaFormat#KEY_CHANNEL_COUNT},<br>
+ * {@link MediaFormat#KEY_CHANNEL_MASK},<br>
+ * gapless playback information<sup>.mp3, .mp4</sup>,<br>
+ * {@link MediaFormat#KEY_IS_ADTS}<sup>AAC if streaming</sup>,<br>
+ * codec-specific data<sup>AAC, Vorbis</sup></td>
+ * <td rowspan=2>{@link MediaFormat#KEY_WIDTH},<br>
+ * {@link MediaFormat#KEY_HEIGHT},<br>
+ * codec-specific data<sup>AVC, MPEG4</sup></td>
+ * </tr><tr>
+ * <td>{@link android.os.Build.VERSION_CODES#JELLY_BEAN_MR1}</td>
+ * </tr><tr>
+ * <td>{@link android.os.Build.VERSION_CODES#JELLY_BEAN_MR2}</td>
+ * <td rowspan=3>as above, plus<br>
+ * Pixel aspect ratio information<sup>AVC, *</sup></td>
+ * </tr><tr>
+ * <td>{@link android.os.Build.VERSION_CODES#KITKAT}</td>
+ * </tr><tr>
+ * <td>{@link android.os.Build.VERSION_CODES#KITKAT_WATCH}</td>
+ * </tr><tr>
+ * <td>{@link android.os.Build.VERSION_CODES#LOLLIPOP}</td>
+ * <td rowspan=2>as above, plus<br>
+ * {@link MediaFormat#KEY_BIT_RATE}<sup>AAC</sup>,<br>
+ * codec-specific data<sup>Opus</sup></td>
+ * <td rowspan=2>as above, plus<br>
+ * {@link MediaFormat#KEY_ROTATION}<sup>.mp4</sup>,<br>
+ * {@link MediaFormat#KEY_BIT_RATE}<sup>MPEG4</sup>,<br>
+ * codec-specific data<sup>HEVC</sup></td>
+ * </tr><tr>
+ * <td>{@link android.os.Build.VERSION_CODES#LOLLIPOP_MR1}</td>
+ * </tr><tr>
+ * <td>{@link android.os.Build.VERSION_CODES#M}</td>
+ * <td>as above, plus<br>
+ * gapless playback information<sup>Opus</sup></td>
+ * <td>as above, plus<br>
+ * {@link MediaFormat#KEY_FRAME_RATE} (integer)</td>
+ * </tr><tr>
+ * <td>{@link android.os.Build.VERSION_CODES#N}</td>
+ * <td>as above, plus<br>
+ * {@link MediaFormat#KEY_TRACK_ID},<br>
+ * <!-- {link MediaFormat#KEY_MAX_BIT_RATE}<sup>#, .mp4</sup>,<br> -->
+ * {@link MediaFormat#KEY_BIT_RATE}<sup>#, .mp4</sup></td>
+ * <td>as above, plus<br>
+ * {@link MediaFormat#KEY_PCM_ENCODING},<br>
+ * {@link MediaFormat#KEY_PROFILE}<sup>AAC</sup></td>
+ * <td>as above, plus<br>
+ * {@link MediaFormat#KEY_HDR_STATIC_INFO}<sup>#, .webm</sup>,<br>
+ * {@link MediaFormat#KEY_COLOR_STANDARD}<sup>#</sup>,<br>
+ * {@link MediaFormat#KEY_COLOR_TRANSFER}<sup>#</sup>,<br>
+ * {@link MediaFormat#KEY_COLOR_RANGE}<sup>#</sup>,<br>
+ * {@link MediaFormat#KEY_PROFILE}<sup>MPEG2, H.263, MPEG4, AVC, HEVC, VP9</sup>,<br>
+ * {@link MediaFormat#KEY_LEVEL}<sup>H.263, MPEG4, AVC, HEVC, VP9</sup>,<br>
+ * codec-specific data<sup>VP9</sup></td>
+ * </tr>
+ * <tr>
+ * <td colspan=4>
+ * <p class=note><strong>Notes:</strong><br>
+ * #: container-specified value only.<br>
+ * .mp4, .webm&hellip;: for listed containers<br>
+ * MPEG4, AAC&hellip;: for listed codecs
+ * </td>
+ * </tr><tr>
+ * <td colspan=4>
+ *   <p class=note>Note that the level information contained in the container often
+ * does not match the level of the actual bitstream. You may want to clear the level using
+ * {@code MediaFormat.setString(KEY_LEVEL, null)} before using the track format to find a
+ * decoder that can play back a particular track.
+ * </td>
+ * </tr><tr>
+ * <td colspan=4>
+ * <p class=note><strong>*Pixel (sample) aspect ratio</strong> is returned in the following
+ * keys. The display width can be calculated for example as:
+ * <p align=center>
+ * display-width = display-height * crop-width / crop-height * sar-width / sar-height
+ * </td>
+ * </tr><tr>
+ * <th>Format Key</th><th>Value Type</th><th colspan=2>Description</th>
+ * </tr><tr>
+ * <td>{@code "sar-width"}</td><td>Integer</td><td colspan=2>Pixel aspect ratio width</td>
+ * </tr><tr>
+ * <td>{@code "sar-height"}</td><td>Integer</td><td colspan=2>Pixel aspect ratio height</td>
+ * </tr>
+ * </tbody>
+ * </table>
+ *
+ */
    @NonNull
    public MediaFormat getTrackFormat(int index) {
        // Wrap the raw native key/value map in a MediaFormat accessor.
        return new MediaFormat(getTrackFormatNative(index));
    }

    // JNI: raw format map for the given track index.
    @NonNull
    private native Map<String, Object> getTrackFormatNative(int index);
+
    /**
     * Subsequent calls to {@link #readSampleData}, {@link #getSampleTrackIndex} and
     * {@link #getSampleTime} only retrieve information for the subset of tracks
     * selected.
     * Selecting the same track multiple times has no effect, the track is
     * only selected once.
     *
     * @param index the index of the track to select
     */
    public native void selectTrack(int index);

    /**
     * Subsequent calls to {@link #readSampleData}, {@link #getSampleTrackIndex} and
     * {@link #getSampleTime} only retrieve information for the subset of tracks
     * selected.
     *
     * @param index the index of the track to deselect
     */
    public native void unselectTrack(int index);

    /**
     * If possible, seek to a sync sample at or before the specified time
     */
    public static final int SEEK_TO_PREVIOUS_SYNC = 0;
    /**
     * If possible, seek to a sync sample at or after the specified time
     */
    public static final int SEEK_TO_NEXT_SYNC = 1;
    /**
     * If possible, seek to the sync sample closest to the specified time
     */
    public static final int SEEK_TO_CLOSEST_SYNC = 2;

    /** @hide */
    @IntDef({
        SEEK_TO_PREVIOUS_SYNC,
        SEEK_TO_NEXT_SYNC,
        SEEK_TO_CLOSEST_SYNC,
    })
    @Retention(RetentionPolicy.SOURCE)
    public @interface SeekMode {}

    /**
     * All selected tracks seek near the requested time according to the
     * specified mode.
     *
     * @param timeUs the desired seek target, in microseconds
     * @param mode one of the SEEK_TO_* constants above
     */
    public native void seekTo(long timeUs, @SeekMode int mode);
+
    /**
     * Advance to the next sample. Returns false if no more sample data
     * is available (end of stream).
     *
     * When extracting a local file, the behaviors of {@link #advance} and
     * {@link #readSampleData} are undefined in presence of concurrent
     * writes to the same local file; more specifically, end of stream
     * could be signalled earlier than expected.
     */
    public native boolean advance();

    /**
     * Retrieve the current encoded sample and store it in the byte buffer
     * starting at the given offset.
     * <p>
     * <b>Note:</b>As of API 21, on success the position and limit of
     * {@code byteBuf} is updated to point to the data just read.
     * @param byteBuf the destination byte buffer
     * @param offset the offset into {@code byteBuf} at which to store the sample data
     * @return the sample size (or -1 if no more samples are available).
     */
    public native int readSampleData(@NonNull ByteBuffer byteBuf, int offset);

    /**
     * Returns the track index the current sample originates from (or -1
     * if no more samples are available)
     */
    public native int getSampleTrackIndex();

    /**
     * Returns the current sample's presentation time in microseconds,
     * or -1 if no more samples are available.
     */
    public native long getSampleTime();

    // Keep these in sync with their equivalents in NuMediaExtractor.h
    /**
     * The sample is a sync sample (or in {@link MediaCodec}'s terminology
     * it is a key frame.)
     *
     * @see MediaCodec#BUFFER_FLAG_KEY_FRAME
     */
    public static final int SAMPLE_FLAG_SYNC = 1;

    /**
     * The sample is (at least partially) encrypted, see also the documentation
     * for {@link android.media.MediaCodec#queueSecureInputBuffer}
     */
    public static final int SAMPLE_FLAG_ENCRYPTED = 2;

    /**
     * This indicates that the buffer only contains part of a frame,
     * and the decoder should batch the data until a buffer without
     * this flag appears before decoding the frame.
     *
     * @see MediaCodec#BUFFER_FLAG_PARTIAL_FRAME
     */
    public static final int SAMPLE_FLAG_PARTIAL_FRAME = 4;

    /** @hide */
    @IntDef(
        flag = true,
        value = {
            SAMPLE_FLAG_SYNC,
            SAMPLE_FLAG_ENCRYPTED,
            SAMPLE_FLAG_PARTIAL_FRAME,
        })
    @Retention(RetentionPolicy.SOURCE)
    public @interface SampleFlag {}

    /**
     * Returns the current sample's flags.
     */
    @SampleFlag
    public native int getSampleFlags();

    /**
     * If the sample flags indicate that the current sample is at least
     * partially encrypted, this call returns relevant information about
     * the structure of the sample data required for decryption.
     * @param info The android.media.MediaCodec.CryptoInfo structure
     *             to be filled in.
     * @return true iff the sample flags contain {@link #SAMPLE_FLAG_ENCRYPTED}
     */
    public native boolean getSampleCryptoInfo(@NonNull MediaCodec.CryptoInfo info);

    /**
     * Returns an estimate of how much data is presently cached in memory
     * expressed in microseconds. Returns -1 if that information is unavailable
     * or not applicable (no cache).
     */
    public native long getCachedDuration();

    /**
     * Returns true iff we are caching data and the cache has reached the
     * end of the data stream (for now, a future seek may of course restart
     * the fetching of data).
     * This API only returns a meaningful result if {@link #getCachedDuration}
     * indicates the presence of a cache, i.e. does NOT return -1.
     */
    public native boolean hasCacheReachedEndOfStream();
+
+ /**
+ * Return Metrics data about the current media container.
+ *
+ * @return a {@link PersistableBundle} containing the set of attributes and values
+ * available for the media container being handled by this instance
+ * of MediaExtractor.
+ * The attributes are descibed in {@link MetricsConstants}.
+ *
+ * Additional vendor-specific fields may also be present in
+ * the return value.
+ */
+
+ public PersistableBundle getMetrics() {
+ PersistableBundle bundle = native_getMetrics();
+ return bundle;
+ }
+
    // JNI: snapshot of container metrics backing getMetrics().
    private native PersistableBundle native_getMetrics();

    // One-time static class initialization in the native library.
    private static native final void native_init();
    // Per-instance native peer construction / destruction.
    private native final void native_setup();
    private native final void native_finalize();

    static {
        // Load the JNI implementation and run its one-time initialization.
        System.loadLibrary("media_jni");
        native_init();
    }

    // Set by setMediaCas(); consulted by getCasInfo() to resolve session ids.
    private MediaCas mMediaCas;

    // Opaque handle to the native extractor.
    // NOTE(review): presumably read/written from JNI — do not rename or remove
    // without checking the native code.
    private long mNativeContext;
+
    /**
     * Attribute keys found in the {@link PersistableBundle} returned by
     * {@link MediaExtractor#getMetrics}.
     */
    public final static class MetricsConstants
    {
        // Non-instantiable: pure constant holder.
        private MetricsConstants() {}

        /**
         * Key to extract the container format
         * from the {@link MediaExtractor#getMetrics} return value.
         * The value is a String.
         */
        public static final String FORMAT = "android.media.mediaextractor.fmt";

        /**
         * Key to extract the container MIME type
         * from the {@link MediaExtractor#getMetrics} return value.
         * The value is a String.
         */
        public static final String MIME_TYPE = "android.media.mediaextractor.mime";

        /**
         * Key to extract the number of tracks in the container
         * from the {@link MediaExtractor#getMetrics} return value.
         * The value is an integer.
         */
        public static final String TRACKS = "android.media.mediaextractor.ntrk";

    }

}
diff --git a/android/media/MediaFile.java b/android/media/MediaFile.java
new file mode 100644
index 00000000..35937de2
--- /dev/null
+++ b/android/media/MediaFile.java
@@ -0,0 +1,378 @@
+/*
+ * Copyright (C) 2007 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.media.DecoderCapabilities;
+import android.media.DecoderCapabilities.VideoDecoder;
+import android.media.DecoderCapabilities.AudioDecoder;
+import android.mtp.MtpConstants;
+import com.android.internal.util.Preconditions;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+
+/**
+ * MediaScanner helper class.
+ *
+ * {@hide}
+ */
+public class MediaFile {
+
+    // File type ids, grouped by media class. The is*FileType() helpers below do
+    // range checks, so the ids inside each group must stay contiguous between
+    // that group's FIRST_/LAST_ bounds; new ids go in a new out-of-band group.
+    // Audio file types
+    public static final int FILE_TYPE_MP3     = 1;
+    public static final int FILE_TYPE_M4A     = 2;
+    public static final int FILE_TYPE_WAV     = 3;
+    public static final int FILE_TYPE_AMR     = 4;
+    public static final int FILE_TYPE_AWB     = 5;
+    public static final int FILE_TYPE_WMA     = 6;
+    public static final int FILE_TYPE_OGG     = 7;
+    public static final int FILE_TYPE_AAC     = 8;
+    public static final int FILE_TYPE_MKA     = 9;
+    public static final int FILE_TYPE_FLAC    = 10;
+    private static final int FIRST_AUDIO_FILE_TYPE = FILE_TYPE_MP3;
+    private static final int LAST_AUDIO_FILE_TYPE = FILE_TYPE_FLAC;
+
+    // MIDI file types
+    public static final int FILE_TYPE_MID     = 11;
+    public static final int FILE_TYPE_SMF     = 12;
+    public static final int FILE_TYPE_IMY     = 13;
+    private static final int FIRST_MIDI_FILE_TYPE = FILE_TYPE_MID;
+    private static final int LAST_MIDI_FILE_TYPE = FILE_TYPE_IMY;
+
+    // Video file types
+    public static final int FILE_TYPE_MP4     = 21;
+    public static final int FILE_TYPE_M4V     = 22;
+    public static final int FILE_TYPE_3GPP    = 23;
+    public static final int FILE_TYPE_3GPP2   = 24;
+    public static final int FILE_TYPE_WMV     = 25;
+    public static final int FILE_TYPE_ASF     = 26;
+    public static final int FILE_TYPE_MKV     = 27;
+    public static final int FILE_TYPE_MP2TS   = 28;
+    public static final int FILE_TYPE_AVI     = 29;
+    public static final int FILE_TYPE_WEBM    = 30;
+    private static final int FIRST_VIDEO_FILE_TYPE = FILE_TYPE_MP4;
+    private static final int LAST_VIDEO_FILE_TYPE = FILE_TYPE_WEBM;
+
+    // More video file types (out-of-band ids, checked as a second range)
+    public static final int FILE_TYPE_MP2PS   = 200;
+    public static final int FILE_TYPE_QT      = 201;
+    private static final int FIRST_VIDEO_FILE_TYPE2 = FILE_TYPE_MP2PS;
+    private static final int LAST_VIDEO_FILE_TYPE2 = FILE_TYPE_QT;
+
+    // Image file types
+    public static final int FILE_TYPE_JPEG    = 31;
+    public static final int FILE_TYPE_GIF     = 32;
+    public static final int FILE_TYPE_PNG     = 33;
+    public static final int FILE_TYPE_BMP     = 34;
+    public static final int FILE_TYPE_WBMP    = 35;
+    public static final int FILE_TYPE_WEBP    = 36;
+    public static final int FILE_TYPE_HEIF    = 37;
+    private static final int FIRST_IMAGE_FILE_TYPE = FILE_TYPE_JPEG;
+    private static final int LAST_IMAGE_FILE_TYPE = FILE_TYPE_HEIF;
+
+    // Raw image file types (out-of-band ids, checked as a second range)
+    public static final int FILE_TYPE_DNG     = 300;
+    public static final int FILE_TYPE_CR2     = 301;
+    public static final int FILE_TYPE_NEF     = 302;
+    public static final int FILE_TYPE_NRW     = 303;
+    public static final int FILE_TYPE_ARW     = 304;
+    public static final int FILE_TYPE_RW2     = 305;
+    public static final int FILE_TYPE_ORF     = 306;
+    public static final int FILE_TYPE_RAF     = 307;
+    public static final int FILE_TYPE_PEF     = 308;
+    public static final int FILE_TYPE_SRW     = 309;
+    private static final int FIRST_RAW_IMAGE_FILE_TYPE = FILE_TYPE_DNG;
+    private static final int LAST_RAW_IMAGE_FILE_TYPE = FILE_TYPE_SRW;
+
+    // Playlist file types
+    public static final int FILE_TYPE_M3U      = 41;
+    public static final int FILE_TYPE_PLS      = 42;
+    public static final int FILE_TYPE_WPL      = 43;
+    public static final int FILE_TYPE_HTTPLIVE = 44;
+
+    private static final int FIRST_PLAYLIST_FILE_TYPE = FILE_TYPE_M3U;
+    private static final int LAST_PLAYLIST_FILE_TYPE = FILE_TYPE_HTTPLIVE;
+
+    // Drm file types
+    public static final int FILE_TYPE_FL      = 51;
+    private static final int FIRST_DRM_FILE_TYPE = FILE_TYPE_FL;
+    private static final int LAST_DRM_FILE_TYPE = FILE_TYPE_FL;
+
+    // Other popular file types (no is*FileType() helper covers these)
+    public static final int FILE_TYPE_TEXT          = 100;
+    public static final int FILE_TYPE_HTML          = 101;
+    public static final int FILE_TYPE_PDF           = 102;
+    public static final int FILE_TYPE_XML           = 103;
+    public static final int FILE_TYPE_MS_WORD       = 104;
+    public static final int FILE_TYPE_MS_EXCEL      = 105;
+    public static final int FILE_TYPE_MS_POWERPOINT = 106;
+    public static final int FILE_TYPE_ZIP           = 107;
+
+    /**
+     * Immutable pairing of a file type id (one of the FILE_TYPE_* constants)
+     * with its MIME type string, as registered via addFileType().
+     */
+    public static class MediaFileType {
+        public final int fileType;
+        public final String mimeType;
+
+        MediaFileType(int fileType, String mimeType) {
+            this.fileType = fileType;
+            this.mimeType = mimeType;
+        }
+    }
+
+    // maps upper-case file extension to its MediaFileType (type id + MIME)
+    private static final HashMap<String, MediaFileType> sFileTypeMap
+            = new HashMap<String, MediaFileType>();
+    // maps MIME type to file type id
+    private static final HashMap<String, Integer> sMimeTypeMap
+            = new HashMap<String, Integer>();
+    // maps file extension to MTP format code
+    private static final HashMap<String, Integer> sFileTypeToFormatMap
+            = new HashMap<String, Integer>();
+    // maps mime type to MTP format code
+    private static final HashMap<String, Integer> sMimeTypeToFormatMap
+            = new HashMap<String, Integer>();
+    // maps MTP format code to mime type
+    private static final HashMap<Integer, String> sFormatToMimeTypeMap
+            = new HashMap<Integer, String>();
+
+    /**
+     * Registers extension -> (file type, MIME) and MIME -> file type.
+     * Re-registering the same extension or MIME overwrites the earlier entry.
+     */
+    static void addFileType(String extension, int fileType, String mimeType) {
+        sFileTypeMap.put(extension, new MediaFileType(fileType, mimeType));
+        sMimeTypeMap.put(mimeType, Integer.valueOf(fileType));
+    }
+
+    /**
+     * Registers an extension/MIME pair plus its MTP format code.
+     *
+     * @param mtpFormatCode MTP format code associated with this extension/MIME
+     * @param primaryType   true if this MIME type is the canonical one for the
+     *                      format code; at most one primary registration is
+     *                      allowed per code (enforced below).
+     */
+    private static void addFileType(String extension, int fileType, String mimeType,
+            int mtpFormatCode, boolean primaryType) {
+        addFileType(extension, fileType, mimeType);
+        sFileTypeToFormatMap.put(extension, Integer.valueOf(mtpFormatCode));
+        sMimeTypeToFormatMap.put(mimeType, Integer.valueOf(mtpFormatCode));
+        if (primaryType) {
+            // Guard against two "primary" MIME types claiming one format code.
+            Preconditions.checkArgument(!sFormatToMimeTypeMap.containsKey(mtpFormatCode));
+            sFormatToMimeTypeMap.put(mtpFormatCode, mimeType);
+        }
+    }
+
+    /** Returns true if the device advertises a WMA audio decoder. */
+    private static boolean isWMAEnabled() {
+        for (AudioDecoder decoder : DecoderCapabilities.getAudioDecoders()) {
+            if (decoder == AudioDecoder.AUDIO_DECODER_WMA) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    /** Returns true if the device advertises a WMV video decoder. */
+    private static boolean isWMVEnabled() {
+        for (VideoDecoder decoder : DecoderCapabilities.getVideoDecoders()) {
+            if (decoder == VideoDecoder.VIDEO_DECODER_WMV) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    static {
+        // Registration table. Order matters: re-registering an extension or a
+        // MIME type overwrites the earlier map entry (e.g. OGG, MPG, MPEG
+        // below), and exactly one registration per MTP format code may be
+        // marked primary (checked via Preconditions in addFileType).
+        addFileType("MP3", FILE_TYPE_MP3, "audio/mpeg", MtpConstants.FORMAT_MP3, true);
+        addFileType("MPGA", FILE_TYPE_MP3, "audio/mpeg", MtpConstants.FORMAT_MP3, false);
+        addFileType("M4A", FILE_TYPE_M4A, "audio/mp4", MtpConstants.FORMAT_MPEG, false);
+        addFileType("WAV", FILE_TYPE_WAV, "audio/x-wav", MtpConstants.FORMAT_WAV, true);
+        addFileType("AMR", FILE_TYPE_AMR, "audio/amr");
+        addFileType("AWB", FILE_TYPE_AWB, "audio/amr-wb");
+        // WMA/WMV are only registered when the device has the decoders.
+        if (isWMAEnabled()) {
+            addFileType("WMA", FILE_TYPE_WMA, "audio/x-ms-wma", MtpConstants.FORMAT_WMA, true);
+        }
+        addFileType("OGG", FILE_TYPE_OGG, "audio/ogg", MtpConstants.FORMAT_OGG, false);
+        addFileType("OGG", FILE_TYPE_OGG, "application/ogg", MtpConstants.FORMAT_OGG, true);
+        addFileType("OGA", FILE_TYPE_OGG, "application/ogg", MtpConstants.FORMAT_OGG, false);
+        addFileType("AAC", FILE_TYPE_AAC, "audio/aac", MtpConstants.FORMAT_AAC, true);
+        addFileType("AAC", FILE_TYPE_AAC, "audio/aac-adts", MtpConstants.FORMAT_AAC, false);
+        addFileType("MKA", FILE_TYPE_MKA, "audio/x-matroska");
+
+        addFileType("MID", FILE_TYPE_MID, "audio/midi");
+        addFileType("MIDI", FILE_TYPE_MID, "audio/midi");
+        addFileType("XMF", FILE_TYPE_MID, "audio/midi");
+        addFileType("RTTTL", FILE_TYPE_MID, "audio/midi");
+        addFileType("SMF", FILE_TYPE_SMF, "audio/sp-midi");
+        addFileType("IMY", FILE_TYPE_IMY, "audio/imelody");
+        addFileType("RTX", FILE_TYPE_MID, "audio/midi");
+        addFileType("OTA", FILE_TYPE_MID, "audio/midi");
+        addFileType("MXMF", FILE_TYPE_MID, "audio/midi");
+
+        addFileType("MPEG", FILE_TYPE_MP4, "video/mpeg", MtpConstants.FORMAT_MPEG, true);
+        addFileType("MPG", FILE_TYPE_MP4, "video/mpeg", MtpConstants.FORMAT_MPEG, false);
+        addFileType("MP4", FILE_TYPE_MP4, "video/mp4", MtpConstants.FORMAT_MPEG, false);
+        addFileType("M4V", FILE_TYPE_M4V, "video/mp4", MtpConstants.FORMAT_MPEG, false);
+        addFileType("MOV", FILE_TYPE_QT, "video/quicktime", MtpConstants.FORMAT_MPEG, false);
+
+        addFileType("3GP", FILE_TYPE_3GPP, "video/3gpp", MtpConstants.FORMAT_3GP_CONTAINER, true);
+        addFileType("3GPP", FILE_TYPE_3GPP, "video/3gpp", MtpConstants.FORMAT_3GP_CONTAINER, false);
+        addFileType("3G2", FILE_TYPE_3GPP2, "video/3gpp2", MtpConstants.FORMAT_3GP_CONTAINER, false);
+        addFileType("3GPP2", FILE_TYPE_3GPP2, "video/3gpp2", MtpConstants.FORMAT_3GP_CONTAINER, false);
+        addFileType("MKV", FILE_TYPE_MKV, "video/x-matroska");
+        addFileType("WEBM", FILE_TYPE_WEBM, "video/webm");
+        addFileType("TS", FILE_TYPE_MP2TS, "video/mp2ts");
+        addFileType("AVI", FILE_TYPE_AVI, "video/avi");
+
+        if (isWMVEnabled()) {
+            addFileType("WMV", FILE_TYPE_WMV, "video/x-ms-wmv", MtpConstants.FORMAT_WMV, true);
+            addFileType("ASF", FILE_TYPE_ASF, "video/x-ms-asf");
+        }
+
+        addFileType("JPG", FILE_TYPE_JPEG, "image/jpeg", MtpConstants.FORMAT_EXIF_JPEG, true);
+        addFileType("JPEG", FILE_TYPE_JPEG, "image/jpeg", MtpConstants.FORMAT_EXIF_JPEG, false);
+        addFileType("GIF", FILE_TYPE_GIF, "image/gif", MtpConstants.FORMAT_GIF, true);
+        addFileType("PNG", FILE_TYPE_PNG, "image/png", MtpConstants.FORMAT_PNG, true);
+        addFileType("BMP", FILE_TYPE_BMP, "image/x-ms-bmp", MtpConstants.FORMAT_BMP, true);
+        addFileType("WBMP", FILE_TYPE_WBMP, "image/vnd.wap.wbmp", MtpConstants.FORMAT_DEFINED, false);
+        addFileType("WEBP", FILE_TYPE_WEBP, "image/webp", MtpConstants.FORMAT_DEFINED, false);
+        addFileType("HEIC", FILE_TYPE_HEIF, "image/heif", MtpConstants.FORMAT_HEIF, true);
+        addFileType("HEIF", FILE_TYPE_HEIF, "image/heif", MtpConstants.FORMAT_HEIF, false);
+
+        addFileType("DNG", FILE_TYPE_DNG, "image/x-adobe-dng", MtpConstants.FORMAT_DNG, true);
+        addFileType("CR2", FILE_TYPE_CR2, "image/x-canon-cr2", MtpConstants.FORMAT_TIFF, false);
+        addFileType("NEF", FILE_TYPE_NEF, "image/x-nikon-nef", MtpConstants.FORMAT_TIFF_EP, false);
+        addFileType("NRW", FILE_TYPE_NRW, "image/x-nikon-nrw", MtpConstants.FORMAT_TIFF, false);
+        addFileType("ARW", FILE_TYPE_ARW, "image/x-sony-arw", MtpConstants.FORMAT_TIFF, false);
+        addFileType("RW2", FILE_TYPE_RW2, "image/x-panasonic-rw2", MtpConstants.FORMAT_TIFF, false);
+        addFileType("ORF", FILE_TYPE_ORF, "image/x-olympus-orf", MtpConstants.FORMAT_TIFF, false);
+        addFileType("RAF", FILE_TYPE_RAF, "image/x-fuji-raf", MtpConstants.FORMAT_DEFINED, false);
+        addFileType("PEF", FILE_TYPE_PEF, "image/x-pentax-pef", MtpConstants.FORMAT_TIFF, false);
+        addFileType("SRW", FILE_TYPE_SRW, "image/x-samsung-srw", MtpConstants.FORMAT_TIFF, false);
+
+        addFileType("M3U", FILE_TYPE_M3U, "audio/x-mpegurl", MtpConstants.FORMAT_M3U_PLAYLIST, true);
+        addFileType("M3U", FILE_TYPE_M3U, "application/x-mpegurl", MtpConstants.FORMAT_M3U_PLAYLIST, false);
+        addFileType("PLS", FILE_TYPE_PLS, "audio/x-scpls", MtpConstants.FORMAT_PLS_PLAYLIST, true);
+        addFileType("WPL", FILE_TYPE_WPL, "application/vnd.ms-wpl", MtpConstants.FORMAT_WPL_PLAYLIST, true);
+        addFileType("M3U8", FILE_TYPE_HTTPLIVE, "application/vnd.apple.mpegurl");
+        addFileType("M3U8", FILE_TYPE_HTTPLIVE, "audio/mpegurl");
+        addFileType("M3U8", FILE_TYPE_HTTPLIVE, "audio/x-mpegurl");
+
+        addFileType("FL", FILE_TYPE_FL, "application/x-android-drm-fl");
+
+        addFileType("TXT", FILE_TYPE_TEXT, "text/plain", MtpConstants.FORMAT_TEXT, true);
+        addFileType("HTM", FILE_TYPE_HTML, "text/html", MtpConstants.FORMAT_HTML, true);
+        addFileType("HTML", FILE_TYPE_HTML, "text/html", MtpConstants.FORMAT_HTML, false);
+        addFileType("PDF", FILE_TYPE_PDF, "application/pdf");
+        addFileType("DOC", FILE_TYPE_MS_WORD, "application/msword", MtpConstants.FORMAT_MS_WORD_DOCUMENT, true);
+        addFileType("XLS", FILE_TYPE_MS_EXCEL, "application/vnd.ms-excel", MtpConstants.FORMAT_MS_EXCEL_SPREADSHEET, true);
+        addFileType("PPT", FILE_TYPE_MS_POWERPOINT, "application/vnd.ms-powerpoint", MtpConstants.FORMAT_MS_POWERPOINT_PRESENTATION, true);
+        addFileType("FLAC", FILE_TYPE_FLAC, "audio/flac", MtpConstants.FORMAT_FLAC, true);
+        addFileType("ZIP", FILE_TYPE_ZIP, "application/zip");
+        // These deliberately remap MPG/MPEG in sFileTypeMap to MPEG-2 PS.
+        addFileType("MPG", FILE_TYPE_MP2PS, "video/mp2p");
+        addFileType("MPEG", FILE_TYPE_MP2PS, "video/mp2p");
+    }
+
+    /** True for plain audio ids and for the MIDI-family ids. */
+    public static boolean isAudioFileType(int fileType) {
+        if (fileType >= FIRST_AUDIO_FILE_TYPE && fileType <= LAST_AUDIO_FILE_TYPE) {
+            return true;
+        }
+        return fileType >= FIRST_MIDI_FILE_TYPE && fileType <= LAST_MIDI_FILE_TYPE;
+    }
+
+    /** True for ids in either of the two video id ranges. */
+    public static boolean isVideoFileType(int fileType) {
+        if (fileType >= FIRST_VIDEO_FILE_TYPE && fileType <= LAST_VIDEO_FILE_TYPE) {
+            return true;
+        }
+        return fileType >= FIRST_VIDEO_FILE_TYPE2 && fileType <= LAST_VIDEO_FILE_TYPE2;
+    }
+
+    /** True for ordinary image ids and for raw camera image ids. */
+    public static boolean isImageFileType(int fileType) {
+        if (fileType >= FIRST_IMAGE_FILE_TYPE && fileType <= LAST_IMAGE_FILE_TYPE) {
+            return true;
+        }
+        return isRawImageFileType(fileType);
+    }
+
+    /** True only for the raw camera image id range. */
+    public static boolean isRawImageFileType(int fileType) {
+        return FIRST_RAW_IMAGE_FILE_TYPE <= fileType
+                && fileType <= LAST_RAW_IMAGE_FILE_TYPE;
+    }
+
+    /** True for the playlist id range. */
+    public static boolean isPlayListFileType(int fileType) {
+        return FIRST_PLAYLIST_FILE_TYPE <= fileType
+                && fileType <= LAST_PLAYLIST_FILE_TYPE;
+    }
+
+    /** True for the DRM id range. */
+    public static boolean isDrmFileType(int fileType) {
+        return FIRST_DRM_FILE_TYPE <= fileType
+                && fileType <= LAST_DRM_FILE_TYPE;
+    }
+
+ public static MediaFileType getFileType(String path) {
+ int lastDot = path.lastIndexOf('.');
+ if (lastDot < 0)
+ return null;
+ return sFileTypeMap.get(path.substring(lastDot + 1).toUpperCase(Locale.ROOT));
+ }
+
+    /** A MIME type is "media" if it maps to an audio, video, image or playlist type. */
+    public static boolean isMimeTypeMedia(String mimeType) {
+        int fileType = getFileTypeForMimeType(mimeType);
+        return isAudioFileType(fileType)
+                || isVideoFileType(fileType)
+                || isImageFileType(fileType)
+                || isPlayListFileType(fileType);
+    }
+
+    /**
+     * Generates a display title from a path: keeps only the component after
+     * the last '/', then strips a trailing ".ext" suffix. A leading dot (as
+     * in ".nomedia") is preserved, and a path ending in '/' is returned as-is.
+     */
+    public static String getFileTitle(String path) {
+        int slash = path.lastIndexOf('/');
+        if (slash >= 0 && slash + 1 < path.length()) {
+            path = path.substring(slash + 1);
+        }
+        int dot = path.lastIndexOf('.');
+        return (dot > 0) ? path.substring(0, dot) : path;
+    }
+
+    /** Returns the FILE_TYPE_* id for a MIME type, or 0 when unknown. */
+    public static int getFileTypeForMimeType(String mimeType) {
+        Integer fileType = sMimeTypeMap.get(mimeType);
+        if (fileType == null) {
+            return 0;
+        }
+        return fileType.intValue();
+    }
+
+    /** Returns the MIME type for a path, or null when its extension is unknown. */
+    public static String getMimeTypeForFile(String path) {
+        MediaFileType type = getFileType(path);
+        if (type == null) {
+            return null;
+        }
+        return type.mimeType;
+    }
+
+    /**
+     * Returns the MTP format code for a file: an exact MIME-type match wins,
+     * then the (upper-cased) file extension; otherwise FORMAT_UNDEFINED.
+     */
+    public static int getFormatCode(String fileName, String mimeType) {
+        if (mimeType != null) {
+            Integer format = sMimeTypeToFormatMap.get(mimeType);
+            if (format != null) {
+                return format.intValue();
+            }
+        }
+        int dot = fileName.lastIndexOf('.');
+        if (dot > 0) {
+            String extension = fileName.substring(dot + 1).toUpperCase(Locale.ROOT);
+            Integer format = sFileTypeToFormatMap.get(extension);
+            if (format != null) {
+                return format.intValue();
+            }
+        }
+        return MtpConstants.FORMAT_UNDEFINED;
+    }
+
+ public static String getMimeTypeForFormatCode(int formatCode) {
+ return sFormatToMimeTypeMap.get(formatCode);
+ }
+}
diff --git a/android/media/MediaFormat.java b/android/media/MediaFormat.java
new file mode 100644
index 00000000..ed5f7d84
--- /dev/null
+++ b/android/media/MediaFormat.java
@@ -0,0 +1,1006 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.IntDef;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Encapsulates the information describing the format of media data,
+ * be it audio or video.
+ *
+ * The format of the media data is specified as string/value pairs.
+ *
+ * Keys common to all audio/video formats, <b>all keys not marked optional are mandatory</b>:
+ *
+ * <table>
+ * <tr><th>Name</th><th>Value Type</th><th>Description</th></tr>
+ * <tr><td>{@link #KEY_MIME}</td><td>String</td><td>The type of the format.</td></tr>
+ * <tr><td>{@link #KEY_MAX_INPUT_SIZE}</td><td>Integer</td><td>optional, maximum size of a buffer of input data</td></tr>
+ * <tr><td>{@link #KEY_BIT_RATE}</td><td>Integer</td><td><b>encoder-only</b>, desired bitrate in bits/second</td></tr>
+ * </table>
+ *
+ * Video formats have the following keys:
+ * <table>
+ * <tr><th>Name</th><th>Value Type</th><th>Description</th></tr>
+ * <tr><td>{@link #KEY_WIDTH}</td><td>Integer</td><td></td></tr>
+ * <tr><td>{@link #KEY_HEIGHT}</td><td>Integer</td><td></td></tr>
+ * <tr><td>{@link #KEY_COLOR_FORMAT}</td><td>Integer</td><td>set by the user
+ *         for encoders, readable in the output format of decoders</td></tr>
+ * <tr><td>{@link #KEY_FRAME_RATE}</td><td>Integer or Float</td><td>required for <b>encoders</b>,
+ * optional for <b>decoders</b></td></tr>
+ * <tr><td>{@link #KEY_CAPTURE_RATE}</td><td>Integer</td><td></td></tr>
+ * <tr><td>{@link #KEY_I_FRAME_INTERVAL}</td><td>Integer (or Float)</td><td><b>encoder-only</b>,
+ * time-interval between key frames.
+ * Float support added in {@link android.os.Build.VERSION_CODES#N_MR1}</td></tr>
+ * <tr><td>{@link #KEY_INTRA_REFRESH_PERIOD}</td><td>Integer</td><td><b>encoder-only</b>, optional</td></tr>
+ * <tr><td>{@link #KEY_LATENCY}</td><td>Integer</td><td><b>encoder-only</b>, optional</td></tr>
+ * <tr><td>{@link #KEY_MAX_WIDTH}</td><td>Integer</td><td><b>decoder-only</b>, optional, max-resolution width</td></tr>
+ * <tr><td>{@link #KEY_MAX_HEIGHT}</td><td>Integer</td><td><b>decoder-only</b>, optional, max-resolution height</td></tr>
+ * <tr><td>{@link #KEY_REPEAT_PREVIOUS_FRAME_AFTER}</td><td>Long</td><td><b>encoder in surface-mode
+ * only</b>, optional</td></tr>
+ * <tr><td>{@link #KEY_PUSH_BLANK_BUFFERS_ON_STOP}</td><td>Integer(1)</td><td><b>decoder rendering
+ * to a surface only</b>, optional</td></tr>
+ * <tr><td>{@link #KEY_TEMPORAL_LAYERING}</td><td>String</td><td><b>encoder only</b>, optional,
+ * temporal-layering schema</td></tr>
+ * </table>
+ * Specify both {@link #KEY_MAX_WIDTH} and {@link #KEY_MAX_HEIGHT} to enable
+ * adaptive playback (seamless resolution change) for a video decoder that
+ * supports it ({@link MediaCodecInfo.CodecCapabilities#FEATURE_AdaptivePlayback}).
+ * The values are used as hints for the codec: they are the maximum expected
+ * resolution to prepare for. Depending on codec support, preparing for larger
+ * maximum resolution may require more memory even if that resolution is never
+ * reached. These fields have no effect for codecs that do not support adaptive
+ * playback.<br /><br />
+ *
+ * Audio formats have the following keys:
+ * <table>
+ * <tr><th>Name</th><th>Value Type</th><th>Description</th></tr>
+ * <tr><td>{@link #KEY_CHANNEL_COUNT}</td><td>Integer</td><td></td></tr>
+ * <tr><td>{@link #KEY_SAMPLE_RATE}</td><td>Integer</td><td></td></tr>
+ * <tr><td>{@link #KEY_PCM_ENCODING}</td><td>Integer</td><td>optional</td></tr>
+ * <tr><td>{@link #KEY_IS_ADTS}</td><td>Integer</td><td>optional, if <em>decoding</em> AAC audio content, setting this key to 1 indicates that each audio frame is prefixed by the ADTS header.</td></tr>
+ * <tr><td>{@link #KEY_AAC_PROFILE}</td><td>Integer</td><td><b>encoder-only</b>, optional, if content is AAC audio, specifies the desired profile.</td></tr>
+ * <tr><td>{@link #KEY_AAC_SBR_MODE}</td><td>Integer</td><td><b>encoder-only</b>, optional, if content is AAC audio, specifies the desired SBR mode.</td></tr>
+ * <tr><td>{@link #KEY_AAC_DRC_TARGET_REFERENCE_LEVEL}</td><td>Integer</td><td><b>decoder-only</b>, optional, if content is AAC audio, specifies the target reference level.</td></tr>
+ * <tr><td>{@link #KEY_AAC_ENCODED_TARGET_LEVEL}</td><td>Integer</td><td><b>decoder-only</b>, optional, if content is AAC audio, specifies the target reference level used at encoder.</td></tr>
+ * <tr><td>{@link #KEY_AAC_DRC_BOOST_FACTOR}</td><td>Integer</td><td><b>decoder-only</b>, optional, if content is AAC audio, specifies the DRC boost factor.</td></tr>
+ * <tr><td>{@link #KEY_AAC_DRC_ATTENUATION_FACTOR}</td><td>Integer</td><td><b>decoder-only</b>, optional, if content is AAC audio, specifies the DRC attenuation factor.</td></tr>
+ * <tr><td>{@link #KEY_AAC_DRC_HEAVY_COMPRESSION}</td><td>Integer</td><td><b>decoder-only</b>, optional, if content is AAC audio, specifies whether to use heavy compression.</td></tr>
+ * <tr><td>{@link #KEY_AAC_MAX_OUTPUT_CHANNEL_COUNT}</td><td>Integer</td><td><b>decoder-only</b>, optional, if content is AAC audio, specifies the maximum number of channels the decoder outputs.</td></tr>
+ * <tr><td>{@link #KEY_CHANNEL_MASK}</td><td>Integer</td><td>optional, a mask of audio channel assignments</td></tr>
+ * <tr><td>{@link #KEY_FLAC_COMPRESSION_LEVEL}</td><td>Integer</td><td><b>encoder-only</b>, optional, if content is FLAC audio, specifies the desired compression level.</td></tr>
+ * </table>
+ *
+ * Subtitle formats have the following keys:
+ * <table>
+ * <tr><td>{@link #KEY_MIME}</td><td>String</td><td>The type of the format.</td></tr>
+ * <tr><td>{@link #KEY_LANGUAGE}</td><td>String</td><td>The language of the content.</td></tr>
+ * </table>
+ */
+public final class MediaFormat {
+ public static final String MIMETYPE_VIDEO_VP8 = "video/x-vnd.on2.vp8";
+ public static final String MIMETYPE_VIDEO_VP9 = "video/x-vnd.on2.vp9";
+ public static final String MIMETYPE_VIDEO_AVC = "video/avc";
+ public static final String MIMETYPE_VIDEO_HEVC = "video/hevc";
+ public static final String MIMETYPE_VIDEO_MPEG4 = "video/mp4v-es";
+ public static final String MIMETYPE_VIDEO_H263 = "video/3gpp";
+ public static final String MIMETYPE_VIDEO_MPEG2 = "video/mpeg2";
+ public static final String MIMETYPE_VIDEO_RAW = "video/raw";
+ public static final String MIMETYPE_VIDEO_DOLBY_VISION = "video/dolby-vision";
+ public static final String MIMETYPE_VIDEO_SCRAMBLED = "video/scrambled";
+
+ public static final String MIMETYPE_AUDIO_AMR_NB = "audio/3gpp";
+ public static final String MIMETYPE_AUDIO_AMR_WB = "audio/amr-wb";
+ public static final String MIMETYPE_AUDIO_MPEG = "audio/mpeg";
+ public static final String MIMETYPE_AUDIO_AAC = "audio/mp4a-latm";
+ public static final String MIMETYPE_AUDIO_QCELP = "audio/qcelp";
+ public static final String MIMETYPE_AUDIO_VORBIS = "audio/vorbis";
+ public static final String MIMETYPE_AUDIO_OPUS = "audio/opus";
+ public static final String MIMETYPE_AUDIO_G711_ALAW = "audio/g711-alaw";
+ public static final String MIMETYPE_AUDIO_G711_MLAW = "audio/g711-mlaw";
+ public static final String MIMETYPE_AUDIO_RAW = "audio/raw";
+ public static final String MIMETYPE_AUDIO_FLAC = "audio/flac";
+ public static final String MIMETYPE_AUDIO_MSGSM = "audio/gsm";
+ public static final String MIMETYPE_AUDIO_AC3 = "audio/ac3";
+ public static final String MIMETYPE_AUDIO_EAC3 = "audio/eac3";
+ public static final String MIMETYPE_AUDIO_SCRAMBLED = "audio/scrambled";
+
+ /**
+ * MIME type for WebVTT subtitle data.
+ */
+ public static final String MIMETYPE_TEXT_VTT = "text/vtt";
+
+ /**
+ * MIME type for CEA-608 closed caption data.
+ */
+ public static final String MIMETYPE_TEXT_CEA_608 = "text/cea-608";
+
+ private Map<String, Object> mMap;
+
+ /**
+ * A key describing the mime type of the MediaFormat.
+ * The associated value is a string.
+ */
+ public static final String KEY_MIME = "mime";
+
+ /**
+ * A key describing the language of the content, using either ISO 639-1
+ * or 639-2/T codes. The associated value is a string.
+ */
+ public static final String KEY_LANGUAGE = "language";
+
+ /**
+ * A key describing the sample rate of an audio format.
+ * The associated value is an integer
+ */
+ public static final String KEY_SAMPLE_RATE = "sample-rate";
+
+ /**
+ * A key describing the number of channels in an audio format.
+ * The associated value is an integer
+ */
+ public static final String KEY_CHANNEL_COUNT = "channel-count";
+
+ /**
+ * A key describing the width of the content in a video format.
+ * The associated value is an integer
+ */
+ public static final String KEY_WIDTH = "width";
+
+ /**
+ * A key describing the height of the content in a video format.
+ * The associated value is an integer
+ */
+ public static final String KEY_HEIGHT = "height";
+
+ /**
+ * A key describing the maximum expected width of the content in a video
+ * decoder format, in case there are resolution changes in the video content.
+ * The associated value is an integer
+ */
+ public static final String KEY_MAX_WIDTH = "max-width";
+
+ /**
+ * A key describing the maximum expected height of the content in a video
+ * decoder format, in case there are resolution changes in the video content.
+ * The associated value is an integer
+ */
+ public static final String KEY_MAX_HEIGHT = "max-height";
+
+ /** A key describing the maximum size in bytes of a buffer of data
+ * described by this MediaFormat.
+ * The associated value is an integer
+ */
+ public static final String KEY_MAX_INPUT_SIZE = "max-input-size";
+
+ /**
+ * A key describing the average bitrate in bits/sec.
+ * The associated value is an integer
+ */
+ public static final String KEY_BIT_RATE = "bitrate";
+
+ /**
+ * A key describing the max bitrate in bits/sec.
+ * This is usually over a one-second sliding window (e.g. over any window of one second).
+ * The associated value is an integer
+ * @hide
+ */
+ public static final String KEY_MAX_BIT_RATE = "max-bitrate";
+
+ /**
+ * A key describing the color format of the content in a video format.
+ * Constants are declared in {@link android.media.MediaCodecInfo.CodecCapabilities}.
+ */
+ public static final String KEY_COLOR_FORMAT = "color-format";
+
+ /**
+ * A key describing the frame rate of a video format in frames/sec.
+ * The associated value is normally an integer when the value is used by the platform,
+ * but video codecs also accept float configuration values.
+ * Specifically, {@link MediaExtractor#getTrackFormat MediaExtractor} provides an integer
+ * value corresponding to the frame rate information of the track if specified and non-zero.
+ * Otherwise, this key is not present. {@link MediaCodec#configure MediaCodec} accepts both
+ * float and integer values. This represents the desired operating frame rate if the
+ * {@link #KEY_OPERATING_RATE} is not present and {@link #KEY_PRIORITY} is {@code 0}
+ * (realtime). For video encoders this value corresponds to the intended frame rate,
+ * although encoders are expected
+ * to support variable frame rate based on {@link MediaCodec.BufferInfo#presentationTimeUs
+ * buffer timestamp}. This key is not used in the {@code MediaCodec}
+ * {@link MediaCodec#getInputFormat input}/{@link MediaCodec#getOutputFormat output} formats,
+ * nor by {@link MediaMuxer#addTrack MediaMuxer}.
+ */
+ public static final String KEY_FRAME_RATE = "frame-rate";
+
+ /**
+ * A key describing the raw audio sample encoding/format.
+ *
+ * <p>The associated value is an integer, using one of the
+ * {@link AudioFormat}.ENCODING_PCM_ values.</p>
+ *
+ * <p>This is an optional key for audio decoders and encoders specifying the
+ * desired raw audio sample format during {@link MediaCodec#configure
+ * MediaCodec.configure(&hellip;)} call. Use {@link MediaCodec#getInputFormat
+ * MediaCodec.getInput}/{@link MediaCodec#getOutputFormat OutputFormat(&hellip;)}
+ * to confirm the actual format. For the PCM decoder this key specifies both
+ * input and output sample encodings.</p>
+ *
+ * <p>This key is also used by {@link MediaExtractor} to specify the sample
+ * format of audio data, if it is specified.</p>
+ *
+ * <p>If this key is missing, the raw audio sample format is signed 16-bit short.</p>
+ */
+ public static final String KEY_PCM_ENCODING = "pcm-encoding";
+
+ /**
+ * A key describing the capture rate of a video format in frames/sec.
+ * <p>
+ * When capture rate is different than the frame rate, it means that the
+ * video is acquired at a different rate than the playback, which produces
+ * slow motion or timelapse effect during playback. Application can use the
+ * value of this key to tell the relative speed ratio between capture and
+ * playback rates when the video was recorded.
+ * </p>
+ * <p>
+ * The associated value is an integer or a float.
+ * </p>
+ */
+ public static final String KEY_CAPTURE_RATE = "capture-rate";
+
+ /**
+ * A key describing the frequency of key frames expressed in seconds between key frames.
+ * <p>
+ * This key is used by video encoders.
+ * A negative value means no key frames are requested after the first frame.
+ * A zero value means a stream containing all key frames is requested.
+ * <p class=note>
+ * Most video encoders will convert this value of the number of non-key-frames between
+ * key-frames, using the {@linkplain #KEY_FRAME_RATE frame rate} information; therefore,
+ * if the actual frame rate differs (e.g. input frames are dropped or the frame rate
+ * changes), the <strong>time interval</strong> between key frames will not be the
+ * configured value.
+ * <p>
+ * The associated value is an integer (or float since
+ * {@link android.os.Build.VERSION_CODES#N_MR1}).
+ */
+ public static final String KEY_I_FRAME_INTERVAL = "i-frame-interval";
+
+ /**
+ * An optional key describing the period of intra refresh in frames. This is an
+ * optional parameter that applies only to video encoders. If encoder supports it
+ * ({@link MediaCodecInfo.CodecCapabilities#FEATURE_IntraRefresh}), the whole
+ * frame is completely refreshed after the specified period. Also for each frame,
+ * a fix subset of macroblocks must be intra coded which leads to more constant bitrate
+ * than inserting a key frame. This key is recommended for video streaming applications
+ * as it provides low-delay and good error-resilience. This key is ignored if the
+ * video encoder does not support the intra refresh feature. Use the output format to
+ * verify that this feature was enabled.
+ * The associated value is an integer.
+ */
+ public static final String KEY_INTRA_REFRESH_PERIOD = "intra-refresh-period";
+
+ /**
+ * A key describing the temporal layering schema. This is an optional parameter
+ * that applies only to video encoders. Use {@link MediaCodec#getOutputFormat}
+ * after {@link MediaCodec#configure configure} to query if the encoder supports
+ * the desired schema. Supported values are {@code webrtc.vp8.N-layer},
+ * {@code android.generic.N}, {@code android.generic.N+M} and {@code none}, where
+ * {@code N} denotes the total number of non-bidirectional layers (which must be at least 1)
+ * and {@code M} denotes the total number of bidirectional layers (which must be non-negative).
+ * <p class=note>{@code android.generic.*} schemas have been added in {@link
+ * android.os.Build.VERSION_CODES#N_MR1}.
+ * <p>
+ * The encoder may support fewer temporal layers, in which case the output format
+ * will contain the configured schema. If the encoder does not support temporal
+ * layering, the output format will not have an entry with this key.
+ * The associated value is a string.
+ */
+ public static final String KEY_TEMPORAL_LAYERING = "ts-schema";
+
+ /**
+ * A key describing the stride of the video bytebuffer layout.
+ * Stride (or row increment) is the difference between the index of a pixel
+ * and that of the pixel directly underneath. For YUV 420 formats, the
+ * stride corresponds to the Y plane; the stride of the U and V planes can
+ * be calculated based on the color format, though it is generally undefined
+ * and depends on the device and release.
+ * The associated value is an integer, representing number of bytes.
+ */
+ public static final String KEY_STRIDE = "stride";
+
+ /**
+ * A key describing the plane height of a multi-planar (YUV) video bytebuffer layout.
+ * Slice height (or plane height/vertical stride) is the number of rows that must be skipped
+ * to get from the top of the Y plane to the top of the U plane in the bytebuffer. In essence
+ * the offset of the U plane is sliceHeight * stride. The height of the U/V planes
+ * can be calculated based on the color format, though it is generally undefined
+ * and depends on the device and release.
+ * The associated value is an integer, representing number of rows.
+ */
+ public static final String KEY_SLICE_HEIGHT = "slice-height";
+
+ /**
+ * Applies only when configuring a video encoder in "surface-input" mode.
+ * The associated value is a long and gives the time in microseconds
+ * after which the frame previously submitted to the encoder will be
+ * repeated (once) if no new frame became available since.
+ */
+ public static final String KEY_REPEAT_PREVIOUS_FRAME_AFTER
+ = "repeat-previous-frame-after";
+
+ /**
+ * If specified when configuring a video decoder rendering to a surface,
+ * causes the decoder to output "blank", i.e. black frames to the surface
+ * when stopped to clear out any previously displayed contents.
+ * The associated value is an integer of value 1.
+ */
+ public static final String KEY_PUSH_BLANK_BUFFERS_ON_STOP
+ = "push-blank-buffers-on-shutdown";
+
+ /**
+ * A key describing the duration (in microseconds) of the content.
+ * The associated value is a long.
+ */
+ public static final String KEY_DURATION = "durationUs";
+
+ /**
+ * A key mapping to a value of 1 if the content is AAC audio and
+ * audio frames are prefixed with an ADTS header.
+ * The associated value is an integer (0 or 1).
+ * This key is only supported when _decoding_ content, it cannot
+ * be used to configure an encoder to emit ADTS output.
+ */
+ public static final String KEY_IS_ADTS = "is-adts";
+
+ /**
+ * A key describing the channel composition of audio content. This mask
+ * is composed of bits drawn from channel mask definitions in {@link android.media.AudioFormat}.
+ * The associated value is an integer.
+ */
+ public static final String KEY_CHANNEL_MASK = "channel-mask";
+
+ /**
+ * A key describing the AAC profile to be used (AAC audio formats only).
+ * Constants are declared in {@link android.media.MediaCodecInfo.CodecProfileLevel}.
+ */
+ public static final String KEY_AAC_PROFILE = "aac-profile";
+
+ /**
+ * A key describing the AAC SBR mode to be used (AAC audio formats only).
+ * The associated value is an integer and can be set to following values:
+ * <ul>
+ * <li>0 - no SBR should be applied</li>
+ * <li>1 - single rate SBR</li>
+ * <li>2 - double rate SBR</li>
+ * </ul>
+ * Note: If this key is not defined the default SBR mode for the desired AAC profile will
+ * be used.
+ * <p>This key is only used during encoding.
+ */
+ public static final String KEY_AAC_SBR_MODE = "aac-sbr-mode";
+
+ /**
+ * A key describing the maximum number of channels that can be output by the AAC decoder.
+ * By default, the decoder will output the same number of channels as present in the encoded
+ * stream, if supported. Set this value to limit the number of output channels, and use
+ * the downmix information in the stream, if available.
+ * <p>Values larger than the number of channels in the content to decode are ignored.
+ * <p>This key is only used during decoding.
+ */
+ public static final String KEY_AAC_MAX_OUTPUT_CHANNEL_COUNT = "aac-max-output-channel_count";
+
+ /**
+ * A key describing a gain to be applied so that the output loudness matches the
+ * Target Reference Level. This is typically used to normalize loudness across program items.
+ * The gain is derived as the difference between the Target Reference Level and the
+ * Program Reference Level. The latter can be given in the bitstream and indicates the actual
+ * loudness value of the program item.
+ * <p>The value is given as an integer value between
+ * 0 and 127, and is calculated as -0.25 * Target Reference Level in dBFS.
+ * Therefore, it represents the range of Full Scale (0 dBFS) to -31.75 dBFS.
+ * <p>This key is only used during decoding.
+ */
+ public static final String KEY_AAC_DRC_TARGET_REFERENCE_LEVEL = "aac-target-ref-level";
+
+ /**
+ * A key describing the target reference level that was assumed at the encoder for
+ * calculation of attenuation gains for clipping prevention. This information can be provided
+ * if it is known, otherwise a worst-case assumption is used.
+ * <p>The value is given as an integer value between
+ * 0 and 127, and is calculated as -0.25 * Target Reference Level in dBFS.
+ * Therefore, it represents the range of Full Scale (0 dBFS) to -31.75 dBFS.
+ * The default value is the worst-case assumption of 127.
+ * <p>The value is ignored when heavy compression is used (see
+ * {@link #KEY_AAC_DRC_HEAVY_COMPRESSION}).
+ * <p>This key is only used during decoding.
+ */
+ public static final String KEY_AAC_ENCODED_TARGET_LEVEL = "aac-encoded-target-level";
+
+ /**
+ * A key describing the boost factor allowing to adapt the dynamics of the output to the
+ * actual listening requirements. This relies on DRC gain sequences that can be transmitted in
+ * the encoded bitstream to be able to reduce the dynamics of the output signal upon request.
+ * This factor enables the user to select how much of the gains are applied.
+ * <p>Positive gains (boost) and negative gains (attenuation, see
+ * {@link #KEY_AAC_DRC_ATTENUATION_FACTOR}) can be controlled separately for a better match
+ * to different use-cases.
+ * <p>Typically, attenuation gains are sent for loud signal segments, and boost gains are sent
+ * for soft signal segments. If the output is listened to in a noisy environment, for example,
+ * the boost factor is used to enable the positive gains, i.e. to amplify soft signal segments
+ * beyond the noise floor. But for listening late at night, the attenuation
+ * factor is used to enable the negative gains, to prevent loud signal from surprising
+ * the listener. In applications which generally need a low dynamic range, both the boost factor
+ * and the attenuation factor are used in order to enable all DRC gains.
+ * <p>In order to prevent clipping, it is also recommended to apply the attenuation factors
+ * in case of a downmix and/or loudness normalization to high target reference levels.
+ * <p>Both the boost and the attenuation factor parameters are given as integer values
+ * between 0 and 127, representing the range of the factor of 0 (i.e. don't apply)
+ * to 1 (i.e. fully apply boost/attenuation factors respectively).
+ * <p>This key is only used during decoding.
+ */
+ public static final String KEY_AAC_DRC_BOOST_FACTOR = "aac-drc-boost-level";
+
+ /**
+ * A key describing the attenuation factor allowing to adapt the dynamics of the output to the
+ * actual listening requirements.
+ * See {@link #KEY_AAC_DRC_BOOST_FACTOR} for a description of the role of this attenuation
+ * factor and the value range.
+ * <p>This key is only used during decoding.
+ */
+ public static final String KEY_AAC_DRC_ATTENUATION_FACTOR = "aac-drc-cut-level";
+
+ /**
+ * A key describing the selection of the heavy compression profile for DRC.
+ * Two separate DRC gain sequences can be transmitted in one bitstream: MPEG-4 DRC light
+ * compression, and DVB-specific heavy compression. When selecting the application of the heavy
+ * compression, one of the sequences is selected:
+ * <ul>
+ * <li>0 enables light compression,</li>
+ * <li>1 enables heavy compression instead.</li>
+ * </ul>
+ * Note that only light compression offers the features of scaling of DRC gains
+ * (see {@link #KEY_AAC_DRC_BOOST_FACTOR} and {@link #KEY_AAC_DRC_ATTENUATION_FACTOR} for the
+ * boost and attenuation factors), and frequency-selective (multiband) DRC.
+ * Light compression usually contains clipping prevention for stereo downmixing while heavy
+ * compression, if additionally provided in the bitstream, is usually stronger, and contains
+ * clipping prevention for stereo and mono downmixing.
+ * <p>The default is light compression.
+ * <p>This key is only used during decoding.
+ */
+ public static final String KEY_AAC_DRC_HEAVY_COMPRESSION = "aac-drc-heavy-compression";
+
+ /**
+ * A key describing the FLAC compression level to be used (FLAC audio format only).
+ * The associated value is an integer ranging from 0 (fastest, least compression)
+ * to 8 (slowest, most compression).
+ */
+ public static final String KEY_FLAC_COMPRESSION_LEVEL = "flac-compression-level";
+
+ /**
+ * A key describing the encoding complexity.
+ * The associated value is an integer. These values are device and codec specific,
+ * but lower values generally result in faster and/or less power-hungry encoding.
+ *
+ * @see MediaCodecInfo.EncoderCapabilities#getComplexityRange()
+ */
+ public static final String KEY_COMPLEXITY = "complexity";
+
+ /**
+ * A key describing the desired encoding quality.
+ * The associated value is an integer. This key is only supported for encoders
+ * that are configured in constant-quality mode. These values are device and
+ * codec specific, but lower values generally result in more efficient
+ * (smaller-sized) encoding.
+ *
+ * @hide
+ *
+ * @see MediaCodecInfo.EncoderCapabilities#getQualityRange()
+ */
+ public static final String KEY_QUALITY = "quality";
+
+ /**
+ * A key describing the desired codec priority.
+ * <p>
+ * The associated value is an integer. Higher value means lower priority.
+ * <p>
+ * Currently, only two levels are supported:<br>
+ * 0: realtime priority - meaning that the codec shall support the given
+ * performance configuration (e.g. framerate) at realtime. This should
+ * only be used by media playback, capture, and possibly by realtime
+ * communication scenarios if best effort performance is not suitable.<br>
+ * 1: non-realtime priority (best effort).
+ * <p>
+ * This is a hint used at codec configuration and resource planning - to understand
+ * the realtime requirements of the application; however, due to the nature of
+ * media components, performance is not guaranteed.
+ *
+ */
+ public static final String KEY_PRIORITY = "priority";
+
+ /**
+ * A key describing the desired operating frame rate for video or sample rate for audio
+ * that the codec will need to operate at.
+ * <p>
+ * The associated value is an integer or a float representing frames-per-second or
+ * samples-per-second
+ * <p>
+ * This is used for cases like high-speed/slow-motion video capture, where the video encoder
+ * format contains the target playback rate (e.g. 30fps), but the component must be able to
+ * handle the high operating capture rate (e.g. 240fps).
+ * <p>
+ * This rate will be used by codec for resource planning and setting the operating points.
+ *
+ */
+ public static final String KEY_OPERATING_RATE = "operating-rate";
+
+ /**
+ * A key describing the desired profile to be used by an encoder.
+ * The associated value is an integer.
+ * Constants are declared in {@link MediaCodecInfo.CodecProfileLevel}.
+ * This key is used as a hint, and is only supported for codecs
+ * that specify a profile. Note: Codecs are free to use all the available
+ * coding tools at the specified profile.
+ *
+ * @see MediaCodecInfo.CodecCapabilities#profileLevels
+ */
+ public static final String KEY_PROFILE = "profile";
+
+ /**
+ * A key describing the desired profile to be used by an encoder.
+ * The associated value is an integer.
+ * Constants are declared in {@link MediaCodecInfo.CodecProfileLevel}.
+ * This key is used as a further hint when specifying a desired profile,
+ * and is only supported for codecs that specify a level.
+ * <p>
+ * This key is ignored if the {@link #KEY_PROFILE profile} is not specified.
+ *
+ * @see MediaCodecInfo.CodecCapabilities#profileLevels
+ */
+ public static final String KEY_LEVEL = "level";
+
+ /**
+ * An optional key describing the desired encoder latency in frames. This is an optional
+ * parameter that applies only to video encoders. If encoder supports it, it should output
+ * at least one output frame after being queued the specified number of frames. This key
+ * is ignored if the video encoder does not support the latency feature. Use the output
+ * format to verify that this feature was enabled and the actual value used by the encoder.
+ * <p>
+ * If the key is not specified, the default latency will be implementation specific.
+ * The associated value is an integer.
+ */
+ public static final String KEY_LATENCY = "latency";
+
+ /**
+ * A key describing the desired clockwise rotation on an output surface.
+ * This key is only used when the codec is configured using an output surface.
+ * The associated value is an integer, representing degrees. Supported values
+ * are 0, 90, 180 or 270. This is an optional field; if not specified, rotation
+ * defaults to 0.
+ *
+ * @see MediaCodecInfo.CodecCapabilities#profileLevels
+ */
+ public static final String KEY_ROTATION = "rotation-degrees";
+
+ /**
+ * A key describing the desired bitrate mode to be used by an encoder.
+ * Constants are declared in {@link MediaCodecInfo.CodecCapabilities}.
+ *
+ * @see MediaCodecInfo.EncoderCapabilities#isBitrateModeSupported(int)
+ */
+ public static final String KEY_BITRATE_MODE = "bitrate-mode";
+
+ /**
+ * A key describing the audio session ID of the AudioTrack associated
+ * to a tunneled video codec.
+ * The associated value is an integer.
+ *
+ * @see MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback
+ */
+ public static final String KEY_AUDIO_SESSION_ID = "audio-session-id";
+
+ /**
+ * A key for boolean AUTOSELECT behavior for the track. Tracks with AUTOSELECT=true
+ * are considered when automatically selecting a track without specific user
+ * choice, based on the current locale.
+ * This is currently only used for subtitle tracks, when the user selected
+ * 'Default' for the captioning locale.
+ * The associated value is an integer, where non-0 means TRUE. This is an optional
+ * field; if not specified, AUTOSELECT defaults to TRUE.
+ */
+ public static final String KEY_IS_AUTOSELECT = "is-autoselect";
+
+ /**
+ * A key for boolean DEFAULT behavior for the track. The track with DEFAULT=true is
+ * selected in the absence of a specific user choice.
+ * This is currently only used for subtitle tracks, when the user selected
+ * 'Default' for the captioning locale.
+ * The associated value is an integer, where non-0 means TRUE. This is an optional
+ * field; if not specified, DEFAULT is considered to be FALSE.
+ */
+ public static final String KEY_IS_DEFAULT = "is-default";
+
+
+ /**
+ * A key for the FORCED field for subtitle tracks. True if it is a
+ * forced subtitle track. Forced subtitle tracks are essential for the
+ * content and are shown even when the user turns off Captions. They
+ * are used for example to translate foreign/alien dialogs or signs.
+ * The associated value is an integer, where non-0 means TRUE. This is an
+ * optional field; if not specified, FORCED defaults to FALSE.
+ */
+ public static final String KEY_IS_FORCED_SUBTITLE = "is-forced-subtitle";
+
+ /** @hide */
+ public static final String KEY_IS_TIMED_TEXT = "is-timed-text";
+
+ // The following color aspect values must be in sync with the ones in HardwareAPI.h.
+ /**
+ * An optional key describing the color primaries, white point and
+ * luminance factors for video content.
+ *
+ * The associated value is an integer: 0 if unspecified, or one of the
+ * COLOR_STANDARD_ values.
+ */
+ public static final String KEY_COLOR_STANDARD = "color-standard";
+
+ /** BT.709 color chromaticity coordinates with KR = 0.2126, KB = 0.0722. */
+ public static final int COLOR_STANDARD_BT709 = 1;
+
+ /** BT.601 625 color chromaticity coordinates with KR = 0.299, KB = 0.114. */
+ public static final int COLOR_STANDARD_BT601_PAL = 2;
+
+ /** BT.601 525 color chromaticity coordinates with KR = 0.299, KB = 0.114. */
+ public static final int COLOR_STANDARD_BT601_NTSC = 4;
+
+ /** BT.2020 color chromaticity coordinates with KR = 0.2627, KB = 0.0593. */
+ public static final int COLOR_STANDARD_BT2020 = 6;
+
+ /** @hide */
+ @IntDef({
+ COLOR_STANDARD_BT709,
+ COLOR_STANDARD_BT601_PAL,
+ COLOR_STANDARD_BT601_NTSC,
+ COLOR_STANDARD_BT2020,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface ColorStandard {}
+
+ /**
+ * An optional key describing the opto-electronic transfer function used
+ * for the video content.
+ *
+ * The associated value is an integer: 0 if unspecified, or one of the
+ * COLOR_TRANSFER_ values.
+ */
+ public static final String KEY_COLOR_TRANSFER = "color-transfer";
+
+ /** Linear transfer characteristic curve. */
+ public static final int COLOR_TRANSFER_LINEAR = 1;
+
+ /** SMPTE 170M transfer characteristic curve used by BT.601/BT.709/BT.2020. This is the curve
+ * used by most non-HDR video content. */
+ public static final int COLOR_TRANSFER_SDR_VIDEO = 3;
+
+ /** SMPTE ST 2084 transfer function. This is used by some HDR video content. */
+ public static final int COLOR_TRANSFER_ST2084 = 6;
+
+ /** ARIB STD-B67 hybrid-log-gamma transfer function. This is used by some HDR video content. */
+ public static final int COLOR_TRANSFER_HLG = 7;
+
+ /** @hide */
+ @IntDef({
+ COLOR_TRANSFER_LINEAR,
+ COLOR_TRANSFER_SDR_VIDEO,
+ COLOR_TRANSFER_ST2084,
+ COLOR_TRANSFER_HLG,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface ColorTransfer {}
+
+ /**
+ * An optional key describing the range of the component values of the video content.
+ *
+ * The associated value is an integer: 0 if unspecified, or one of the
+ * COLOR_RANGE_ values.
+ */
+ public static final String KEY_COLOR_RANGE = "color-range";
+
+ /** Limited range. Y component values range from 16 to 235 for 8-bit content.
+ * Cr, Cb values range from 16 to 240 for 8-bit content.
+ * This is the default for video content. */
+ public static final int COLOR_RANGE_LIMITED = 2;
+
+ /** Full range. Y, Cr and Cb component values range from 0 to 255 for 8-bit content. */
+ public static final int COLOR_RANGE_FULL = 1;
+
+ /** @hide */
+ @IntDef({
+ COLOR_RANGE_LIMITED,
+ COLOR_RANGE_FULL,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface ColorRange {}
+
+ /**
+ * An optional key describing the static metadata of HDR (high-dynamic-range) video content.
+ *
+ * The associated value is a ByteBuffer. This buffer contains the raw contents of the
+ * Static Metadata Descriptor (including the descriptor ID) of an HDMI Dynamic Range and
+ * Mastering InfoFrame as defined by CTA-861.3. This key must be provided to video decoders
+ * for HDR video content unless this information is contained in the bitstream and the video
+ * decoder supports an HDR-capable profile. This key must be provided to video encoders for
+ * HDR video content.
+ */
+ public static final String KEY_HDR_STATIC_INFO = "hdr-static-info";
+
+ /**
+ * A key describing a unique ID for the content of a media track.
+ *
+ * <p>This key is used by {@link MediaExtractor}. Some extractors provide multiple encodings
+ * of the same track (e.g. float audio tracks for FLAC and WAV may be expressed as two
+ * tracks via MediaExtractor: a normal PCM track for backward compatibility, and a float PCM
+ * track for added fidelity. Similarly, Dolby Vision extractor may provide a baseline SDR
+ * version of a DV track.) This key can be used to identify which MediaExtractor tracks refer
+ * to the same underlying content.
+ * </p>
+ *
+ * The associated value is an integer.
+ */
+ public static final String KEY_TRACK_ID = "track-id";
+
+ /**
+ * A key describing the system id of the conditional access system used to scramble
+ * a media track.
+ * <p>
+ * This key is set by {@link MediaExtractor} if the track is scrambled with a conditional
+ * access system.
+ * <p>
+ * The associated value is an integer.
+ * @hide
+ */
+ public static final String KEY_CA_SYSTEM_ID = "ca-system-id";
+
+ /**
+ * A key describing the {@link MediaCas.Session} object associated with a media track.
+ * <p>
+ * This key is set by {@link MediaExtractor} if the track is scrambled with a conditional
+ * access system.
+ * <p>
+ * The associated value is a ByteBuffer.
+ * @hide
+ */
+ public static final String KEY_CA_SESSION_ID = "ca-session-id";
+
+ /* package private */ MediaFormat(Map<String, Object> map) {
+ // Wraps the caller's map directly — no defensive copy, so mutations to
+ // the argument after construction are visible through this format.
+ mMap = map;
+ }
+
+ /**
+ * Creates an empty MediaFormat
+ */
+ public MediaFormat() {
+ // Parameterize the map: the raw-type `new HashMap()` produced an
+ // unchecked assignment to the Map<String, Object> field.
+ mMap = new HashMap<String, Object>();
+ }
+
+ /* package private */ Map<String, Object> getMap() {
+ // Exposes the backing map itself (not a copy); callers within the
+ // package share and may mutate this format's state through it.
+ return mMap;
+ }
+
+ /**
+ * Returns true iff a key of the given name exists in the format.
+ */
+ public final boolean containsKey(String name) {
+ // Straight delegation to the backing map's containsKey.
+ return mMap.containsKey(name);
+ }
+
+ /**
+ * A key prefix used together with a {@link MediaCodecInfo.CodecCapabilities}
+ * feature name describing a required or optional feature for a codec capabilities
+ * query.
+ * The associated value is an integer, where non-0 value means the feature is
+ * requested to be present, while 0 value means the feature is requested to be not
+ * present.
+ * @see MediaCodecList#findDecoderForFormat
+ * @see MediaCodecList#findEncoderForFormat
+ * @see MediaCodecInfo.CodecCapabilities#isFormatSupported
+ *
+ * @hide
+ */
+ public static final String KEY_FEATURE_ = "feature-";
+
+ /**
+ * Returns the value of an integer key.
+ * A missing key raises NullPointerException; a value of another type
+ * raises ClassCastException.
+ */
+ public final int getInteger(String name) {
+ // Auto-unboxing performs the same Integer cast and null dereference
+ // as the explicit intValue() form.
+ return (Integer) mMap.get(name);
+ }
+
+ /**
+ * Returns the value of an integer key, or the default value if the
+ * key is missing or is for another type value.
+ * @hide
+ */
+ public final int getInteger(String name, int defaultValue) {
+ try {
+ return getInteger(name);
+ }
+ catch (NullPointerException e) { /* no such field */ }
+ catch (ClassCastException e) { /* field of different type */ }
+ return defaultValue;
+ }
+
+ /**
+ * Returns the value of a long key.
+ * A missing key raises NullPointerException; a value of another type
+ * raises ClassCastException.
+ */
+ public final long getLong(String name) {
+ // Auto-unboxing replaces the explicit longValue() call.
+ return (Long) mMap.get(name);
+ }
+
+ /**
+ * Returns the value of a float key.
+ * A missing key raises NullPointerException; a value of another type
+ * raises ClassCastException.
+ */
+ public final float getFloat(String name) {
+ // Auto-unboxing replaces the explicit floatValue() call.
+ return (Float) mMap.get(name);
+ }
+
+ /**
+ * Returns the value of a string key.
+ * Returns null if no mapping exists for the key; a mapping of another
+ * type results in a ClassCastException.
+ */
+ public final String getString(String name) {
+ return (String)mMap.get(name);
+ }
+
+ /**
+ * Returns the value of a ByteBuffer key.
+ * Returns null if no mapping exists for the key. The buffer is returned
+ * by reference, not copied.
+ */
+ public final ByteBuffer getByteBuffer(String name) {
+ return (ByteBuffer)mMap.get(name);
+ }
+
+ /**
+ * Returns whether a feature is to be enabled ({@code true}) or disabled
+ * ({@code false}).
+ *
+ * @param feature the name of a {@link MediaCodecInfo.CodecCapabilities} feature.
+ *
+ * @throws IllegalArgumentException if the feature was neither set to be enabled
+ * nor to be disabled.
+ */
+ public boolean getFeatureEnabled(String feature) {
+ final Integer value = (Integer) mMap.get(KEY_FEATURE_ + feature);
+ if (value == null) {
+ throw new IllegalArgumentException("feature is not specified");
+ }
+ // Any non-zero stored value counts as enabled.
+ return value.intValue() != 0;
+ }
+
+ /**
+ * Sets the value of an integer key.
+ * Any previous mapping for the key is replaced.
+ */
+ public final void setInteger(String name, int value) {
+ // Boxed via Integer.valueOf, which may reuse cached instances.
+ mMap.put(name, Integer.valueOf(value));
+ }
+
+ /**
+ * Sets the value of a long key.
+ * Any previous mapping for the key is replaced.
+ */
+ public final void setLong(String name, long value) {
+ // Boxed via Long.valueOf, which may reuse cached instances.
+ mMap.put(name, Long.valueOf(value));
+ }
+
+ /**
+ * Sets the value of a float key.
+ * Any previous mapping for the key is replaced.
+ */
+ public final void setFloat(String name, float value) {
+ // Use valueOf for consistency with setInteger/setLong; the
+ // Float(float) boxing constructor is deprecated.
+ mMap.put(name, Float.valueOf(value));
+ }
+
+ /**
+ * Sets the value of a string key.
+ * The value is stored as given; any previous mapping for the key is replaced.
+ */
+ public final void setString(String name, String value) {
+ mMap.put(name, value);
+ }
+
+ /**
+ * Sets the value of a ByteBuffer key.
+ * The buffer is stored by reference (not copied); later changes to it are
+ * visible through this format.
+ */
+ public final void setByteBuffer(String name, ByteBuffer bytes) {
+ mMap.put(name, bytes);
+ }
+
+ /**
+ * Sets whether a feature is to be enabled ({@code true}) or disabled
+ * ({@code false}).
+ *
+ * If {@code enabled} is {@code true}, the feature is requested to be present.
+ * Otherwise, the feature is requested to be not present.
+ *
+ * @param feature the name of a {@link MediaCodecInfo.CodecCapabilities} feature.
+ *
+ * @see MediaCodecList#findDecoderForFormat
+ * @see MediaCodecList#findEncoderForFormat
+ * @see MediaCodecInfo.CodecCapabilities#isFormatSupported
+ */
+ public void setFeatureEnabled(String feature, boolean enabled) {
+ // Features are stored as prefixed integer entries: 1 = requested
+ // present, 0 = requested absent.
+ final int flag = enabled ? 1 : 0;
+ setInteger(KEY_FEATURE_ + feature, flag);
+ }
+
+ /**
+ * Creates a minimal audio format.
+ * @param mime The mime type of the content.
+ * @param sampleRate The sampling rate of the content.
+ * @param channelCount The number of audio channels in the content.
+ */
+ public static final MediaFormat createAudioFormat(
+ String mime,
+ int sampleRate,
+ int channelCount) {
+ // Populate only the three entries every audio track needs.
+ final MediaFormat audioFormat = new MediaFormat();
+ audioFormat.setString(KEY_MIME, mime);
+ audioFormat.setInteger(KEY_SAMPLE_RATE, sampleRate);
+ audioFormat.setInteger(KEY_CHANNEL_COUNT, channelCount);
+ return audioFormat;
+ }
+
+ /**
+ * Creates a minimal subtitle format.
+ * @param mime The mime type of the content.
+ * @param language The language of the content, using either ISO 639-1 or 639-2/T
+ * codes. Specify null or "und" if language information is only included
+ * in the content. (This will also work if there are multiple language
+ * tracks in the content.)
+ */
+ public static final MediaFormat createSubtitleFormat(
+ String mime,
+ String language) {
+ // Populate only the mime type and language every subtitle track needs.
+ final MediaFormat subtitleFormat = new MediaFormat();
+ subtitleFormat.setString(KEY_MIME, mime);
+ subtitleFormat.setString(KEY_LANGUAGE, language);
+ return subtitleFormat;
+ }
+
+ /**
+ * Creates a minimal video format.
+ * @param mime The mime type of the content.
+ * @param width The width of the content (in pixels)
+ * @param height The height of the content (in pixels)
+ */
+ public static final MediaFormat createVideoFormat(
+ String mime,
+ int width,
+ int height) {
+ // Populate only the three entries every video track needs.
+ final MediaFormat videoFormat = new MediaFormat();
+ videoFormat.setString(KEY_MIME, mime);
+ videoFormat.setInteger(KEY_WIDTH, width);
+ videoFormat.setInteger(KEY_HEIGHT, height);
+ return videoFormat;
+ }
+
+ // Delegates to the backing map's toString; entry order is unspecified
+ // and this representation is not a stable, parseable format.
+ @Override
+ public String toString() {
+ return mMap.toString();
+ }
+}
diff --git a/android/media/MediaHTTPConnection.java b/android/media/MediaHTTPConnection.java
new file mode 100644
index 00000000..aae1f517
--- /dev/null
+++ b/android/media/MediaHTTPConnection.java
@@ -0,0 +1,418 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.net.NetworkUtils;
+import android.os.IBinder;
+import android.os.StrictMode;
+import android.util.Log;
+
+import java.io.BufferedInputStream;
+import java.io.InputStream;
+import java.io.IOException;
+import java.net.CookieHandler;
+import java.net.CookieManager;
+import java.net.Proxy;
+import java.net.URL;
+import java.net.HttpURLConnection;
+import java.net.MalformedURLException;
+import java.net.NoRouteToHostException;
+import java.net.ProtocolException;
+import java.net.UnknownServiceException;
+import java.util.HashMap;
+import java.util.Map;
+
+import static android.media.MediaPlayer.MEDIA_ERROR_UNSUPPORTED;
+
+/**
+ * Binder object that performs HTTP/HTTPS transfers on behalf of the native
+ * media framework, exposing the downloaded bytes through shared memory.
+ * Handles byte-range requests itself, and follows redirects manually when
+ * cross-domain redirects are disallowed.
+ * @hide
+ */
+public class MediaHTTPConnection extends IMediaHTTPConnection.Stub {
+    private static final String TAG = "MediaHTTPConnection";
+    private static final boolean VERBOSE = false;
+
+    // connection timeout - 30 sec
+    private static final int CONNECT_TIMEOUT_MS = 30 * 1000;
+
+    // Offset the open stream is positioned at; -1 when no stream is open.
+    private long mCurrentOffset = -1;
+    // Effective URL; updated as redirects are followed.
+    private URL mURL = null;
+    // Caller-supplied request headers, minus internal "android-..." directives.
+    private Map<String, String> mHeaders = null;
+    private HttpURLConnection mConnection = null;
+    // Size of the full content if known, -1 otherwise.
+    private long mTotalSize = -1;
+    private InputStream mInputStream = null;
+
+    private boolean mAllowCrossDomainRedirect = true;
+    private boolean mAllowCrossProtocolRedirect = true;
+
+    // from com.squareup.okhttp.internal.http
+    private final static int HTTP_TEMP_REDIRECT = 307;
+    private final static int MAX_REDIRECTS = 20;
+
+    public MediaHTTPConnection() {
+        // MediaHTTPService normally installs a default CookieManager before
+        // creating connections, so a missing handler is unexpected.
+        CookieHandler cookieHandler = CookieHandler.getDefault();
+        if (cookieHandler == null) {
+            Log.w(TAG, "MediaHTTPConnection: Unexpected. No CookieHandler found.");
+        }
+
+        native_setup();
+    }
+
+    /**
+     * Records the target URI and request headers; no network I/O happens
+     * until the first read triggers seekTo().
+     *
+     * @return the shared-memory binder the native side reads data from, or
+     *         null if the URI is malformed.
+     */
+    @Override
+    public IBinder connect(String uri, String headers) {
+        if (VERBOSE) {
+            Log.d(TAG, "connect: uri=" + uri + ", headers=" + headers);
+        }
+
+        try {
+            disconnect();
+            mAllowCrossDomainRedirect = true;
+            mURL = new URL(uri);
+            mHeaders = convertHeaderStringToMap(headers);
+        } catch (MalformedURLException e) {
+            return null;
+        }
+
+        return native_getIMemory();
+    }
+
+    // Accepts numeric values (non-zero == true) as well as "true"/"yes".
+    private boolean parseBoolean(String val) {
+        try {
+            return Long.parseLong(val) != 0;
+        } catch (NumberFormatException e) {
+            return "true".equalsIgnoreCase(val) ||
+                "yes".equalsIgnoreCase(val);
+        }
+    }
+
+    /* returns true iff header is internal */
+    private boolean filterOutInternalHeaders(String key, String val) {
+        if ("android-allow-cross-domain-redirect".equalsIgnoreCase(key)) {
+            mAllowCrossDomainRedirect = parseBoolean(val);
+            // cross-protocol redirects are also controlled by this flag
+            mAllowCrossProtocolRedirect = mAllowCrossDomainRedirect;
+        } else {
+            return false;
+        }
+        return true;
+    }
+
+    // Headers arrive as one CRLF-separated "Key: value" string; internal
+    // directives are consumed here rather than forwarded to the server.
+    private Map<String, String> convertHeaderStringToMap(String headers) {
+        HashMap<String, String> map = new HashMap<String, String>();
+
+        String[] pairs = headers.split("\r\n");
+        for (String pair : pairs) {
+            int colonPos = pair.indexOf(":");
+            if (colonPos >= 0) {
+                String key = pair.substring(0, colonPos);
+                String val = pair.substring(colonPos + 1);
+
+                if (!filterOutInternalHeaders(key, val)) {
+                    map.put(key, val);
+                }
+            }
+        }
+
+        return map;
+    }
+
+    /** Closes any open stream/connection and forgets the target URL. */
+    @Override
+    public void disconnect() {
+        teardownConnection();
+        mHeaders = null;
+        mURL = null;
+    }
+
+    // Closes the stream and connection but keeps URL/headers so the
+    // connection can be re-established by a later seekTo().
+    private void teardownConnection() {
+        if (mConnection != null) {
+            if (mInputStream != null) {
+                try {
+                    mInputStream.close();
+                } catch (IOException e) {
+                    // best effort; nothing useful to do if close fails
+                }
+                mInputStream = null;
+            }
+
+            mConnection.disconnect();
+            mConnection = null;
+
+            mCurrentOffset = -1;
+        }
+    }
+
+    // True for "localhost" or any numeric loopback address (e.g. 127.0.0.1).
+    private static final boolean isLocalHost(URL url) {
+        if (url == null) {
+            return false;
+        }
+
+        String host = url.getHost();
+
+        if (host == null) {
+            return false;
+        }
+
+        try {
+            if (host.equalsIgnoreCase("localhost")) {
+                return true;
+            }
+            if (NetworkUtils.numericToInetAddress(host).isLoopbackAddress()) {
+                return true;
+            }
+        } catch (IllegalArgumentException iex) {
+            // host was not a numeric address; fall through to "not local"
+        }
+        return false;
+    }
+
+    /**
+     * (Re)establishes the connection positioned at {@code offset}, following
+     * up to MAX_REDIRECTS redirects manually when HttpURLConnection is not
+     * allowed to (i.e. when cross-domain redirects are disabled).
+     *
+     * @throws IOException on any failure; connection state is torn down and
+     *         mCurrentOffset reset to -1 before rethrowing.
+     */
+    private void seekTo(long offset) throws IOException {
+        teardownConnection();
+
+        try {
+            int response;
+            int redirectCount = 0;
+
+            URL url = mURL;
+
+            // do not use any proxy for localhost (127.0.0.1)
+            boolean noProxy = isLocalHost(url);
+
+            while (true) {
+                if (noProxy) {
+                    mConnection = (HttpURLConnection)url.openConnection(Proxy.NO_PROXY);
+                } else {
+                    mConnection = (HttpURLConnection)url.openConnection();
+                }
+                mConnection.setConnectTimeout(CONNECT_TIMEOUT_MS);
+
+                // handle redirects ourselves if we do not allow cross-domain redirect
+                mConnection.setInstanceFollowRedirects(mAllowCrossDomainRedirect);
+
+                if (mHeaders != null) {
+                    for (Map.Entry<String, String> entry : mHeaders.entrySet()) {
+                        mConnection.setRequestProperty(
+                                entry.getKey(), entry.getValue());
+                    }
+                }
+
+                if (offset > 0) {
+                    mConnection.setRequestProperty(
+                            "Range", "bytes=" + offset + "-");
+                }
+
+                response = mConnection.getResponseCode();
+                if (response != HttpURLConnection.HTTP_MULT_CHOICE &&
+                        response != HttpURLConnection.HTTP_MOVED_PERM &&
+                        response != HttpURLConnection.HTTP_MOVED_TEMP &&
+                        response != HttpURLConnection.HTTP_SEE_OTHER &&
+                        response != HTTP_TEMP_REDIRECT) {
+                    // not a redirect, or redirect handled by HttpURLConnection
+                    break;
+                }
+
+                if (++redirectCount > MAX_REDIRECTS) {
+                    throw new NoRouteToHostException("Too many redirects: " + redirectCount);
+                }
+
+                String method = mConnection.getRequestMethod();
+                if (response == HTTP_TEMP_REDIRECT &&
+                        !method.equals("GET") && !method.equals("HEAD")) {
+                    // "If the 307 status code is received in response to a
+                    // request other than GET or HEAD, the user agent MUST NOT
+                    // automatically redirect the request"
+                    throw new NoRouteToHostException("Invalid redirect");
+                }
+                String location = mConnection.getHeaderField("Location");
+                if (location == null) {
+                    throw new NoRouteToHostException("Invalid redirect");
+                }
+                url = new URL(mURL /* TRICKY: don't use url! */, location);
+                if (!url.getProtocol().equals("https") &&
+                        !url.getProtocol().equals("http")) {
+                    throw new NoRouteToHostException("Unsupported protocol redirect");
+                }
+                boolean sameProtocol = mURL.getProtocol().equals(url.getProtocol());
+                if (!mAllowCrossProtocolRedirect && !sameProtocol) {
+                    throw new NoRouteToHostException("Cross-protocol redirects are disallowed");
+                }
+                boolean sameHost = mURL.getHost().equals(url.getHost());
+                if (!mAllowCrossDomainRedirect && !sameHost) {
+                    throw new NoRouteToHostException("Cross-domain redirects are disallowed");
+                }
+
+                if (response != HTTP_TEMP_REDIRECT) {
+                    // update effective URL, unless it is a Temporary Redirect
+                    mURL = url;
+                }
+            }
+
+            if (mAllowCrossDomainRedirect) {
+                // remember the current, potentially redirected URL if redirects
+                // were handled by HttpURLConnection
+                mURL = mConnection.getURL();
+            }
+
+            if (response == HttpURLConnection.HTTP_PARTIAL) {
+                // Partial content, we cannot just use getContentLength
+                // because what we want is not just the length of the range
+                // returned but the size of the full content if available.
+
+                String contentRange =
+                        mConnection.getHeaderField("Content-Range");
+
+                mTotalSize = -1;
+                if (contentRange != null) {
+                    // format is "bytes xxx-yyy/zzz"
+                    // where "zzz" is the total number of bytes of the
+                    // content or '*' if unknown.
+
+                    int lastSlashPos = contentRange.lastIndexOf('/');
+                    if (lastSlashPos >= 0) {
+                        String total =
+                                contentRange.substring(lastSlashPos + 1);
+
+                        try {
+                            mTotalSize = Long.parseLong(total);
+                        } catch (NumberFormatException e) {
+                            // total was '*' or malformed; size stays unknown
+                        }
+                    }
+                }
+            } else if (response != HttpURLConnection.HTTP_OK) {
+                throw new IOException();
+            } else {
+                mTotalSize = mConnection.getContentLength();
+            }
+
+            if (offset > 0 && response != HttpURLConnection.HTTP_PARTIAL) {
+                // Some servers simply ignore "Range" requests and serve
+                // data from the start of the content.
+                throw new ProtocolException();
+            }
+
+            mInputStream =
+                new BufferedInputStream(mConnection.getInputStream());
+
+            mCurrentOffset = offset;
+        } catch (IOException e) {
+            mTotalSize = -1;
+            teardownConnection();
+            mCurrentOffset = -1;
+
+            throw e;
+        }
+    }
+
+    // Binder entry point; delegates to native code, which fills the shared
+    // memory returned by connect().
+    @Override
+    public int readAt(long offset, int size) {
+        return native_readAt(offset, size);
+    }
+
+    // Presumably invoked from the JNI layer (it has no Java callers in this
+    // file — TODO confirm). Returns bytes read (0 at EOS) or a negative
+    // error code.
+    private int readAt(long offset, byte[] data, int size) {
+        // NOTE(review): permitAll presumably lifts the thread's StrictMode
+        // restrictions so network I/O is allowed here — confirm.
+        StrictMode.ThreadPolicy policy =
+                new StrictMode.ThreadPolicy.Builder().permitAll().build();
+
+        StrictMode.setThreadPolicy(policy);
+
+        try {
+            if (offset != mCurrentOffset) {
+                seekTo(offset);
+            }
+
+            int n = mInputStream.read(data, 0, size);
+
+            if (n == -1) {
+                // InputStream signals EOS using a -1 result, our semantics
+                // are to return a 0-length read.
+                n = 0;
+            }
+
+            mCurrentOffset += n;
+
+            if (VERBOSE) {
+                Log.d(TAG, "readAt " + offset + " / " + size + " => " + n);
+            }
+
+            return n;
+        } catch (ProtocolException e) {
+            Log.w(TAG, "readAt " + offset + " / " + size + " => " + e);
+            return MEDIA_ERROR_UNSUPPORTED;
+        } catch (NoRouteToHostException e) {
+            Log.w(TAG, "readAt " + offset + " / " + size + " => " + e);
+            return MEDIA_ERROR_UNSUPPORTED;
+        } catch (UnknownServiceException e) {
+            Log.w(TAG, "readAt " + offset + " / " + size + " => " + e);
+            return MEDIA_ERROR_UNSUPPORTED;
+        } catch (IOException e) {
+            if (VERBOSE) {
+                Log.d(TAG, "readAt " + offset + " / " + size + " => -1");
+            }
+            return -1;
+        } catch (Exception e) {
+            if (VERBOSE) {
+                Log.d(TAG, "unknown exception " + e);
+                Log.d(TAG, "readAt " + offset + " / " + size + " => -1");
+            }
+            return -1;
+        }
+    }
+
+    /** Total content size in bytes, or -1 if unknown or unreachable. */
+    @Override
+    public long getSize() {
+        if (mConnection == null) {
+            try {
+                // connect lazily so the size can be reported
+                seekTo(0);
+            } catch (IOException e) {
+                return -1;
+            }
+        }
+
+        return mTotalSize;
+    }
+
+    /** Server-reported content type; "application/octet-stream" on failure. */
+    @Override
+    public String getMIMEType() {
+        if (mConnection == null) {
+            try {
+                seekTo(0);
+            } catch (IOException e) {
+                return "application/octet-stream";
+            }
+        }
+
+        return mConnection.getContentType();
+    }
+
+    // NOTE(review): throws NullPointerException if called before connect()
+    // or after disconnect() — presumably callers never do; confirm.
+    @Override
+    public String getUri() {
+        return mURL.toString();
+    }
+
+    @Override
+    protected void finalize() {
+        // release the native peer created by native_setup()
+        native_finalize();
+    }
+
+    private static native final void native_init();
+    private native final void native_setup();
+    private native final void native_finalize();
+
+    private native final IBinder native_getIMemory();
+    private native final int native_readAt(long offset, int size);
+
+    static {
+        System.loadLibrary("media_jni");
+        native_init();
+    }
+
+    // Presumably holds the native peer's pointer for JNI; never touched
+    // from Java code in this file.
+    private long mNativeContext;
+
+}
diff --git a/android/media/MediaHTTPService.java b/android/media/MediaHTTPService.java
new file mode 100644
index 00000000..3a0e58a1
--- /dev/null
+++ b/android/media/MediaHTTPService.java
@@ -0,0 +1,101 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.os.IBinder;
+import android.util.Log;
+
+import java.net.CookieHandler;
+import java.net.CookieManager;
+import java.net.CookieStore;
+import java.net.HttpCookie;
+import java.util.List;
+
+/**
+ * Binder service that hands out {@link MediaHTTPConnection} instances to the
+ * native media framework, installing a process-wide cookie store (seeded with
+ * any bootstrapping cookies) the first time a connection is requested.
+ * @hide
+ */
+public class MediaHTTPService extends IMediaHTTPService.Stub {
+    private static final String TAG = "MediaHTTPService";
+
+    /** Bootstrapping cookies supplied at construction; may be null. */
+    private final List<HttpCookie> mCookies;
+
+    /**
+     * Dedicated monitor guarding the one-time cookie store setup.
+     * NOTE: the previous code synchronized on a mutable {@code Boolean}
+     * field and then reassigned it; after reassignment callers locked the
+     * globally interned {@code Boolean.TRUE}, so early and late callers used
+     * different monitors. A final lock object avoids both problems.
+     */
+    private final Object mCookieStoreInitLock = new Object();
+
+    /** Guarded by {@link #mCookieStoreInitLock}. */
+    private boolean mCookieStoreInitialized = false;
+
+    public MediaHTTPService(List<HttpCookie> cookies) {
+        mCookies = cookies;
+        Log.v(TAG, "MediaHTTPService(" + this + "): Cookies: " + cookies);
+    }
+
+    public IMediaHTTPConnection makeHTTPConnection() {
+
+        synchronized (mCookieStoreInitLock) {
+            // Only need to do it once for all connections
+            if ( !mCookieStoreInitialized )  {
+                CookieHandler cookieHandler = CookieHandler.getDefault();
+                if (cookieHandler == null) {
+                    // Install a process-wide default so connections share cookies.
+                    cookieHandler = new CookieManager();
+                    CookieHandler.setDefault(cookieHandler);
+                    Log.v(TAG, "makeHTTPConnection: CookieManager created: " + cookieHandler);
+                } else {
+                    Log.v(TAG, "makeHTTPConnection: CookieHandler (" + cookieHandler + ") exists.");
+                }
+
+                // Applying the bootstrapping cookies
+                if ( mCookies != null ) {
+                    if ( cookieHandler instanceof CookieManager ) {
+                        CookieManager cookieManager = (CookieManager)cookieHandler;
+                        CookieStore store = cookieManager.getCookieStore();
+                        for ( HttpCookie cookie : mCookies ) {
+                            try {
+                                store.add(null, cookie);
+                            } catch ( Exception e ) {
+                                // A malformed cookie must not abort the rest.
+                                Log.v(TAG, "makeHTTPConnection: CookieStore.add" + e);
+                            }
+                            //for extended debugging when needed
+                            //Log.v(TAG, "MediaHTTPConnection adding Cookie[" + cookie.getName() +
+                            //        "]: " + cookie);
+                        }
+                    } else {
+                        Log.w(TAG, "makeHTTPConnection: The installed CookieHandler is not a "
+                                + "CookieManager. Can’t add the provided cookies to the cookie "
+                                + "store.");
+                    }
+                }   // mCookies
+
+                mCookieStoreInitialized = true;
+
+                Log.v(TAG, "makeHTTPConnection(" + this + "): cookieHandler: " + cookieHandler +
+                        " Cookies: " + mCookies);
+            }   // mCookieStoreInitialized
+        }  // synchronized
+
+        return new MediaHTTPConnection();
+    }
+
+    /* package private */ static IBinder createHttpServiceBinderIfNecessary(
+            String path) {
+        return createHttpServiceBinderIfNecessary(path, null);
+    }
+
+    // when cookies are provided
+    static IBinder createHttpServiceBinderIfNecessary(
+            String path, List<HttpCookie> cookies) {
+        if (path.startsWith("http://") || path.startsWith("https://")) {
+            return (new MediaHTTPService(cookies)).asBinder();
+        } else if (path.startsWith("widevine://")) {
+            Log.d(TAG, "Widevine classic is no longer supported");
+        }
+
+        return null;
+    }
+}
diff --git a/android/media/MediaInserter.java b/android/media/MediaInserter.java
new file mode 100644
index 00000000..dd069218
--- /dev/null
+++ b/android/media/MediaInserter.java
@@ -0,0 +1,95 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.content.ContentProviderClient;
+import android.content.ContentValues;
+import android.net.Uri;
+import android.os.RemoteException;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * A MediaScanner helper class which enables us to do lazy insertion on the
+ * given provider. This class manages buffers internally and flushes when they
+ * are full. Note that you should call flushAll() after using this class.
+ * {@hide}
+ */
+public class MediaInserter {
+    // Rows waiting to be bulk-inserted, keyed by destination table Uri.
+    private final HashMap<Uri, List<ContentValues>> mRowMap =
+            new HashMap<Uri, List<ContentValues>>();
+    // Rows that must reach the provider before any normal row does.
+    private final HashMap<Uri, List<ContentValues>> mPriorityRowMap =
+            new HashMap<Uri, List<ContentValues>>();
+
+    private final ContentProviderClient mProvider;
+    // Per-table row count at which a buffer is flushed.
+    private final int mBufferSizePerUri;
+
+    public MediaInserter(ContentProviderClient provider, int bufferSizePerUri) {
+        mProvider = provider;
+        mBufferSizePerUri = bufferSizePerUri;
+    }
+
+    /** Buffers one row for {@code tableUri}; may trigger a flush when full. */
+    public void insert(Uri tableUri, ContentValues values) throws RemoteException {
+        insert(tableUri, values, false);
+    }
+
+    /** Buffers one priority row for {@code tableUri}; may trigger a flush when full. */
+    public void insertwithPriority(Uri tableUri, ContentValues values) throws RemoteException {
+        insert(tableUri, values, true);
+    }
+
+    private void insert(Uri tableUri, ContentValues values, boolean priority)
+            throws RemoteException {
+        HashMap<Uri, List<ContentValues>> rowmap = priority ? mPriorityRowMap : mRowMap;
+        List<ContentValues> list = rowmap.get(tableUri);
+        if (list == null) {
+            list = new ArrayList<ContentValues>();
+            rowmap.put(tableUri, list);
+        }
+        // Copy: callers commonly reuse a single ContentValues instance.
+        list.add(new ContentValues(values));
+        if (list.size() >= mBufferSizePerUri) {
+            // Priority rows always hit the provider before the full buffer.
+            flushAllPriority();
+            flush(tableUri, list);
+        }
+    }
+
+    /** Flushes every buffered row (priority rows first). Call when done inserting. */
+    public void flushAll() throws RemoteException {
+        flushAllPriority();
+        // entrySet avoids a second lookup per key (vs. keySet + get).
+        for (Map.Entry<Uri, List<ContentValues>> entry : mRowMap.entrySet()) {
+            flush(entry.getKey(), entry.getValue());
+        }
+        mRowMap.clear();
+    }
+
+    private void flushAllPriority() throws RemoteException {
+        for (Map.Entry<Uri, List<ContentValues>> entry : mPriorityRowMap.entrySet()) {
+            flush(entry.getKey(), entry.getValue());
+        }
+        mPriorityRowMap.clear();
+    }
+
+    // Sends one buffered list to the provider in a single bulkInsert and
+    // empties it; no-op for an empty list.
+    private void flush(Uri tableUri, List<ContentValues> list) throws RemoteException {
+        if (!list.isEmpty()) {
+            ContentValues[] valuesArray = list.toArray(new ContentValues[list.size()]);
+            mProvider.bulkInsert(tableUri, valuesArray);
+            list.clear();
+        }
+    }
+}
diff --git a/android/media/MediaMetadata.java b/android/media/MediaMetadata.java
new file mode 100644
index 00000000..bdc0fda6
--- /dev/null
+++ b/android/media/MediaMetadata.java
@@ -0,0 +1,846 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+import android.annotation.NonNull;
+import android.annotation.StringDef;
+import android.content.ContentResolver;
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.media.browse.MediaBrowser;
+import android.media.session.MediaController;
+import android.net.Uri;
+import android.os.Bundle;
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.text.TextUtils;
+import android.util.ArrayMap;
+import android.util.Log;
+import android.util.SparseArray;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.util.Set;
+
+/**
+ * Contains metadata about an item, such as the title, artist, etc.
+ */
+public final class MediaMetadata implements Parcelable {
+ private static final String TAG = "MediaMetadata";
+
+ /**
+ * @hide
+ */
+ @StringDef({METADATA_KEY_TITLE, METADATA_KEY_ARTIST, METADATA_KEY_ALBUM, METADATA_KEY_AUTHOR,
+ METADATA_KEY_WRITER, METADATA_KEY_COMPOSER, METADATA_KEY_COMPILATION,
+ METADATA_KEY_DATE, METADATA_KEY_GENRE, METADATA_KEY_ALBUM_ARTIST, METADATA_KEY_ART_URI,
+ METADATA_KEY_ALBUM_ART_URI, METADATA_KEY_DISPLAY_TITLE, METADATA_KEY_DISPLAY_SUBTITLE,
+ METADATA_KEY_DISPLAY_DESCRIPTION, METADATA_KEY_DISPLAY_ICON_URI,
+ METADATA_KEY_MEDIA_ID, METADATA_KEY_MEDIA_URI})
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface TextKey {}
+
+ /**
+ * @hide
+ */
+ @StringDef({METADATA_KEY_DURATION, METADATA_KEY_YEAR, METADATA_KEY_TRACK_NUMBER,
+ METADATA_KEY_NUM_TRACKS, METADATA_KEY_DISC_NUMBER, METADATA_KEY_BT_FOLDER_TYPE})
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface LongKey {}
+
+ /**
+ * @hide
+ */
+ @StringDef({METADATA_KEY_ART, METADATA_KEY_ALBUM_ART, METADATA_KEY_DISPLAY_ICON})
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface BitmapKey {}
+
+ /**
+ * @hide
+ */
+ @StringDef({METADATA_KEY_USER_RATING, METADATA_KEY_RATING})
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface RatingKey {}
+
+ /**
+ * The title of the media.
+ */
+ public static final String METADATA_KEY_TITLE = "android.media.metadata.TITLE";
+
+ /**
+ * The artist of the media.
+ */
+ public static final String METADATA_KEY_ARTIST = "android.media.metadata.ARTIST";
+
+ /**
+ * The duration of the media in ms. A negative duration indicates that the
+ * duration is unknown (or infinite).
+ */
+ public static final String METADATA_KEY_DURATION = "android.media.metadata.DURATION";
+
+ /**
+ * The album title for the media.
+ */
+ public static final String METADATA_KEY_ALBUM = "android.media.metadata.ALBUM";
+
+ /**
+ * The author of the media.
+ */
+ public static final String METADATA_KEY_AUTHOR = "android.media.metadata.AUTHOR";
+
+ /**
+ * The writer of the media.
+ */
+ public static final String METADATA_KEY_WRITER = "android.media.metadata.WRITER";
+
+ /**
+ * The composer of the media.
+ */
+ public static final String METADATA_KEY_COMPOSER = "android.media.metadata.COMPOSER";
+
+ /**
+ * The compilation status of the media.
+ */
+ public static final String METADATA_KEY_COMPILATION = "android.media.metadata.COMPILATION";
+
+ /**
+ * The date the media was created or published. The format is unspecified
+ * but RFC 3339 is recommended.
+ */
+ public static final String METADATA_KEY_DATE = "android.media.metadata.DATE";
+
+ /**
+ * The year the media was created or published as a long.
+ */
+ public static final String METADATA_KEY_YEAR = "android.media.metadata.YEAR";
+
+ /**
+ * The genre of the media.
+ */
+ public static final String METADATA_KEY_GENRE = "android.media.metadata.GENRE";
+
+ /**
+ * The track number for the media.
+ */
+ public static final String METADATA_KEY_TRACK_NUMBER = "android.media.metadata.TRACK_NUMBER";
+
+ /**
+ * The number of tracks in the media's original source.
+ */
+ public static final String METADATA_KEY_NUM_TRACKS = "android.media.metadata.NUM_TRACKS";
+
+ /**
+ * The disc number for the media's original source.
+ */
+ public static final String METADATA_KEY_DISC_NUMBER = "android.media.metadata.DISC_NUMBER";
+
+ /**
+ * The artist for the album of the media's original source.
+ */
+ public static final String METADATA_KEY_ALBUM_ARTIST = "android.media.metadata.ALBUM_ARTIST";
+
+ /**
+ * The artwork for the media as a {@link Bitmap}.
+ * <p>
+ * The artwork should be relatively small and may be scaled down by the
+ * system if it is too large. For higher resolution artwork
+ * {@link #METADATA_KEY_ART_URI} should be used instead.
+ */
+ public static final String METADATA_KEY_ART = "android.media.metadata.ART";
+
+ /**
+ * The artwork for the media as a Uri formatted String. The artwork can be
+ * loaded using a combination of {@link ContentResolver#openInputStream} and
+ * {@link BitmapFactory#decodeStream}.
+ * <p>
+ * For the best results, Uris should use the content:// style and support
+ * {@link ContentResolver#EXTRA_SIZE} for retrieving scaled artwork through
+ * {@link ContentResolver#openTypedAssetFileDescriptor(Uri, String, Bundle)}.
+ */
+ public static final String METADATA_KEY_ART_URI = "android.media.metadata.ART_URI";
+
+ /**
+ * The artwork for the album of the media's original source as a
+ * {@link Bitmap}.
+ * <p>
+ * The artwork should be relatively small and may be scaled down by the
+ * system if it is too large. For higher resolution artwork
+ * {@link #METADATA_KEY_ALBUM_ART_URI} should be used instead.
+ */
+ public static final String METADATA_KEY_ALBUM_ART = "android.media.metadata.ALBUM_ART";
+
+ /**
+ * The artwork for the album of the media's original source as a Uri
+ * formatted String. The artwork can be loaded using a combination of
+ * {@link ContentResolver#openInputStream} and
+ * {@link BitmapFactory#decodeStream}.
+ * <p>
+ * For the best results, Uris should use the content:// style and support
+ * {@link ContentResolver#EXTRA_SIZE} for retrieving scaled artwork through
+ * {@link ContentResolver#openTypedAssetFileDescriptor(Uri, String, Bundle)}.
+ */
+ public static final String METADATA_KEY_ALBUM_ART_URI = "android.media.metadata.ALBUM_ART_URI";
+
+ /**
+ * The user's rating for the media.
+ *
+ * @see Rating
+ */
+ public static final String METADATA_KEY_USER_RATING = "android.media.metadata.USER_RATING";
+
+ /**
+ * The overall rating for the media.
+ *
+ * @see Rating
+ */
+ public static final String METADATA_KEY_RATING = "android.media.metadata.RATING";
+
+ /**
+ * A title that is suitable for display to the user. This will generally be
+ * the same as {@link #METADATA_KEY_TITLE} but may differ for some formats.
+ * When displaying media described by this metadata this should be preferred
+ * if present.
+ */
+ public static final String METADATA_KEY_DISPLAY_TITLE = "android.media.metadata.DISPLAY_TITLE";
+
+ /**
+ * A subtitle that is suitable for display to the user. When displaying a
+ * second line for media described by this metadata this should be preferred
+ * to other fields if present.
+ */
+ public static final String METADATA_KEY_DISPLAY_SUBTITLE
+ = "android.media.metadata.DISPLAY_SUBTITLE";
+
+ /**
+ * A description that is suitable for display to the user. When displaying
+ * more information for media described by this metadata this should be
+ * preferred to other fields if present.
+ */
+ public static final String METADATA_KEY_DISPLAY_DESCRIPTION
+ = "android.media.metadata.DISPLAY_DESCRIPTION";
+
+ /**
+ * An icon or thumbnail that is suitable for display to the user. When
+ * displaying an icon for media described by this metadata this should be
+ * preferred to other fields if present. This must be a {@link Bitmap}.
+ * <p>
+ * The icon should be relatively small and may be scaled down by the system
+ * if it is too large. For higher resolution artwork
+ * {@link #METADATA_KEY_DISPLAY_ICON_URI} should be used instead.
+ */
+ public static final String METADATA_KEY_DISPLAY_ICON
+ = "android.media.metadata.DISPLAY_ICON";
+
+ /**
+ * A Uri formatted String for an icon or thumbnail that is suitable for
+ * display to the user. When displaying more information for media described
+ * by this metadata the display description should be preferred to other
+ * fields when present. The icon can be loaded using a combination of
+ * {@link ContentResolver#openInputStream} and
+ * {@link BitmapFactory#decodeStream}.
+ * <p>
+ * For the best results, Uris should use the content:// style and support
+ * {@link ContentResolver#EXTRA_SIZE} for retrieving scaled artwork through
+ * {@link ContentResolver#openTypedAssetFileDescriptor(Uri, String, Bundle)}.
+ */
+ public static final String METADATA_KEY_DISPLAY_ICON_URI
+ = "android.media.metadata.DISPLAY_ICON_URI";
+
+ /**
+ * A String key for identifying the content. This value is specific to the
+ * service providing the content. If used, this should be a persistent
+ * unique key for the underlying content. It may be used with
+ * {@link MediaController.TransportControls#playFromMediaId(String, Bundle)}
+ * to initiate playback when provided by a {@link MediaBrowser} connected to
+ * the same app.
+ */
+ public static final String METADATA_KEY_MEDIA_ID = "android.media.metadata.MEDIA_ID";
+
+ /**
+ * A Uri formatted String representing the content. This value is specific to the
+ * service providing the content. It may be used with
+ * {@link MediaController.TransportControls#playFromUri(Uri, Bundle)}
+ * to initiate playback when provided by a {@link MediaBrowser} connected to
+ * the same app.
+ */
+ public static final String METADATA_KEY_MEDIA_URI = "android.media.metadata.MEDIA_URI";
+
+ /**
+ * The bluetooth folder type of the media specified in the section 6.10.2.2 of the Bluetooth
+ * AVRCP 1.5. It should be one of the following:
+ * <ul>
+ * <li>{@link MediaDescription#BT_FOLDER_TYPE_MIXED}</li>
+ * <li>{@link MediaDescription#BT_FOLDER_TYPE_TITLES}</li>
+ * <li>{@link MediaDescription#BT_FOLDER_TYPE_ALBUMS}</li>
+ * <li>{@link MediaDescription#BT_FOLDER_TYPE_ARTISTS}</li>
+ * <li>{@link MediaDescription#BT_FOLDER_TYPE_GENRES}</li>
+ * <li>{@link MediaDescription#BT_FOLDER_TYPE_PLAYLISTS}</li>
+ * <li>{@link MediaDescription#BT_FOLDER_TYPE_YEARS}</li>
+ * </ul>
+ */
+ public static final String METADATA_KEY_BT_FOLDER_TYPE
+ = "android.media.metadata.BT_FOLDER_TYPE";
+
+ private static final @TextKey String[] PREFERRED_DESCRIPTION_ORDER = {
+ METADATA_KEY_TITLE,
+ METADATA_KEY_ARTIST,
+ METADATA_KEY_ALBUM,
+ METADATA_KEY_ALBUM_ARTIST,
+ METADATA_KEY_WRITER,
+ METADATA_KEY_AUTHOR,
+ METADATA_KEY_COMPOSER
+ };
+
+ private static final @BitmapKey String[] PREFERRED_BITMAP_ORDER = {
+ METADATA_KEY_DISPLAY_ICON,
+ METADATA_KEY_ART,
+ METADATA_KEY_ALBUM_ART
+ };
+
+ private static final @TextKey String[] PREFERRED_URI_ORDER = {
+ METADATA_KEY_DISPLAY_ICON_URI,
+ METADATA_KEY_ART_URI,
+ METADATA_KEY_ALBUM_ART_URI
+ };
+
+ private static final int METADATA_TYPE_INVALID = -1;
+ private static final int METADATA_TYPE_LONG = 0;
+ private static final int METADATA_TYPE_TEXT = 1;
+ private static final int METADATA_TYPE_BITMAP = 2;
+ private static final int METADATA_TYPE_RATING = 3;
+ private static final ArrayMap<String, Integer> METADATA_KEYS_TYPE;
+
+    static {
+        // Declares the value type (text/long/bitmap/rating) associated with
+        // each well-known metadata key.
+        METADATA_KEYS_TYPE = new ArrayMap<String, Integer>();
+        METADATA_KEYS_TYPE.put(METADATA_KEY_TITLE, METADATA_TYPE_TEXT);
+        METADATA_KEYS_TYPE.put(METADATA_KEY_ARTIST, METADATA_TYPE_TEXT);
+        METADATA_KEYS_TYPE.put(METADATA_KEY_DURATION, METADATA_TYPE_LONG);
+        METADATA_KEYS_TYPE.put(METADATA_KEY_ALBUM, METADATA_TYPE_TEXT);
+        METADATA_KEYS_TYPE.put(METADATA_KEY_AUTHOR, METADATA_TYPE_TEXT);
+        METADATA_KEYS_TYPE.put(METADATA_KEY_WRITER, METADATA_TYPE_TEXT);
+        METADATA_KEYS_TYPE.put(METADATA_KEY_COMPOSER, METADATA_TYPE_TEXT);
+        METADATA_KEYS_TYPE.put(METADATA_KEY_COMPILATION, METADATA_TYPE_TEXT);
+        METADATA_KEYS_TYPE.put(METADATA_KEY_DATE, METADATA_TYPE_TEXT);
+        METADATA_KEYS_TYPE.put(METADATA_KEY_YEAR, METADATA_TYPE_LONG);
+        METADATA_KEYS_TYPE.put(METADATA_KEY_GENRE, METADATA_TYPE_TEXT);
+        METADATA_KEYS_TYPE.put(METADATA_KEY_TRACK_NUMBER, METADATA_TYPE_LONG);
+        METADATA_KEYS_TYPE.put(METADATA_KEY_NUM_TRACKS, METADATA_TYPE_LONG);
+        METADATA_KEYS_TYPE.put(METADATA_KEY_DISC_NUMBER, METADATA_TYPE_LONG);
+        METADATA_KEYS_TYPE.put(METADATA_KEY_ALBUM_ARTIST, METADATA_TYPE_TEXT);
+        METADATA_KEYS_TYPE.put(METADATA_KEY_ART, METADATA_TYPE_BITMAP);
+        METADATA_KEYS_TYPE.put(METADATA_KEY_ART_URI, METADATA_TYPE_TEXT);
+        METADATA_KEYS_TYPE.put(METADATA_KEY_ALBUM_ART, METADATA_TYPE_BITMAP);
+        METADATA_KEYS_TYPE.put(METADATA_KEY_ALBUM_ART_URI, METADATA_TYPE_TEXT);
+        METADATA_KEYS_TYPE.put(METADATA_KEY_USER_RATING, METADATA_TYPE_RATING);
+        METADATA_KEYS_TYPE.put(METADATA_KEY_RATING, METADATA_TYPE_RATING);
+        METADATA_KEYS_TYPE.put(METADATA_KEY_DISPLAY_TITLE, METADATA_TYPE_TEXT);
+        METADATA_KEYS_TYPE.put(METADATA_KEY_DISPLAY_SUBTITLE, METADATA_TYPE_TEXT);
+        METADATA_KEYS_TYPE.put(METADATA_KEY_DISPLAY_DESCRIPTION, METADATA_TYPE_TEXT);
+        METADATA_KEYS_TYPE.put(METADATA_KEY_DISPLAY_ICON, METADATA_TYPE_BITMAP);
+        METADATA_KEYS_TYPE.put(METADATA_KEY_DISPLAY_ICON_URI, METADATA_TYPE_TEXT);
+        METADATA_KEYS_TYPE.put(METADATA_KEY_BT_FOLDER_TYPE, METADATA_TYPE_LONG);
+        METADATA_KEYS_TYPE.put(METADATA_KEY_MEDIA_ID, METADATA_TYPE_TEXT);
+        METADATA_KEYS_TYPE.put(METADATA_KEY_MEDIA_URI, METADATA_TYPE_TEXT);
+    }
+
+ private static final SparseArray<String> EDITOR_KEY_MAPPING;
+
+    static {
+        // Maps legacy integer keys (MediaMetadataEditor / MediaMetadataRetriever)
+        // to the String keys used by this class.
+        EDITOR_KEY_MAPPING = new SparseArray<String>();
+        EDITOR_KEY_MAPPING.put(MediaMetadataEditor.BITMAP_KEY_ARTWORK, METADATA_KEY_ART);
+        EDITOR_KEY_MAPPING.put(MediaMetadataEditor.RATING_KEY_BY_OTHERS, METADATA_KEY_RATING);
+        EDITOR_KEY_MAPPING.put(MediaMetadataEditor.RATING_KEY_BY_USER, METADATA_KEY_USER_RATING);
+        EDITOR_KEY_MAPPING.put(MediaMetadataRetriever.METADATA_KEY_ALBUM, METADATA_KEY_ALBUM);
+        EDITOR_KEY_MAPPING.put(MediaMetadataRetriever.METADATA_KEY_ALBUMARTIST,
+                METADATA_KEY_ALBUM_ARTIST);
+        EDITOR_KEY_MAPPING.put(MediaMetadataRetriever.METADATA_KEY_ARTIST, METADATA_KEY_ARTIST);
+        EDITOR_KEY_MAPPING.put(MediaMetadataRetriever.METADATA_KEY_AUTHOR, METADATA_KEY_AUTHOR);
+        EDITOR_KEY_MAPPING.put(MediaMetadataRetriever.METADATA_KEY_CD_TRACK_NUMBER,
+                METADATA_KEY_TRACK_NUMBER);
+        EDITOR_KEY_MAPPING.put(MediaMetadataRetriever.METADATA_KEY_COMPOSER, METADATA_KEY_COMPOSER);
+        EDITOR_KEY_MAPPING.put(MediaMetadataRetriever.METADATA_KEY_COMPILATION,
+                METADATA_KEY_COMPILATION);
+        EDITOR_KEY_MAPPING.put(MediaMetadataRetriever.METADATA_KEY_DATE, METADATA_KEY_DATE);
+        EDITOR_KEY_MAPPING.put(MediaMetadataRetriever.METADATA_KEY_DISC_NUMBER,
+                METADATA_KEY_DISC_NUMBER);
+        EDITOR_KEY_MAPPING.put(MediaMetadataRetriever.METADATA_KEY_DURATION, METADATA_KEY_DURATION);
+        EDITOR_KEY_MAPPING.put(MediaMetadataRetriever.METADATA_KEY_GENRE, METADATA_KEY_GENRE);
+        EDITOR_KEY_MAPPING.put(MediaMetadataRetriever.METADATA_KEY_NUM_TRACKS,
+                METADATA_KEY_NUM_TRACKS);
+        EDITOR_KEY_MAPPING.put(MediaMetadataRetriever.METADATA_KEY_TITLE, METADATA_KEY_TITLE);
+        EDITOR_KEY_MAPPING.put(MediaMetadataRetriever.METADATA_KEY_WRITER, METADATA_KEY_WRITER);
+        EDITOR_KEY_MAPPING.put(MediaMetadataRetriever.METADATA_KEY_YEAR, METADATA_KEY_YEAR);
+    }
+
+ private final Bundle mBundle;
+ private MediaDescription mDescription;
+
+ private MediaMetadata(Bundle bundle) {
+ mBundle = new Bundle(bundle);
+ }
+
+ private MediaMetadata(Parcel in) {
+ mBundle = Bundle.setDefusable(in.readBundle(), true);
+ }
+
+ /**
+ * Returns true if the given key is contained in the metadata
+ *
+ * @param key a String key
+ * @return true if the key exists in this metadata, false otherwise
+ */
+ public boolean containsKey(String key) {
+ return mBundle.containsKey(key);
+ }
+
+ /**
+ * Returns the value associated with the given key, or null if no mapping of
+ * the desired type exists for the given key or a null value is explicitly
+ * associated with the key.
+ *
+ * @param key The key the value is stored under
+ * @return a CharSequence value, or null
+ */
+ public CharSequence getText(@TextKey String key) {
+ return mBundle.getCharSequence(key);
+ }
+
+ /**
+ * Returns the text value associated with the given key as a String, or null
+ * if no mapping of the desired type exists for the given key or a null
+ * value is explicitly associated with the key. This is equivalent to
+ * calling {@link #getText getText().toString()} if the value is not null.
+ *
+ * @param key The key the value is stored under
+ * @return a String value, or null
+ */
+ public String getString(@TextKey String key) {
+ CharSequence text = getText(key);
+ if (text != null) {
+ return text.toString();
+ }
+ return null;
+ }
+
+ /**
+ * Returns the value associated with the given key, or 0L if no long exists
+ * for the given key.
+ *
+ * @param key The key the value is stored under
+ * @return a long value
+ */
+ public long getLong(@LongKey String key) {
+ return mBundle.getLong(key, 0);
+ }
+
+ /**
+ * Returns a {@link Rating} for the given key or null if no rating exists
+ * for the given key.
+ *
+ * @param key The key the value is stored under
+ * @return A {@link Rating} or null
+ */
+ public Rating getRating(@RatingKey String key) {
+ Rating rating = null;
+ try {
+ rating = mBundle.getParcelable(key);
+ } catch (Exception e) {
+ // ignore, value was not a bitmap
+ Log.w(TAG, "Failed to retrieve a key as Rating.", e);
+ }
+ return rating;
+ }
+
+ /**
+ * Returns a {@link Bitmap} for the given key or null if no bitmap exists
+ * for the given key.
+ *
+ * @param key The key the value is stored under
+ * @return A {@link Bitmap} or null
+ */
+ public Bitmap getBitmap(@BitmapKey String key) {
+ Bitmap bmp = null;
+ try {
+ bmp = mBundle.getParcelable(key);
+ } catch (Exception e) {
+ // ignore, value was not a bitmap
+ Log.w(TAG, "Failed to retrieve a key as Bitmap.", e);
+ }
+ return bmp;
+ }
+
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ @Override
+ public void writeToParcel(Parcel dest, int flags) {
+ dest.writeBundle(mBundle);
+ }
+
+ /**
+ * Returns the number of fields in this metadata.
+ *
+ * @return The number of fields in the metadata.
+ */
+ public int size() {
+ return mBundle.size();
+ }
+
+ /**
+ * Returns a Set containing the Strings used as keys in this metadata.
+ *
+ * @return a Set of String keys
+ */
+ public Set<String> keySet() {
+ return mBundle.keySet();
+ }
+
+ /**
+ * Returns a simple description of this metadata for display purposes.
+ *
+ * @return A simple description of this metadata.
+ */
+ public @NonNull MediaDescription getDescription() {
+ if (mDescription != null) {
+ return mDescription;
+ }
+
+ String mediaId = getString(METADATA_KEY_MEDIA_ID);
+
+ CharSequence[] text = new CharSequence[3];
+ Bitmap icon = null;
+ Uri iconUri = null;
+
+ // First handle the case where display data is set already
+ CharSequence displayText = getText(METADATA_KEY_DISPLAY_TITLE);
+ if (!TextUtils.isEmpty(displayText)) {
+ // If they have a display title use only display data, otherwise use
+ // our best bets
+ text[0] = displayText;
+ text[1] = getText(METADATA_KEY_DISPLAY_SUBTITLE);
+ text[2] = getText(METADATA_KEY_DISPLAY_DESCRIPTION);
+ } else {
+ // Use whatever fields we can
+ int textIndex = 0;
+ int keyIndex = 0;
+ while (textIndex < text.length && keyIndex < PREFERRED_DESCRIPTION_ORDER.length) {
+ CharSequence next = getText(PREFERRED_DESCRIPTION_ORDER[keyIndex++]);
+ if (!TextUtils.isEmpty(next)) {
+ // Fill in the next empty bit of text
+ text[textIndex++] = next;
+ }
+ }
+ }
+
+ // Get the best art bitmap we can find
+ for (int i = 0; i < PREFERRED_BITMAP_ORDER.length; i++) {
+ Bitmap next = getBitmap(PREFERRED_BITMAP_ORDER[i]);
+ if (next != null) {
+ icon = next;
+ break;
+ }
+ }
+
+ // Get the best Uri we can find
+ for (int i = 0; i < PREFERRED_URI_ORDER.length; i++) {
+ String next = getString(PREFERRED_URI_ORDER[i]);
+ if (!TextUtils.isEmpty(next)) {
+ iconUri = Uri.parse(next);
+ break;
+ }
+ }
+
+ Uri mediaUri = null;
+ String mediaUriStr = getString(METADATA_KEY_MEDIA_URI);
+ if (!TextUtils.isEmpty(mediaUriStr)) {
+ mediaUri = Uri.parse(mediaUriStr);
+ }
+
+ MediaDescription.Builder bob = new MediaDescription.Builder();
+ bob.setMediaId(mediaId);
+ bob.setTitle(text[0]);
+ bob.setSubtitle(text[1]);
+ bob.setDescription(text[2]);
+ bob.setIconBitmap(icon);
+ bob.setIconUri(iconUri);
+ bob.setMediaUri(mediaUri);
+ if (mBundle.containsKey(METADATA_KEY_BT_FOLDER_TYPE)) {
+ Bundle bundle = new Bundle();
+ bundle.putLong(MediaDescription.EXTRA_BT_FOLDER_TYPE,
+ getLong(METADATA_KEY_BT_FOLDER_TYPE));
+ bob.setExtras(bundle);
+ }
+ mDescription = bob.build();
+
+ return mDescription;
+ }
+
+ /**
+ * Helper for getting the String key used by {@link MediaMetadata} from the
+ * integer key that {@link MediaMetadataEditor} uses.
+ *
+ * @param editorKey The key used by the editor
+ * @return The key used by this class or null if no mapping exists
+ * @hide
+ */
+ public static String getKeyFromMetadataEditorKey(int editorKey) {
+ return EDITOR_KEY_MAPPING.get(editorKey, null);
+ }
+
+ public static final Parcelable.Creator<MediaMetadata> CREATOR =
+ new Parcelable.Creator<MediaMetadata>() {
+ @Override
+ public MediaMetadata createFromParcel(Parcel in) {
+ return new MediaMetadata(in);
+ }
+
+ @Override
+ public MediaMetadata[] newArray(int size) {
+ return new MediaMetadata[size];
+ }
+ };
+
+ /**
+ * Use to build MediaMetadata objects. The system defined metadata keys must
+ * use the appropriate data type.
+ */
+ public static final class Builder {
+ private final Bundle mBundle;
+
+ /**
+ * Create an empty Builder. Any field that should be included in the
+ * {@link MediaMetadata} must be added.
+ */
+ public Builder() {
+ mBundle = new Bundle();
+ }
+
+ /**
+ * Create a Builder using a {@link MediaMetadata} instance to set the
+ * initial values. All fields in the source metadata will be included in
+ * the new metadata. Fields can be overwritten by adding the same key.
+ *
+ * @param source
+ */
+ public Builder(MediaMetadata source) {
+ mBundle = new Bundle(source.mBundle);
+ }
+
+ /**
+ * Create a Builder using a {@link MediaMetadata} instance to set
+ * initial values, but replace bitmaps with a scaled down copy if they
+ * are larger than maxBitmapSize.
+ *
+ * @param source The original metadata to copy.
+ * @param maxBitmapSize The maximum height/width for bitmaps contained
+ * in the metadata.
+ * @hide
+ */
+ public Builder(MediaMetadata source, int maxBitmapSize) {
+ this(source);
+ for (String key : mBundle.keySet()) {
+ Object value = mBundle.get(key);
+ if (value != null && value instanceof Bitmap) {
+ Bitmap bmp = (Bitmap) value;
+ if (bmp.getHeight() > maxBitmapSize || bmp.getWidth() > maxBitmapSize) {
+ putBitmap(key, scaleBitmap(bmp, maxBitmapSize));
+ }
+ }
+ }
+ }
+
+ /**
+ * Put a CharSequence value into the metadata. Custom keys may be used,
+ * but if the METADATA_KEYs defined in this class are used they may only
+ * be one of the following:
+ * <ul>
+ * <li>{@link #METADATA_KEY_TITLE}</li>
+ * <li>{@link #METADATA_KEY_ARTIST}</li>
+ * <li>{@link #METADATA_KEY_ALBUM}</li>
+ * <li>{@link #METADATA_KEY_AUTHOR}</li>
+ * <li>{@link #METADATA_KEY_WRITER}</li>
+ * <li>{@link #METADATA_KEY_COMPOSER}</li>
+ * <li>{@link #METADATA_KEY_DATE}</li>
+ * <li>{@link #METADATA_KEY_GENRE}</li>
+ * <li>{@link #METADATA_KEY_ALBUM_ARTIST}</li>
+ * <li>{@link #METADATA_KEY_ART_URI}</li>
+ * <li>{@link #METADATA_KEY_ALBUM_ART_URI}</li>
+ * <li>{@link #METADATA_KEY_DISPLAY_TITLE}</li>
+ * <li>{@link #METADATA_KEY_DISPLAY_SUBTITLE}</li>
+ * <li>{@link #METADATA_KEY_DISPLAY_DESCRIPTION}</li>
+ * <li>{@link #METADATA_KEY_DISPLAY_ICON_URI}</li>
+ * </ul>
+ *
+ * @param key The key for referencing this value
+ * @param value The CharSequence value to store
+ * @return The Builder to allow chaining
+ */
+ public Builder putText(@TextKey String key, CharSequence value) {
+ if (METADATA_KEYS_TYPE.containsKey(key)) {
+ if (METADATA_KEYS_TYPE.get(key) != METADATA_TYPE_TEXT) {
+ throw new IllegalArgumentException("The " + key
+ + " key cannot be used to put a CharSequence");
+ }
+ }
+ mBundle.putCharSequence(key, value);
+ return this;
+ }
+
+ /**
+ * Put a String value into the metadata. Custom keys may be used, but if
+ * the METADATA_KEYs defined in this class are used they may only be one
+ * of the following:
+ * <ul>
+ * <li>{@link #METADATA_KEY_TITLE}</li>
+ * <li>{@link #METADATA_KEY_ARTIST}</li>
+ * <li>{@link #METADATA_KEY_ALBUM}</li>
+ * <li>{@link #METADATA_KEY_AUTHOR}</li>
+ * <li>{@link #METADATA_KEY_WRITER}</li>
+ * <li>{@link #METADATA_KEY_COMPOSER}</li>
+ * <li>{@link #METADATA_KEY_DATE}</li>
+ * <li>{@link #METADATA_KEY_GENRE}</li>
+ * <li>{@link #METADATA_KEY_ALBUM_ARTIST}</li>
+ * <li>{@link #METADATA_KEY_ART_URI}</li>
+ * <li>{@link #METADATA_KEY_ALBUM_ART_URI}</li>
+ * <li>{@link #METADATA_KEY_DISPLAY_TITLE}</li>
+ * <li>{@link #METADATA_KEY_DISPLAY_SUBTITLE}</li>
+ * <li>{@link #METADATA_KEY_DISPLAY_DESCRIPTION}</li>
+ * <li>{@link #METADATA_KEY_DISPLAY_ICON_URI}</li>
+ * </ul>
+ * <p>
+ * Uris for artwork should use the content:// style and support
+ * {@link ContentResolver#EXTRA_SIZE} for retrieving scaled artwork
+ * through {@link ContentResolver#openTypedAssetFileDescriptor(Uri,
+ * String, Bundle)}.
+ *
+ * @param key The key for referencing this value
+ * @param value The String value to store
+ * @return The Builder to allow chaining
+ */
+ public Builder putString(@TextKey String key, String value) {
+ if (METADATA_KEYS_TYPE.containsKey(key)) {
+ if (METADATA_KEYS_TYPE.get(key) != METADATA_TYPE_TEXT) {
+ throw new IllegalArgumentException("The " + key
+ + " key cannot be used to put a String");
+ }
+ }
+ mBundle.putCharSequence(key, value);
+ return this;
+ }
+
+ /**
+ * Put a long value into the metadata. Custom keys may be used, but if
+ * the METADATA_KEYs defined in this class are used they may only be one
+ * of the following:
+ * <ul>
+ * <li>{@link #METADATA_KEY_DURATION}</li>
+ * <li>{@link #METADATA_KEY_TRACK_NUMBER}</li>
+ * <li>{@link #METADATA_KEY_NUM_TRACKS}</li>
+ * <li>{@link #METADATA_KEY_DISC_NUMBER}</li>
+ * <li>{@link #METADATA_KEY_YEAR}</li>
+ * </ul>
+ *
+ * @param key The key for referencing this value
+ * @param value The long value to store
+ * @return The Builder to allow chaining
+ */
+ public Builder putLong(@LongKey String key, long value) {
+ if (METADATA_KEYS_TYPE.containsKey(key)) {
+ if (METADATA_KEYS_TYPE.get(key) != METADATA_TYPE_LONG) {
+ throw new IllegalArgumentException("The " + key
+ + " key cannot be used to put a long");
+ }
+ }
+ mBundle.putLong(key, value);
+ return this;
+ }
+
+ /**
+ * Put a {@link Rating} into the metadata. Custom keys may be used, but
+ * if the METADATA_KEYs defined in this class are used they may only be
+ * one of the following:
+ * <ul>
+ * <li>{@link #METADATA_KEY_RATING}</li>
+ * <li>{@link #METADATA_KEY_USER_RATING}</li>
+ * </ul>
+ *
+ * @param key The key for referencing this value
+ * @param value The Rating value to store
+ * @return The Builder to allow chaining
+ */
+ public Builder putRating(@RatingKey String key, Rating value) {
+ if (METADATA_KEYS_TYPE.containsKey(key)) {
+ if (METADATA_KEYS_TYPE.get(key) != METADATA_TYPE_RATING) {
+ throw new IllegalArgumentException("The " + key
+ + " key cannot be used to put a Rating");
+ }
+ }
+ mBundle.putParcelable(key, value);
+ return this;
+ }
+
+ /**
+ * Put a {@link Bitmap} into the metadata. Custom keys may be used, but
+ * if the METADATA_KEYs defined in this class are used they may only be
+ * one of the following:
+ * <ul>
+ * <li>{@link #METADATA_KEY_ART}</li>
+ * <li>{@link #METADATA_KEY_ALBUM_ART}</li>
+ * <li>{@link #METADATA_KEY_DISPLAY_ICON}</li>
+ * </ul>
+ * <p>
+ * Large bitmaps may be scaled down by the system when
+ * {@link android.media.session.MediaSession#setMetadata} is called.
+ * To pass full resolution images {@link Uri Uris} should be used with
+ * {@link #putString}.
+ *
+ * @param key The key for referencing this value
+ * @param value The Bitmap to store
+ * @return The Builder to allow chaining
+ */
+ public Builder putBitmap(@BitmapKey String key, Bitmap value) {
+ if (METADATA_KEYS_TYPE.containsKey(key)) {
+ if (METADATA_KEYS_TYPE.get(key) != METADATA_TYPE_BITMAP) {
+ throw new IllegalArgumentException("The " + key
+ + " key cannot be used to put a Bitmap");
+ }
+ }
+ mBundle.putParcelable(key, value);
+ return this;
+ }
+
+ /**
+ * Creates a {@link MediaMetadata} instance with the specified fields.
+ *
+ * @return The new MediaMetadata instance
+ */
+ public MediaMetadata build() {
+ return new MediaMetadata(mBundle);
+ }
+
+ private Bitmap scaleBitmap(Bitmap bmp, int maxSize) {
+ float maxSizeF = maxSize;
+ float widthScale = maxSizeF / bmp.getWidth();
+ float heightScale = maxSizeF / bmp.getHeight();
+ float scale = Math.min(widthScale, heightScale);
+ int height = (int) (bmp.getHeight() * scale);
+ int width = (int) (bmp.getWidth() * scale);
+ return Bitmap.createScaledBitmap(bmp, width, height, true);
+ }
+ }
+}
diff --git a/android/media/MediaMetadataEditor.java b/android/media/MediaMetadataEditor.java
new file mode 100644
index 00000000..877c8729
--- /dev/null
+++ b/android/media/MediaMetadataEditor.java
@@ -0,0 +1,470 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.graphics.Bitmap;
+import android.media.session.MediaSession;
+import android.os.Bundle;
+import android.os.Parcelable;
+import android.util.Log;
+import android.util.SparseIntArray;
+
+/**
+ * An abstract class for editing and storing metadata that can be published by
+ * {@link RemoteControlClient}. See the {@link RemoteControlClient#editMetadata(boolean)}
+ * method to instantiate a {@link RemoteControlClient.MetadataEditor} object.
+ *
+ * @deprecated Use {@link MediaMetadata} instead together with {@link MediaSession}.
+ */
+@Deprecated public abstract class MediaMetadataEditor {
+
+ private final static String TAG = "MediaMetadataEditor";
+ /**
+ * @hide
+ */
+ protected MediaMetadataEditor() {
+ }
+
+ // Public keys for metadata used by RemoteControlClient and RemoteController.
+ // Note that these keys are defined here, and not in MediaMetadataRetriever
+ // because they are not supported by the MediaMetadataRetriever features.
+ /**
+ * The metadata key for the content artwork / album art.
+ */
+ public final static int BITMAP_KEY_ARTWORK =
+ RemoteControlClient.MetadataEditor.BITMAP_KEY_ARTWORK;
+
+ /**
+ * The metadata key for the content's average rating, not the user's rating.
+ * The value associated with this key is a {@link Rating} instance.
+ * @see #RATING_KEY_BY_USER
+ */
+ public final static int RATING_KEY_BY_OTHERS = 101;
+
+ /**
+ * The metadata key for the content's user rating.
+ * The value associated with this key is a {@link Rating} instance.
+ * This key can be flagged as "editable" (with {@link #addEditableKey(int)}) to enable
+ * receiving user rating values through the
+ * {@link android.media.RemoteControlClient.OnMetadataUpdateListener} interface.
+ */
+ public final static int RATING_KEY_BY_USER = 0x10000001;
+
+ /**
+ * @hide
+ * Editable key mask
+ */
+ public final static int KEY_EDITABLE_MASK = 0x1FFFFFFF;
+
+
+ /**
+ * Applies all of the metadata changes that have been set since the MediaMetadataEditor instance
+ * was created or since {@link #clear()} was called. Subclasses should synchronize on
+ * {@code this} for thread safety.
+ */
+ public abstract void apply();
+
+
+ /**
+ * @hide
+ * Mask of editable keys.
+ */
+ protected long mEditableKeys;
+
+ /**
+ * @hide
+ */
+ protected boolean mMetadataChanged = false;
+
+ /**
+ * @hide
+ */
+ protected boolean mApplied = false;
+
+ /**
+ * @hide
+ */
+ protected boolean mArtworkChanged = false;
+
+ /**
+ * @hide
+ */
+ protected Bitmap mEditorArtwork;
+
+ /**
+ * @hide
+ */
+ protected Bundle mEditorMetadata;
+
+ /**
+ * @hide
+ */
+ protected MediaMetadata.Builder mMetadataBuilder;
+
+ /**
+ * Clears all the pending metadata changes set since the MediaMetadataEditor instance was
+ * created or since this method was last called.
+ * Note that clearing the metadata doesn't reset the editable keys
+ * (use {@link #removeEditableKeys()} instead).
+ */
+ public synchronized void clear() {
+ if (mApplied) {
+ Log.e(TAG, "Can't clear a previously applied MediaMetadataEditor");
+ return;
+ }
+ mEditorMetadata.clear();
+ mEditorArtwork = null;
+ mMetadataBuilder = new MediaMetadata.Builder();
+ }
+
+ /**
+ * Flags the given key as being editable.
+ * This should only be used by metadata publishers, such as {@link RemoteControlClient},
+ * which will declare the metadata field as eligible to be updated, with new values
+ * received through the {@link RemoteControlClient.OnMetadataUpdateListener} interface.
+ * @param key the type of metadata that can be edited. The supported key is
+ * {@link #RATING_KEY_BY_USER}.
+ */
+ public synchronized void addEditableKey(int key) {
+ if (mApplied) {
+ Log.e(TAG, "Can't change editable keys of a previously applied MetadataEditor");
+ return;
+ }
+ // only one editable key at the moment, so we're not wasting memory on an array
+ // of editable keys to check the validity of the key, just hardcode the supported key.
+ if (key == RATING_KEY_BY_USER) {
+ mEditableKeys |= (KEY_EDITABLE_MASK & key);
+ mMetadataChanged = true;
+ } else {
+ Log.e(TAG, "Metadata key " + key + " cannot be edited");
+ }
+ }
+
+ /**
+ * Causes all metadata fields to be read-only.
+ */
+ public synchronized void removeEditableKeys() {
+ if (mApplied) {
+ Log.e(TAG, "Can't remove all editable keys of a previously applied MetadataEditor");
+ return;
+ }
+ if (mEditableKeys != 0) {
+ mEditableKeys = 0;
+ mMetadataChanged = true;
+ }
+ }
+
+ /**
+ * Retrieves the keys flagged as editable.
+ * @return null if there are no editable keys, or an array containing the keys.
+ */
+ public synchronized int[] getEditableKeys() {
+ // only one editable key supported here
+ if (mEditableKeys == RATING_KEY_BY_USER) {
+ int[] keys = { RATING_KEY_BY_USER };
+ return keys;
+ } else {
+ return null;
+ }
+ }
+
+ /**
+ * Adds textual information.
+ * Note that none of the information added after {@link #apply()} has been called,
+ * will be available to consumers of metadata stored by the MediaMetadataEditor.
+ * @param key The identifier of a the metadata field to set. Valid values are
+ * {@link android.media.MediaMetadataRetriever#METADATA_KEY_ALBUM},
+ * {@link android.media.MediaMetadataRetriever#METADATA_KEY_ALBUMARTIST},
+ * {@link android.media.MediaMetadataRetriever#METADATA_KEY_TITLE},
+ * {@link android.media.MediaMetadataRetriever#METADATA_KEY_ARTIST},
+ * {@link android.media.MediaMetadataRetriever#METADATA_KEY_AUTHOR},
+ * {@link android.media.MediaMetadataRetriever#METADATA_KEY_COMPILATION},
+ * {@link android.media.MediaMetadataRetriever#METADATA_KEY_COMPOSER},
+ * {@link android.media.MediaMetadataRetriever#METADATA_KEY_DATE},
+ * {@link android.media.MediaMetadataRetriever#METADATA_KEY_GENRE},
+ * {@link android.media.MediaMetadataRetriever#METADATA_KEY_WRITER}.
+ * @param value The text for the given key, or {@code null} to signify there is no valid
+ * information for the field.
+ * @return Returns a reference to the same MediaMetadataEditor object, so you can chain put
+ * calls together.
+ */
+ public synchronized MediaMetadataEditor putString(int key, String value)
+ throws IllegalArgumentException {
+ if (mApplied) {
+ Log.e(TAG, "Can't edit a previously applied MediaMetadataEditor");
+ return this;
+ }
+ if (METADATA_KEYS_TYPE.get(key, METADATA_TYPE_INVALID) != METADATA_TYPE_STRING) {
+ throw(new IllegalArgumentException("Invalid type 'String' for key "+ key));
+ }
+ mEditorMetadata.putString(String.valueOf(key), value);
+ mMetadataChanged = true;
+ return this;
+ }
+
+ /**
+ * Adds numerical information.
+ * Note that none of the information added after {@link #apply()} has been called
+ * will be available to consumers of metadata stored by the MediaMetadataEditor.
+ * @param key the identifier of a the metadata field to set. Valid values are
+ * {@link android.media.MediaMetadataRetriever#METADATA_KEY_CD_TRACK_NUMBER},
+ * {@link android.media.MediaMetadataRetriever#METADATA_KEY_DISC_NUMBER},
+ * {@link android.media.MediaMetadataRetriever#METADATA_KEY_DURATION} (with a value
+ * expressed in milliseconds),
+ * {@link android.media.MediaMetadataRetriever#METADATA_KEY_YEAR}.
+ * @param value The long value for the given key
+ * @return Returns a reference to the same MediaMetadataEditor object, so you can chain put
+ * calls together.
+ * @throws IllegalArgumentException
+ */
+ public synchronized MediaMetadataEditor putLong(int key, long value)
+ throws IllegalArgumentException {
+ if (mApplied) {
+ Log.e(TAG, "Can't edit a previously applied MediaMetadataEditor");
+ return this;
+ }
+ if (METADATA_KEYS_TYPE.get(key, METADATA_TYPE_INVALID) != METADATA_TYPE_LONG) {
+ throw(new IllegalArgumentException("Invalid type 'long' for key "+ key));
+ }
+ mEditorMetadata.putLong(String.valueOf(key), value);
+ mMetadataChanged = true;
+ return this;
+ }
+
+ /**
+ * Adds image.
+ * @param key the identifier of the bitmap to set. The only valid value is
+ * {@link #BITMAP_KEY_ARTWORK}
+ * @param bitmap The bitmap for the artwork, or null if there isn't any.
+ * @return Returns a reference to the same MediaMetadataEditor object, so you can chain put
+ * calls together.
+ * @throws IllegalArgumentException
+ * @see android.graphics.Bitmap
+ */
+ public synchronized MediaMetadataEditor putBitmap(int key, Bitmap bitmap)
+ throws IllegalArgumentException {
+ if (mApplied) {
+ Log.e(TAG, "Can't edit a previously applied MediaMetadataEditor");
+ return this;
+ }
+ if (key != BITMAP_KEY_ARTWORK) {
+ throw(new IllegalArgumentException("Invalid type 'Bitmap' for key "+ key));
+ }
+ mEditorArtwork = bitmap;
+ mArtworkChanged = true;
+ return this;
+ }
+
+ /**
+ * Adds information stored as an instance.
+ * Note that none of the information added after {@link #apply()} has been called
+ * will be available to consumers of metadata stored by the MediaMetadataEditor.
+ * @param key the identifier of a the metadata field to set. Valid keys for a:
+ * <ul>
+ * <li>{@link Bitmap} object are {@link #BITMAP_KEY_ARTWORK},</li>
+ * <li>{@link String} object are the same as for {@link #putString(int, String)}</li>
+ * <li>{@link Long} object are the same as for {@link #putLong(int, long)}</li>
+ * <li>{@link Rating} object are {@link #RATING_KEY_BY_OTHERS}
+ * and {@link #RATING_KEY_BY_USER}.</li>
+ * </ul>
+ * @param value the metadata to add.
+ * @return Returns a reference to the same MediaMetadataEditor object, so you can chain put
+ * calls together.
+ * @throws IllegalArgumentException
+ */
+ public synchronized MediaMetadataEditor putObject(int key, Object value)
+ throws IllegalArgumentException {
+ if (mApplied) {
+ Log.e(TAG, "Can't edit a previously applied MediaMetadataEditor");
+ return this;
+ }
+ switch(METADATA_KEYS_TYPE.get(key, METADATA_TYPE_INVALID)) {
+ case METADATA_TYPE_LONG:
+ if (value instanceof Long) {
+ return putLong(key, ((Long)value).longValue());
+ } else {
+ throw(new IllegalArgumentException("Not a non-null Long for key "+ key));
+ }
+ case METADATA_TYPE_STRING:
+ if ((value == null) || (value instanceof String)) {
+ return putString(key, (String) value);
+ } else {
+ throw(new IllegalArgumentException("Not a String for key "+ key));
+ }
+ case METADATA_TYPE_RATING:
+ mEditorMetadata.putParcelable(String.valueOf(key), (Parcelable)value);
+ mMetadataChanged = true;
+ break;
+ case METADATA_TYPE_BITMAP:
+ if ((value == null) || (value instanceof Bitmap)) {
+ return putBitmap(key, (Bitmap) value);
+ } else {
+ throw(new IllegalArgumentException("Not a Bitmap for key "+ key));
+ }
+ default:
+ throw(new IllegalArgumentException("Invalid key "+ key));
+ }
+ return this;
+ }
+
+
+ /**
+ * Returns the long value for the key.
+ * @param key one of the keys supported in {@link #putLong(int, long)}
+ * @param defaultValue the value returned if the key is not present
+ * @return the long value for the key, or the supplied default value if the key is not present
+ * @throws IllegalArgumentException
+ */
+ public synchronized long getLong(int key, long defaultValue)
+ throws IllegalArgumentException {
+ if (METADATA_KEYS_TYPE.get(key, METADATA_TYPE_INVALID) != METADATA_TYPE_LONG) {
+ throw(new IllegalArgumentException("Invalid type 'long' for key "+ key));
+ }
+ return mEditorMetadata.getLong(String.valueOf(key), defaultValue);
+ }
+
+ /**
+ * Returns the {@link String} value for the key.
+ * @param key one of the keys supported in {@link #putString(int, String)}
+ * @param defaultValue the value returned if the key is not present
+ * @return the {@link String} value for the key, or the supplied default value if the key is
+ * not present
+ * @throws IllegalArgumentException
+ */
+ public synchronized String getString(int key, String defaultValue)
+ throws IllegalArgumentException {
+ if (METADATA_KEYS_TYPE.get(key, METADATA_TYPE_INVALID) != METADATA_TYPE_STRING) {
+ throw(new IllegalArgumentException("Invalid type 'String' for key "+ key));
+ }
+ return mEditorMetadata.getString(String.valueOf(key), defaultValue);
+ }
+
+ /**
+ * Returns the {@link Bitmap} value for the key.
+ * @param key the {@link #BITMAP_KEY_ARTWORK} key
+ * @param defaultValue the value returned if the key is not present
+ * @return the {@link Bitmap} value for the key, or the supplied default value if the key is
+ * not present
+ * @throws IllegalArgumentException
+ */
+ public synchronized Bitmap getBitmap(int key, Bitmap defaultValue)
+ throws IllegalArgumentException {
+ if (key != BITMAP_KEY_ARTWORK) {
+ throw(new IllegalArgumentException("Invalid type 'Bitmap' for key "+ key));
+ }
+ return (mEditorArtwork != null ? mEditorArtwork : defaultValue);
+ }
+
+ /**
+ * Returns an object representation of the value for the key
+ * @param key one of the keys supported in {@link #putObject(int, Object)}
+ * @param defaultValue the value returned if the key is not present
+ * @return the object for the key, as a {@link Long}, {@link Bitmap}, {@link String}, or
+ * {@link Rating} depending on the key value, or the supplied default value if the key is
+ * not present
+ * @throws IllegalArgumentException
+ */
+ public synchronized Object getObject(int key, Object defaultValue)
+ throws IllegalArgumentException {
+ switch (METADATA_KEYS_TYPE.get(key, METADATA_TYPE_INVALID)) {
+ case METADATA_TYPE_LONG:
+ if (mEditorMetadata.containsKey(String.valueOf(key))) {
+ return mEditorMetadata.getLong(String.valueOf(key));
+ } else {
+ return defaultValue;
+ }
+ case METADATA_TYPE_STRING:
+ if (mEditorMetadata.containsKey(String.valueOf(key))) {
+ return mEditorMetadata.getString(String.valueOf(key));
+ } else {
+ return defaultValue;
+ }
+ case METADATA_TYPE_RATING:
+ if (mEditorMetadata.containsKey(String.valueOf(key))) {
+ return mEditorMetadata.getParcelable(String.valueOf(key));
+ } else {
+ return defaultValue;
+ }
+ case METADATA_TYPE_BITMAP:
+ // only one key for Bitmap supported, value is not stored in mEditorMetadata Bundle
+ if (key == BITMAP_KEY_ARTWORK) {
+ return (mEditorArtwork != null ? mEditorArtwork : defaultValue);
+ } // else: fall through to invalid key handling
+ default:
+ throw(new IllegalArgumentException("Invalid key "+ key));
+ }
+ }
+
+
+ /**
+ * @hide
+ */
+ protected static final int METADATA_TYPE_INVALID = -1;
+ /**
+ * @hide
+ */
+ protected static final int METADATA_TYPE_LONG = 0;
+
+ /**
+ * @hide
+ */
+ protected static final int METADATA_TYPE_STRING = 1;
+
+ /**
+ * @hide
+ */
+ protected static final int METADATA_TYPE_BITMAP = 2;
+
+ /**
+ * @hide
+ */
+ protected static final int METADATA_TYPE_RATING = 3;
+
+ /**
+ * @hide
+ */
+ protected static final SparseIntArray METADATA_KEYS_TYPE;
+
+ static {
+ METADATA_KEYS_TYPE = new SparseIntArray(17);
+ // NOTE: if adding to the list below, make sure you increment the array initialization size
+ // keys with long values
+ METADATA_KEYS_TYPE.put(
+ MediaMetadataRetriever.METADATA_KEY_CD_TRACK_NUMBER, METADATA_TYPE_LONG);
+ METADATA_KEYS_TYPE.put(MediaMetadataRetriever.METADATA_KEY_DISC_NUMBER, METADATA_TYPE_LONG);
+ METADATA_KEYS_TYPE.put(MediaMetadataRetriever.METADATA_KEY_DURATION, METADATA_TYPE_LONG);
+ METADATA_KEYS_TYPE.put(MediaMetadataRetriever.METADATA_KEY_YEAR, METADATA_TYPE_LONG);
+ // keys with String values
+ METADATA_KEYS_TYPE.put(MediaMetadataRetriever.METADATA_KEY_ALBUM, METADATA_TYPE_STRING);
+ METADATA_KEYS_TYPE.put(
+ MediaMetadataRetriever.METADATA_KEY_ALBUMARTIST, METADATA_TYPE_STRING);
+ METADATA_KEYS_TYPE.put(MediaMetadataRetriever.METADATA_KEY_TITLE, METADATA_TYPE_STRING);
+ METADATA_KEYS_TYPE.put(MediaMetadataRetriever.METADATA_KEY_ARTIST, METADATA_TYPE_STRING);
+ METADATA_KEYS_TYPE.put(MediaMetadataRetriever.METADATA_KEY_AUTHOR, METADATA_TYPE_STRING);
+ METADATA_KEYS_TYPE.put(
+ MediaMetadataRetriever.METADATA_KEY_COMPILATION, METADATA_TYPE_STRING);
+ METADATA_KEYS_TYPE.put(MediaMetadataRetriever.METADATA_KEY_COMPOSER, METADATA_TYPE_STRING);
+ METADATA_KEYS_TYPE.put(MediaMetadataRetriever.METADATA_KEY_DATE, METADATA_TYPE_STRING);
+ METADATA_KEYS_TYPE.put(MediaMetadataRetriever.METADATA_KEY_GENRE, METADATA_TYPE_STRING);
+ METADATA_KEYS_TYPE.put(MediaMetadataRetriever.METADATA_KEY_WRITER, METADATA_TYPE_STRING);
+ // keys with Bitmap values
+ METADATA_KEYS_TYPE.put(BITMAP_KEY_ARTWORK, METADATA_TYPE_BITMAP);
+ // keys with Rating values
+ METADATA_KEYS_TYPE.put(RATING_KEY_BY_OTHERS, METADATA_TYPE_RATING);
+ METADATA_KEYS_TYPE.put(RATING_KEY_BY_USER, METADATA_TYPE_RATING);
+ }
+}
diff --git a/android/media/MediaMetadataRetriever.java b/android/media/MediaMetadataRetriever.java
new file mode 100644
index 00000000..4ea4e381
--- /dev/null
+++ b/android/media/MediaMetadataRetriever.java
@@ -0,0 +1,576 @@
+/*
+ * Copyright (C) 2008 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.IntDef;
+import android.content.ContentResolver;
+import android.content.Context;
+import android.content.res.AssetFileDescriptor;
+import android.graphics.Bitmap;
+import android.net.Uri;
+import android.os.IBinder;
+
+import java.io.FileDescriptor;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+import java.util.Map;
+
+/**
+ * MediaMetadataRetriever class provides a unified interface for retrieving
+ * frame and meta data from an input media file.
+ */
+public class MediaMetadataRetriever
+{
+ static {
+ System.loadLibrary("media_jni");
+ native_init();
+ }
+
+ // The field below is accessed by native methods
+ @SuppressWarnings("unused")
+ private long mNativeContext;
+
+ private static final int EMBEDDED_PICTURE_TYPE_ANY = 0xFFFF;
+
+ public MediaMetadataRetriever() {
+ native_setup();
+ }
+
+    /**
+     * Sets the data source (file pathname) to use. Call this
+     * method before the rest of the methods in this class. This method may be
+     * time-consuming.
+     *
+     * @param path The path of the input media file.
+     * @throws IllegalArgumentException If the path is null, does not exist, or
+     *         cannot be read.
+     */
+    public void setDataSource(String path) throws IllegalArgumentException {
+        if (path == null) {
+            throw new IllegalArgumentException("path must not be null");
+        }
+
+        // Open the file ourselves and hand the native layer a FileDescriptor;
+        // try-with-resources guarantees the stream is closed even on failure.
+        try (FileInputStream is = new FileInputStream(path)) {
+            FileDescriptor fd = is.getFD();
+            // Length is intentionally less than Long.MAX_VALUE -- see
+            // setDataSource(FileDescriptor).
+            setDataSource(fd, 0, 0x7ffffffffffffffL);
+        } catch (FileNotFoundException fileEx) {
+            // Preserve the cause so callers can see the underlying failure.
+            throw new IllegalArgumentException("could not open path: " + path, fileEx);
+        } catch (IOException ioEx) {
+            throw new IllegalArgumentException("could not read path: " + path, ioEx);
+        }
+    }
+
+    /**
+     * Sets the data source (URI) to use. Call this
+     * method before the rest of the methods in this class. This method may be
+     * time-consuming.
+     *
+     * @param uri The URI of the input media.
+     * @param headers the headers to be sent together with the request for the data
+     * @throws IllegalArgumentException If the URI or headers are invalid.
+     */
+    public void setDataSource(String uri, Map<String, String> headers)
+            throws IllegalArgumentException {
+        // Validate explicitly instead of letting headers.size() throw a bare NPE.
+        if (headers == null) {
+            throw new IllegalArgumentException("headers must not be null");
+        }
+        // Flatten the header map into parallel key/value arrays for the native layer.
+        int i = 0;
+        String[] keys = new String[headers.size()];
+        String[] values = new String[headers.size()];
+        for (Map.Entry<String, String> entry: headers.entrySet()) {
+            keys[i] = entry.getKey();
+            values[i] = entry.getValue();
+            ++i;
+        }
+
+        _setDataSource(
+                MediaHTTPService.createHttpServiceBinderIfNecessary(uri),
+                uri,
+                keys,
+                values);
+    }
+
+ private native void _setDataSource(
+ IBinder httpServiceBinder, String uri, String[] keys, String[] values)
+ throws IllegalArgumentException;
+
+ /**
+ * Sets the data source (FileDescriptor) to use. It is the caller's
+ * responsibility to close the file descriptor. It is safe to do so as soon
+ * as this call returns. Call this method before the rest of the methods in
+ * this class. This method may be time-consuming.
+ *
+ * @param fd the FileDescriptor for the file you want to play
+ * @param offset the offset into the file where the data to be played starts,
+ * in bytes. It must be non-negative
+ * @param length the length in bytes of the data to be played. It must be
+ * non-negative.
+ * @throws IllegalArgumentException if the arguments are invalid
+ */
+ public native void setDataSource(FileDescriptor fd, long offset, long length)
+ throws IllegalArgumentException;
+
+    /**
+     * Sets the data source (FileDescriptor) to use. It is the caller's
+     * responsibility to close the file descriptor. It is safe to do so as soon
+     * as this call returns. Call this method before the rest of the methods in
+     * this class. This method may be time-consuming.
+     *
+     * @param fd the FileDescriptor for the file you want to play
+     * @throws IllegalArgumentException if the FileDescriptor is invalid
+     */
+    public void setDataSource(FileDescriptor fd)
+            throws IllegalArgumentException {
+        // intentionally less than LONG_MAX
+        // (presumably to leave headroom for offset+length arithmetic in the
+        // native layer -- TODO(review): confirm)
+        setDataSource(fd, 0, 0x7ffffffffffffffL);
+    }
+
+    /**
+     * Sets the data source as a content Uri. Call this method before
+     * the rest of the methods in this class. This method may be time-consuming.
+     *
+     * @param context the Context to use when resolving the Uri
+     * @param uri the Content URI of the data you want to play
+     * @throws IllegalArgumentException if the Uri is invalid
+     * @throws SecurityException if the Uri cannot be used due to lack of
+     * permission.
+     */
+    public void setDataSource(Context context, Uri uri)
+        throws IllegalArgumentException, SecurityException {
+        if (uri == null) {
+            throw new IllegalArgumentException();
+        }
+
+        // file:// (or scheme-less) URIs are plain filesystem paths; delegate
+        // to the path-based overload.
+        String scheme = uri.getScheme();
+        if(scheme == null || scheme.equals("file")) {
+            setDataSource(uri.getPath());
+            return;
+        }
+
+        AssetFileDescriptor fd = null;
+        try {
+            // Resolve the content URI to a readable file descriptor.
+            ContentResolver resolver = context.getContentResolver();
+            try {
+                fd = resolver.openAssetFileDescriptor(uri, "r");
+            } catch(FileNotFoundException e) {
+                throw new IllegalArgumentException();
+            }
+            if (fd == null) {
+                throw new IllegalArgumentException();
+            }
+            FileDescriptor descriptor = fd.getFileDescriptor();
+            if (!descriptor.valid()) {
+                throw new IllegalArgumentException();
+            }
+            // Note: using getDeclaredLength so that our behavior is the same
+            // as previous versions when the content provider is returning
+            // a full file.
+            if (fd.getDeclaredLength() < 0) {
+                setDataSource(descriptor);
+            } else {
+                setDataSource(descriptor, fd.getStartOffset(), fd.getDeclaredLength());
+            }
+            return;
+        } catch (SecurityException ex) {
+            // Intentionally swallowed: on an access failure we fall through to
+            // the URI-string fallback below instead of failing outright.
+        } finally {
+            // Close our AssetFileDescriptor; the native layer dups the fd.
+            try {
+                if (fd != null) {
+                    fd.close();
+                }
+            } catch(IOException ioEx) {
+                // Best-effort close; nothing useful to do on failure.
+            }
+        }
+        // Last resort: hand the full URI string to the path-based overload.
+        setDataSource(uri.toString());
+    }
+
+    /**
+     * Sets the data source (MediaDataSource) to use.
+     *
+     * @param dataSource the MediaDataSource for the media you want to play
+     * @throws IllegalArgumentException if the data source is rejected
+     */
+    public void setDataSource(MediaDataSource dataSource)
+            throws IllegalArgumentException {
+        // Thin wrapper over the native overload.
+        _setDataSource(dataSource);
+    }
+
+ private native void _setDataSource(MediaDataSource dataSource)
+ throws IllegalArgumentException;
+
+ /**
+ * Call this method after setDataSource(). This method retrieves the
+ * meta data value associated with the keyCode.
+ *
+ * The keyCode currently supported is listed below as METADATA_XXX
+ * constants. With any other value, it returns a null pointer.
+ *
+ * @param keyCode One of the constants listed below at the end of the class.
+ * @return The meta data value associate with the given keyCode on success;
+ * null on failure.
+ */
+ public native String extractMetadata(int keyCode);
+
+ /**
+ * Call this method after setDataSource(). This method finds a
+ * representative frame close to the given time position by considering
+ * the given option if possible, and returns it as a bitmap. This is
+ * useful for generating a thumbnail for an input data source or just
+ * obtain and display a frame at the given time position.
+ *
+ * @param timeUs The time position where the frame will be retrieved.
+ * When retrieving the frame at the given time position, there is no
+ * guarantee that the data source has a frame located at the position.
+ * When this happens, a frame nearby will be returned. If timeUs is
+     * negative, time position and option will be ignored, and any frame
+ * that the implementation considers as representative may be returned.
+ *
+ * @param option a hint on how the frame is found. Use
+ * {@link #OPTION_PREVIOUS_SYNC} if one wants to retrieve a sync frame
+ * that has a timestamp earlier than or the same as timeUs. Use
+ * {@link #OPTION_NEXT_SYNC} if one wants to retrieve a sync frame
+ * that has a timestamp later than or the same as timeUs. Use
+ * {@link #OPTION_CLOSEST_SYNC} if one wants to retrieve a sync frame
+ * that has a timestamp closest to or the same as timeUs. Use
+ * {@link #OPTION_CLOSEST} if one wants to retrieve a frame that may
+ * or may not be a sync frame but is closest to or the same as timeUs.
+ * {@link #OPTION_CLOSEST} often has larger performance overhead compared
+ * to the other options if there is no sync frame located at timeUs.
+ *
+ * @return A Bitmap containing a representative video frame, which
+ * can be null, if such a frame cannot be retrieved.
+ */
+    public Bitmap getFrameAtTime(long timeUs, @Option int option) {
+        // The OPTION_* constants form a contiguous range; reject anything
+        // outside it before crossing into native code.
+        if (option < OPTION_PREVIOUS_SYNC || option > OPTION_CLOSEST) {
+            throw new IllegalArgumentException("Unsupported option: " + option);
+        }
+        // Negative dst dimensions ask the native layer for the source size.
+        return _getFrameAtTime(timeUs, option, -1 /*dst_width*/, -1 /*dst_height*/);
+    }
+
+ /**
+ * Retrieve a video frame near a given timestamp scaled to a desired size.
+ * Call this method after setDataSource(). This method finds a representative
+ * frame close to the given time position by considering the given option
+ * if possible, and returns it as a bitmap with same aspect ratio as the source
+ * while scaling it so that it fits into the desired size of dst_width by dst_height.
+ * This is useful for generating a thumbnail for an input data source or just to
+ * obtain a scaled frame at the given time position.
+ *
+ * @param timeUs The time position in microseconds where the frame will be retrieved.
+ * When retrieving the frame at the given time position, there is no
+ * guarantee that the data source has a frame located at the position.
+ * When this happens, a frame nearby will be returned. If timeUs is
+     * negative, time position and option will be ignored, and any frame
+ * that the implementation considers as representative may be returned.
+ *
+ * @param option a hint on how the frame is found. Use
+ * {@link #OPTION_PREVIOUS_SYNC} if one wants to retrieve a sync frame
+ * that has a timestamp earlier than or the same as timeUs. Use
+ * {@link #OPTION_NEXT_SYNC} if one wants to retrieve a sync frame
+ * that has a timestamp later than or the same as timeUs. Use
+ * {@link #OPTION_CLOSEST_SYNC} if one wants to retrieve a sync frame
+ * that has a timestamp closest to or the same as timeUs. Use
+ * {@link #OPTION_CLOSEST} if one wants to retrieve a frame that may
+ * or may not be a sync frame but is closest to or the same as timeUs.
+ * {@link #OPTION_CLOSEST} often has larger performance overhead compared
+ * to the other options if there is no sync frame located at timeUs.
+ *
+ * @param dstWidth expected output bitmap width
+ * @param dstHeight expected output bitmap height
+ * @return A Bitmap of size not larger than dstWidth by dstHeight containing a
+ * scaled video frame, which can be null, if such a frame cannot be retrieved.
+ * @throws IllegalArgumentException if passed in invalid option or width by height
+ * is less than or equal to 0.
+ */
+    public Bitmap getScaledFrameAtTime(
+            long timeUs, @Option int option, int dstWidth, int dstHeight) {
+        // The OPTION_* constants form a contiguous range; reject anything else.
+        if (option < OPTION_PREVIOUS_SYNC || option > OPTION_CLOSEST) {
+            throw new IllegalArgumentException("Unsupported option: " + option);
+        }
+        // Both target dimensions must be strictly positive.
+        if (dstWidth <= 0) {
+            throw new IllegalArgumentException("Invalid width: " + dstWidth);
+        }
+        if (dstHeight <= 0) {
+            throw new IllegalArgumentException("Invalid height: " + dstHeight);
+        }
+        return _getFrameAtTime(timeUs, option, dstWidth, dstHeight);
+    }
+
+ /**
+ * Call this method after setDataSource(). This method finds a
+ * representative frame close to the given time position if possible,
+ * and returns it as a bitmap. This is useful for generating a thumbnail
+ * for an input data source. Call this method if one does not care
+ * how the frame is found as long as it is close to the given time;
+ * otherwise, please call {@link #getFrameAtTime(long, int)}.
+ *
+ * @param timeUs The time position where the frame will be retrieved.
+ * When retrieving the frame at the given time position, there is no
+     * guarantee that the data source has a frame located at the position.
+ * When this happens, a frame nearby will be returned. If timeUs is
+     * negative, time position and option will be ignored, and any frame
+ * that the implementation considers as representative may be returned.
+ *
+ * @return A Bitmap of size dst_widthxdst_height containing a representative
+ * video frame, which can be null, if such a frame cannot be retrieved.
+ *
+ * @see #getFrameAtTime(long, int)
+ */
+    public Bitmap getFrameAtTime(long timeUs) {
+        // Defaults to the closest-sync-frame heuristic; see
+        // getFrameAtTime(long, int) for the other options.
+        return getFrameAtTime(timeUs, OPTION_CLOSEST_SYNC);
+    }
+
+ /**
+ * Call this method after setDataSource(). This method finds a
+ * representative frame at any time position if possible,
+ * and returns it as a bitmap. This is useful for generating a thumbnail
+ * for an input data source. Call this method if one does not
+ * care about where the frame is located; otherwise, please call
+ * {@link #getFrameAtTime(long)} or {@link #getFrameAtTime(long, int)}
+ *
+ * @return A Bitmap containing a representative video frame, which
+ * can be null, if such a frame cannot be retrieved.
+ *
+ * @see #getFrameAtTime(long)
+ * @see #getFrameAtTime(long, int)
+ */
+    public Bitmap getFrameAtTime() {
+        // timeUs < 0 lets the implementation pick any representative frame;
+        // -1 dst dimensions request the source frame size.
+        return _getFrameAtTime(-1, OPTION_CLOSEST_SYNC, -1 /*dst_width*/, -1 /*dst_height*/);
+    }
+
+ private native Bitmap _getFrameAtTime(long timeUs, int option, int width, int height);
+
+ /**
+ * Call this method after setDataSource(). This method finds the optional
+     * graphic or album/cover art associated with the data source. If
+ * there are more than one pictures, (any) one of them is returned.
+ *
+ * @return null if no such graphic is found.
+ */
+    public byte[] getEmbeddedPicture() {
+        // EMBEDDED_PICTURE_TYPE_ANY: accept whatever picture type the media
+        // carries; the native layer picks one if several are present.
+        return getEmbeddedPicture(EMBEDDED_PICTURE_TYPE_ANY);
+    }
+
+ private native byte[] getEmbeddedPicture(int pictureType);
+
+ /**
+ * Call it when one is done with the object. This method releases the memory
+ * allocated internally.
+ */
+ public native void release();
+ private native void native_setup();
+ private static native void native_init();
+
+ private native final void native_finalize();
+
+    @Override
+    protected void finalize() throws Throwable {
+        try {
+            // Last-resort native cleanup if the caller never called release()
+            // -- presumably frees the same native state; TODO(review): confirm.
+            native_finalize();
+        } finally {
+            super.finalize();
+        }
+    }
+
+ /**
+ * Option used in method {@link #getFrameAtTime(long, int)} to get a
+ * frame at a specified location.
+ *
+ * @see #getFrameAtTime(long, int)
+ */
+ /* Do not change these option values without updating their counterparts
+ * in include/media/stagefright/MediaSource.h!
+ */
+ /**
+ * This option is used with {@link #getFrameAtTime(long, int)} to retrieve
+ * a sync (or key) frame associated with a data source that is located
+ * right before or at the given time.
+ *
+ * @see #getFrameAtTime(long, int)
+ */
+ public static final int OPTION_PREVIOUS_SYNC = 0x00;
+ /**
+ * This option is used with {@link #getFrameAtTime(long, int)} to retrieve
+ * a sync (or key) frame associated with a data source that is located
+ * right after or at the given time.
+ *
+ * @see #getFrameAtTime(long, int)
+ */
+ public static final int OPTION_NEXT_SYNC = 0x01;
+ /**
+ * This option is used with {@link #getFrameAtTime(long, int)} to retrieve
+ * a sync (or key) frame associated with a data source that is located
+ * closest to (in time) or at the given time.
+ *
+ * @see #getFrameAtTime(long, int)
+ */
+ public static final int OPTION_CLOSEST_SYNC = 0x02;
+ /**
+ * This option is used with {@link #getFrameAtTime(long, int)} to retrieve
+ * a frame (not necessarily a key frame) associated with a data source that
+ * is located closest to or at the given time.
+ *
+ * @see #getFrameAtTime(long, int)
+ */
+ public static final int OPTION_CLOSEST = 0x03;
+
+ /** @hide */
+ @IntDef(flag = true, prefix = { "OPTION_" }, value = {
+ OPTION_PREVIOUS_SYNC,
+ OPTION_NEXT_SYNC,
+ OPTION_CLOSEST_SYNC,
+ OPTION_CLOSEST,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface Option {}
+
+ /*
+ * Do not change these metadata key values without updating their
+ * counterparts in include/media/mediametadataretriever.h!
+ */
+ /**
+ * The metadata key to retrieve the numeric string describing the
+ * order of the audio data source on its original recording.
+ */
+ public static final int METADATA_KEY_CD_TRACK_NUMBER = 0;
+ /**
+ * The metadata key to retrieve the information about the album title
+ * of the data source.
+ */
+ public static final int METADATA_KEY_ALBUM = 1;
+ /**
+ * The metadata key to retrieve the information about the artist of
+ * the data source.
+ */
+ public static final int METADATA_KEY_ARTIST = 2;
+ /**
+ * The metadata key to retrieve the information about the author of
+ * the data source.
+ */
+ public static final int METADATA_KEY_AUTHOR = 3;
+ /**
+ * The metadata key to retrieve the information about the composer of
+ * the data source.
+ */
+ public static final int METADATA_KEY_COMPOSER = 4;
+ /**
+ * The metadata key to retrieve the date when the data source was created
+ * or modified.
+ */
+ public static final int METADATA_KEY_DATE = 5;
+ /**
+ * The metadata key to retrieve the content type or genre of the data
+ * source.
+ */
+ public static final int METADATA_KEY_GENRE = 6;
+ /**
+ * The metadata key to retrieve the data source title.
+ */
+ public static final int METADATA_KEY_TITLE = 7;
+ /**
+ * The metadata key to retrieve the year when the data source was created
+ * or modified.
+ */
+ public static final int METADATA_KEY_YEAR = 8;
+ /**
+ * The metadata key to retrieve the playback duration of the data source.
+ */
+ public static final int METADATA_KEY_DURATION = 9;
+ /**
+ * The metadata key to retrieve the number of tracks, such as audio, video,
+ * text, in the data source, such as a mp4 or 3gpp file.
+ */
+ public static final int METADATA_KEY_NUM_TRACKS = 10;
+ /**
+ * The metadata key to retrieve the information of the writer (such as
+ * lyricist) of the data source.
+ */
+ public static final int METADATA_KEY_WRITER = 11;
+ /**
+ * The metadata key to retrieve the mime type of the data source. Some
+ * example mime types include: "video/mp4", "audio/mp4", "audio/amr-wb",
+ * etc.
+ */
+ public static final int METADATA_KEY_MIMETYPE = 12;
+ /**
+ * The metadata key to retrieve the information about the performers or
+ * artist associated with the data source.
+ */
+ public static final int METADATA_KEY_ALBUMARTIST = 13;
+ /**
+     * The metadata key to retrieve the numeric string that describes which
+ * part of a set the audio data source comes from.
+ */
+ public static final int METADATA_KEY_DISC_NUMBER = 14;
+ /**
+ * The metadata key to retrieve the music album compilation status.
+ */
+ public static final int METADATA_KEY_COMPILATION = 15;
+ /**
+ * If this key exists the media contains audio content.
+ */
+ public static final int METADATA_KEY_HAS_AUDIO = 16;
+ /**
+ * If this key exists the media contains video content.
+ */
+ public static final int METADATA_KEY_HAS_VIDEO = 17;
+ /**
+ * If the media contains video, this key retrieves its width.
+ */
+ public static final int METADATA_KEY_VIDEO_WIDTH = 18;
+ /**
+ * If the media contains video, this key retrieves its height.
+ */
+ public static final int METADATA_KEY_VIDEO_HEIGHT = 19;
+ /**
+ * This key retrieves the average bitrate (in bits/sec), if available.
+ */
+ public static final int METADATA_KEY_BITRATE = 20;
+ /**
+ * This key retrieves the language code of text tracks, if available.
+ * If multiple text tracks present, the return value will look like:
+ * "eng:chi"
+ * @hide
+ */
+ public static final int METADATA_KEY_TIMED_TEXT_LANGUAGES = 21;
+ /**
+ * If this key exists the media is drm-protected.
+ * @hide
+ */
+ public static final int METADATA_KEY_IS_DRM = 22;
+ /**
+ * This key retrieves the location information, if available.
+ * The location should be specified according to ISO-6709 standard, under
+ * a mp4/3gp box "@xyz". Location with longitude of -90 degrees and latitude
+ * of 180 degrees will be retrieved as "-90.0000+180.0000", for instance.
+ */
+ public static final int METADATA_KEY_LOCATION = 23;
+ /**
+ * This key retrieves the video rotation angle in degrees, if available.
+ * The video rotation angle may be 0, 90, 180, or 270 degrees.
+ */
+ public static final int METADATA_KEY_VIDEO_ROTATION = 24;
+ /**
+ * This key retrieves the original capture framerate, if it's
+ * available. The capture framerate will be a floating point
+ * number.
+ */
+ public static final int METADATA_KEY_CAPTURE_FRAMERATE = 25;
+ // Add more here...
+}
diff --git a/android/media/MediaMuxer.java b/android/media/MediaMuxer.java
new file mode 100644
index 00000000..832b2974
--- /dev/null
+++ b/android/media/MediaMuxer.java
@@ -0,0 +1,696 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.media.MediaCodec;
+import android.media.MediaCodec.BufferInfo;
+import dalvik.system.CloseGuard;
+
+import java.io.FileDescriptor;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.nio.ByteBuffer;
+import java.util.Map;
+
+/**
+ * MediaMuxer facilitates muxing elementary streams. Currently MediaMuxer supports MP4, Webm
+ * and 3GP file as the output. It also supports muxing B-frames in MP4 since Android Nougat.
+ * <p>
+ * It is generally used like this:
+ *
+ * <pre>
+ * MediaMuxer muxer = new MediaMuxer("temp.mp4", OutputFormat.MUXER_OUTPUT_MPEG_4);
+ * // More often, the MediaFormat will be retrieved from MediaCodec.getOutputFormat()
+ * // or MediaExtractor.getTrackFormat().
+ * MediaFormat audioFormat = new MediaFormat(...);
+ * MediaFormat videoFormat = new MediaFormat(...);
+ * int audioTrackIndex = muxer.addTrack(audioFormat);
+ * int videoTrackIndex = muxer.addTrack(videoFormat);
+ * ByteBuffer inputBuffer = ByteBuffer.allocate(bufferSize);
+ * boolean finished = false;
+ * BufferInfo bufferInfo = new BufferInfo();
+ *
+ * muxer.start();
+ * while(!finished) {
+ * // getInputBuffer() will fill the inputBuffer with one frame of encoded
+ * // sample from either MediaCodec or MediaExtractor, set isAudioSample to
+ * // true when the sample is audio data, set up all the fields of bufferInfo,
+ * // and return true if there are no more samples.
+ * finished = getInputBuffer(inputBuffer, isAudioSample, bufferInfo);
+ * if (!finished) {
+ * int currentTrackIndex = isAudioSample ? audioTrackIndex : videoTrackIndex;
+ * muxer.writeSampleData(currentTrackIndex, inputBuffer, bufferInfo);
+ * }
+ * };
+ * muxer.stop();
+ * muxer.release();
+ * </pre>
+ *
+
+ <h4>Metadata Track</h4>
+ <p>
+ Per-frame metadata is useful in carrying extra information that correlates with video or audio to
+ facilitate offline processing, e.g. gyro signals from the sensor could help video stabilization when
+ doing offline processing. Metadata track is only supported in MP4 container. When adding a new
+ metadata track, track's mime format must start with prefix "application/", e.g. "application/gyro".
+ Metadata's format/layout will be defined by the application. Writing metadata is nearly the same as
+ writing video/audio data except that the data will not be from mediacodec. Application just needs
+ to pass the bytebuffer that contains the metadata and also the associated timestamp to the
+ {@link #writeSampleData} api. The timestamp must be in the same time base as video and audio. The
+ generated MP4 file uses TextMetaDataSampleEntry defined in section 12.3.3.2 of the ISOBMFF to signal
+ the metadata's mime format. When using {@link android.media.MediaExtractor} to extract the file with
+ metadata track, the mime format of the metadata will be extracted into {@link android.media.MediaFormat}.
+
+ <pre class=prettyprint>
+ MediaMuxer muxer = new MediaMuxer("temp.mp4", OutputFormat.MUXER_OUTPUT_MPEG_4);
+ // SetUp Video/Audio Tracks.
+ MediaFormat audioFormat = new MediaFormat(...);
+ MediaFormat videoFormat = new MediaFormat(...);
+ int audioTrackIndex = muxer.addTrack(audioFormat);
+ int videoTrackIndex = muxer.addTrack(videoFormat);
+
+ // Setup Metadata Track
+ MediaFormat metadataFormat = new MediaFormat(...);
+ metadataFormat.setString(KEY_MIME, "application/gyro");
+ int metadataTrackIndex = muxer.addTrack(metadataFormat);
+
+ muxer.start();
+ while(..) {
+ // Allocate bytebuffer and write gyro data(x,y,z) into it.
+ ByteBuffer metaData = ByteBuffer.allocate(bufferSize);
+ metaData.putFloat(x);
+ metaData.putFloat(y);
+ metaData.putFloat(z);
+ BufferInfo metaInfo = new BufferInfo();
+ // Associate this metadata with the video frame by setting
+ // the same timestamp as the video frame.
+ metaInfo.presentationTimeUs = currentVideoTrackTimeUs;
+ metaInfo.offset = 0;
+ metaInfo.flags = 0;
+ metaInfo.size = bufferSize;
+ muxer.writeSampleData(metadataTrackIndex, metaData, metaInfo);
+ };
+ muxer.stop();
+ muxer.release();
+ }</pre>
+
+ <h2 id=History><a name="History"></a>Features and API History</h2>
+ <p>
+ The following table summarizes the feature support in different API version and containers.
+ For API version numbers, see {@link android.os.Build.VERSION_CODES}.
+
+ <style>
+ .api > tr > th, .api > tr > td { text-align: center; padding: 4px 4px; }
+ .api > tr > th { vertical-align: bottom; }
+ .api > tr > td { vertical-align: middle; }
+ .sml > tr > th, .sml > tr > td { text-align: center; padding: 2px 4px; }
+ .fn { text-align: center; }
+ </style>
+
+ <table align="right" style="width: 0%">
+ <thead>
+ <tbody class=api>
+ <tr><th>Symbol</th>
+ <th>Meaning</th></tr>
+ </tbody>
+ </thead>
+ <tbody class=sml>
+ <tr><td>&#9679;</td><td>Supported</td></tr>
+ <tr><td>&#9675;</td><td>Not supported</td></tr>
+ <tr><td>&#9639;</td><td>Supported in MP4/WebM/3GP</td></tr>
+ <tr><td>&#8277;</td><td>Only Supported in MP4</td></tr>
+ </tbody>
+ </table>
+<table align="center" style="width: 100%;">
+ <thead class=api>
+ <tr>
+ <th rowspan=2>Feature</th>
+ <th colspan="24">SDK Version</th>
+ </tr>
+ <tr>
+ <th>18</th>
+ <th>19</th>
+ <th>20</th>
+ <th>21</th>
+ <th>22</th>
+ <th>23</th>
+ <th>24</th>
+ <th>25</th>
+ <th>26+</th>
+ </tr>
+ </thead>
+ <tbody class=api>
+ <tr>
+ <td align="center">MP4 container</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <td align="center">WebM container</td>
+ <td>&#9675;</td>
+ <td>&#9675;</td>
+ <td>&#9675;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ <td>&#9679;</td>
+ </tr>
+ <td align="center">3GP container</td>
+ <td>&#9675;</td>
+ <td>&#9675;</td>
+ <td>&#9675;</td>
+ <td>&#9675;</td>
+ <td>&#9675;</td>
+ <td>&#9675;</td>
+ <td>&#9675;</td>
+ <td>&#9675;</td>
+ <td>&#9679;</td>
+ </tr>
+ <td align="center">Muxing B-Frames(bi-directional predicted frames)</td>
+ <td>&#9675;</td>
+ <td>&#9675;</td>
+ <td>&#9675;</td>
+ <td>&#9675;</td>
+ <td>&#9675;</td>
+ <td>&#9675;</td>
+ <td>&#8277;</td>
+ <td>&#8277;</td>
+ <td>&#8277;</td>
+ </tr>
+ </tr>
+ <td align="center">Muxing Single Video/Audio Track</td>
+ <td>&#9639;</td>
+ <td>&#9639;</td>
+ <td>&#9639;</td>
+ <td>&#9639;</td>
+ <td>&#9639;</td>
+ <td>&#9639;</td>
+ <td>&#9639;</td>
+ <td>&#9639;</td>
+ <td>&#9639;</td>
+ </tr>
+ </tr>
+ <td align="center">Muxing Multiple Video/Audio Tracks</td>
+ <td>&#9675;</td>
+ <td>&#9675;</td>
+ <td>&#9675;</td>
+ <td>&#9675;</td>
+ <td>&#9675;</td>
+ <td>&#9675;</td>
+ <td>&#9675;</td>
+ <td>&#9675;</td>
+ <td>&#8277;</td>
+ </tr>
+ </tr>
+ <td align="center">Muxing Metadata Tracks</td>
+ <td>&#9675;</td>
+ <td>&#9675;</td>
+ <td>&#9675;</td>
+ <td>&#9675;</td>
+ <td>&#9675;</td>
+ <td>&#9675;</td>
+ <td>&#9675;</td>
+ <td>&#9675;</td>
+ <td>&#8277;</td>
+ </tr>
+ </tbody>
+ </table>
+ */
+
+final public class MediaMuxer {
+
+ static {
+ System.loadLibrary("media_jni");
+ }
+
+    /**
+     * Defines the output format. These constants are used with constructor.
+     */
+    public static final class OutputFormat {
+        /* Do not change these values without updating their counterparts
+         * in include/media/stagefright/MediaMuxer.h!
+         */
+        // Non-instantiable constant holder.
+        private OutputFormat() {}
+        /** MPEG4 media file format*/
+        public static final int MUXER_OUTPUT_MPEG_4 = 0;
+        /** WEBM media file format*/
+        public static final int MUXER_OUTPUT_WEBM = 1;
+        /** 3GPP media file format*/
+        public static final int MUXER_OUTPUT_3GPP = 2;
+    };
+
+ /** @hide */
+ @IntDef({
+ OutputFormat.MUXER_OUTPUT_MPEG_4,
+ OutputFormat.MUXER_OUTPUT_WEBM,
+ OutputFormat.MUXER_OUTPUT_3GPP,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface Format {}
+
+ // All the native functions are listed here.
+ private static native long nativeSetup(@NonNull FileDescriptor fd, int format)
+ throws IllegalArgumentException, IOException;
+ private static native void nativeRelease(long nativeObject);
+ private static native void nativeStart(long nativeObject);
+ private static native void nativeStop(long nativeObject);
+ private static native int nativeAddTrack(
+ long nativeObject, @NonNull String[] keys, @NonNull Object[] values);
+ private static native void nativeSetOrientationHint(
+ long nativeObject, int degrees);
+ private static native void nativeSetLocation(long nativeObject, int latitude, int longitude);
+ private static native void nativeWriteSampleData(
+ long nativeObject, int trackIndex, @NonNull ByteBuffer byteBuf,
+ int offset, int size, long presentationTimeUs, @MediaCodec.BufferFlag int flags);
+
+ // Muxer internal states.
+ private static final int MUXER_STATE_UNINITIALIZED = -1;
+ private static final int MUXER_STATE_INITIALIZED = 0;
+ private static final int MUXER_STATE_STARTED = 1;
+ private static final int MUXER_STATE_STOPPED = 2;
+
+ private int mState = MUXER_STATE_UNINITIALIZED;
+
+ private final CloseGuard mCloseGuard = CloseGuard.get();
+ private int mLastTrackIndex = -1;
+
+ private long mNativeObject;
+
+    /**
+     * Constructor.
+     * Creates a media muxer that writes to the specified path.
+     * @param path The path of the output media file.
+     * @param format The format of the output media file.
+     * @see android.media.MediaMuxer.OutputFormat
+     * @throws IllegalArgumentException if path is invalid or format is not supported.
+     * @throws IOException if failed to open the file for write.
+     */
+    public MediaMuxer(@NonNull String path, @Format int format) throws IOException {
+        if (path == null) {
+            throw new IllegalArgumentException("path must not be null");
+        }
+        // Use RandomAccessFile so we can open the file with RW access;
+        // RW access allows the native writer to memory map the output file.
+        // try-with-resources closes the file on every path, including when
+        // setUpMediaMuxer throws. Closing here is safe: the native layer keeps
+        // its own reference obtained via getFD().
+        try (RandomAccessFile file = new RandomAccessFile(path, "rws")) {
+            setUpMediaMuxer(file.getFD(), format);
+        }
+    }
+
+    /**
+     * Constructor.
+     * Creates a media muxer that writes to the specified FileDescriptor. File descriptor
+     * must be seekable and writable. Application should not use the file referenced
+     * by this file descriptor until {@link #stop}. It is the application's responsibility
+     * to close the file descriptor. It is safe to do so as soon as this call returns.
+     * @param fd The FileDescriptor of the output media file.
+     * @param format The format of the output media file.
+     * @see android.media.MediaMuxer.OutputFormat
+     * @throws IllegalArgumentException if fd is invalid or format is not supported.
+     * @throws IOException if failed to open the file for write.
+     */
+    public MediaMuxer(@NonNull FileDescriptor fd, @Format int format) throws IOException {
+        // All validation and native setup is shared with the path constructor.
+        setUpMediaMuxer(fd, format);
+    }
+
+    /**
+     * Performs the native initialization shared by both constructors.
+     *
+     * @param fd seekable, writable descriptor for the output file.
+     * @param format one of the {@link OutputFormat} constants.
+     * @throws IllegalArgumentException if fd is null or format is not supported.
+     * @throws IOException if the native muxer cannot be set up.
+     */
+    private void setUpMediaMuxer(@NonNull FileDescriptor fd, @Format int format) throws IOException {
+        // Reject null here so it surfaces as the documented
+        // IllegalArgumentException rather than reaching native code.
+        if (fd == null) {
+            throw new IllegalArgumentException("fd must not be null");
+        }
+        if (format != OutputFormat.MUXER_OUTPUT_MPEG_4 && format != OutputFormat.MUXER_OUTPUT_WEBM
+                && format != OutputFormat.MUXER_OUTPUT_3GPP) {
+            throw new IllegalArgumentException("format: " + format + " is invalid");
+        }
+        mNativeObject = nativeSetup(fd, format);
+        mState = MUXER_STATE_INITIALIZED;
+        // Arm the leak detector; release() is the expected cleanup call.
+        mCloseGuard.open("release");
+    }
+
+    /**
+     * Sets the orientation hint for output video playback.
+     * <p>Call this before {@link #start}. The muxer does not rotate the video
+     * frames; instead, when the output format is
+     * {@link OutputFormat#MUXER_OUTPUT_MPEG_4}, it writes a composition matrix
+     * carrying the rotation angle so that a video player can choose the proper
+     * orientation for playback. Note that some players may ignore this matrix.
+     * By default, the rotation degree is 0.</p>
+     * @param degrees the clockwise rotation angle; one of 0, 90, 180 or 270.
+     * @throws IllegalArgumentException if degrees is not a supported angle.
+     * @throws IllegalStateException if this method is called after {@link #start}.
+     */
+    public void setOrientationHint(int degrees) {
+        final boolean supported = (degrees == 0) || (degrees == 90)
+                || (degrees == 180) || (degrees == 270);
+        if (!supported) {
+            throw new IllegalArgumentException("Unsupported angle: " + degrees);
+        }
+        if (mState != MUXER_STATE_INITIALIZED) {
+            throw new IllegalStateException("Can't set rotation degrees due" +
+                " to wrong state.");
+        }
+        nativeSetOrientationHint(mNativeObject, degrees);
+    }
+
+    /**
+     * Set and store the geodata (latitude and longitude) in the output file.
+     * Call this before {@link #start}. For
+     * {@link OutputFormat#MUXER_OUTPUT_MPEG_4} output the geodata is stored in
+     * the udta box per the ISO-6709 standard; other output formats ignore it.
+     *
+     * @param latitude Latitude in degrees; must be within [-90, 90].
+     * @param longitude Longitude in degrees; must be within [-180, 180].
+     * @throws IllegalArgumentException If the given latitude or longitude is
+     * out of range.
+     * @throws IllegalStateException If this method is called after {@link #start}.
+     */
+    public void setLocation(float latitude, float longitude) {
+        // Convert to fixed point (degrees x 10000), the unit the native API expects.
+        final int latFixed = (int) (latitude * 10000 + 0.5);
+        final int lonFixed = (int) (longitude * 10000 + 0.5);
+
+        if (latFixed > 900000 || latFixed < -900000) {
+            throw new IllegalArgumentException("Latitude: " + latitude + " out of range.");
+        }
+        if (lonFixed > 1800000 || lonFixed < -1800000) {
+            throw new IllegalArgumentException("Longitude: " + longitude + " out of range");
+        }
+        if (mState != MUXER_STATE_INITIALIZED || mNativeObject == 0) {
+            throw new IllegalStateException("Can't set location due to wrong state.");
+        }
+        nativeSetLocation(mNativeObject, latFixed, lonFixed);
+    }
+
+    /**
+     * Starts the muxer.
+     * <p>Make sure this is called after {@link #addTrack} and before
+     * {@link #writeSampleData}.</p>
+     * @throws IllegalStateException If this method is called after {@link #start}
+     * or Muxer is released
+     */
+    public void start() {
+        // Guard clauses: must still hold native resources and be freshly initialized.
+        if (mNativeObject == 0) {
+            throw new IllegalStateException("Muxer has been released!");
+        }
+        if (mState != MUXER_STATE_INITIALIZED) {
+            throw new IllegalStateException("Can't start due to wrong state.");
+        }
+        nativeStart(mNativeObject);
+        mState = MUXER_STATE_STARTED;
+    }
+
+    /**
+     * Stops the muxer.
+     * <p>Once the muxer stops, it can not be restarted.</p>
+     * @throws IllegalStateException if muxer is in the wrong state.
+     */
+    public void stop() {
+        if (mState != MUXER_STATE_STARTED) {
+            throw new IllegalStateException("Can't stop due to wrong state.");
+        }
+        // Mirror the defensive released-muxer check in start(); release()
+        // resets mState so this should be unreachable, but guard the native
+        // call anyway rather than passing a 0 handle down.
+        if (mNativeObject == 0) {
+            throw new IllegalStateException("Muxer has been released!");
+        }
+        nativeStop(mNativeObject);
+        mState = MUXER_STATE_STOPPED;
+    }
+
+    @Override
+    protected void finalize() throws Throwable {
+        // Safety net for apps that forgot release(): warn via CloseGuard and
+        // free the native muxer so its resources are not leaked.
+        try {
+            if (mCloseGuard != null) {
+                mCloseGuard.warnIfOpen();
+            }
+            if (mNativeObject != 0) {
+                nativeRelease(mNativeObject);
+                mNativeObject = 0;
+            }
+        } finally {
+            // Always chain to Object.finalize(), even if native release throws.
+            super.finalize();
+        }
+    }
+
+ /**
+ * Adds a track with the specified format.
+ * <p>
+ * The following table summarizes support for specific format keys across android releases.
+ * Keys marked with '+:' are required.
+ *
+ * <table style="width: 0%">
+ * <thead>
+ * <tr>
+ * <th rowspan=2>OS Version(s)</th>
+     * <th colspan=3>{@code MediaFormat} keys used for</th>
+ * </tr><tr>
+ * <th>All Tracks</th>
+ * <th>Audio Tracks</th>
+ * <th>Video Tracks</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td>{@link android.os.Build.VERSION_CODES#JELLY_BEAN_MR2}</td>
+ * <td rowspan=7>+: {@link MediaFormat#KEY_MIME}</td>
+ * <td rowspan=3>+: {@link MediaFormat#KEY_SAMPLE_RATE},<br>
+ * +: {@link MediaFormat#KEY_CHANNEL_COUNT},<br>
+ * +: <strong>codec-specific data<sup>AAC</sup></strong></td>
+ * <td rowspan=5>+: {@link MediaFormat#KEY_WIDTH},<br>
+ * +: {@link MediaFormat#KEY_HEIGHT},<br>
+ * no {@code KEY_ROTATION},
+ * use {@link #setOrientationHint setOrientationHint()}<sup>.mp4</sup>,<br>
+ * +: <strong>codec-specific data<sup>AVC, MPEG4</sup></strong></td>
+ * </tr><tr>
+ * <td>{@link android.os.Build.VERSION_CODES#KITKAT}</td>
+ * </tr><tr>
+ * <td>{@link android.os.Build.VERSION_CODES#KITKAT_WATCH}</td>
+ * </tr><tr>
+ * <td>{@link android.os.Build.VERSION_CODES#LOLLIPOP}</td>
+ * <td rowspan=4>as above, plus<br>
+ * +: <strong>codec-specific data<sup>Vorbis & .webm</sup></strong></td>
+ * </tr><tr>
+ * <td>{@link android.os.Build.VERSION_CODES#LOLLIPOP_MR1}</td>
+ * </tr><tr>
+ * <td>{@link android.os.Build.VERSION_CODES#M}</td>
+ * <td>as above, plus<br>
+ * {@link MediaFormat#KEY_BIT_RATE}<sup>AAC</sup></td>
+ * </tr><tr>
+ * <td>{@link android.os.Build.VERSION_CODES#N}</td>
+ * <td>as above, plus<br>
+ * <!-- {link MediaFormat#KEY_MAX_BIT_RATE}<sup>AAC, MPEG4</sup>,<br> -->
+ * {@link MediaFormat#KEY_BIT_RATE}<sup>MPEG4</sup>,<br>
+ * {@link MediaFormat#KEY_HDR_STATIC_INFO}<sup>#, .webm</sup>,<br>
+ * {@link MediaFormat#KEY_COLOR_STANDARD}<sup>#</sup>,<br>
+ * {@link MediaFormat#KEY_COLOR_TRANSFER}<sup>#</sup>,<br>
+ * {@link MediaFormat#KEY_COLOR_RANGE}<sup>#</sup>,<br>
+ * +: <strong>codec-specific data<sup>HEVC</sup></strong>,<br>
+ * codec-specific data<sup>VP9</sup></td>
+ * </tr>
+ * <tr>
+ * <td colspan=4>
+ * <p class=note><strong>Notes:</strong><br>
+ * #: storing into container metadata.<br>
+ * .mp4, .webm&hellip;: for listed containers<br>
+ * MPEG4, AAC&hellip;: for listed codecs
+ * </td>
+ * </tr><tr>
+ * <td colspan=4>
+ * <p class=note>Note that the codec-specific data for the track must be specified using
+ * this method. Furthermore, codec-specific data must not be passed/specified via the
+ * {@link #writeSampleData writeSampleData()} call.
+ * </td>
+ * </tr>
+ * </tbody>
+ * </table>
+ *
+ * <p>
+ * The following table summarizes codec support for containers across android releases:
+ *
+ * <table style="width: 0%">
+ * <thead>
+ * <tr>
+ * <th rowspan=2>OS Version(s)</th>
+     * <th colspan=3>Codec support</th>
+ * </tr><tr>
+ * <th>{@linkplain OutputFormat#MUXER_OUTPUT_MPEG_4 MP4}</th>
+ * <th>{@linkplain OutputFormat#MUXER_OUTPUT_WEBM WEBM}</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td>{@link android.os.Build.VERSION_CODES#JELLY_BEAN_MR2}</td>
+ * <td rowspan=6>{@link MediaFormat#MIMETYPE_AUDIO_AAC AAC},<br>
+ * {@link MediaFormat#MIMETYPE_AUDIO_AMR_NB NB-AMR},<br>
+ * {@link MediaFormat#MIMETYPE_AUDIO_AMR_WB WB-AMR},<br>
+ * {@link MediaFormat#MIMETYPE_VIDEO_H263 H.263},<br>
+ * {@link MediaFormat#MIMETYPE_VIDEO_MPEG4 MPEG-4},<br>
+ * {@link MediaFormat#MIMETYPE_VIDEO_AVC AVC} (H.264)</td>
+ * <td rowspan=3>Not supported</td>
+ * </tr><tr>
+ * <td>{@link android.os.Build.VERSION_CODES#KITKAT}</td>
+ * </tr><tr>
+ * <td>{@link android.os.Build.VERSION_CODES#KITKAT_WATCH}</td>
+ * </tr><tr>
+ * <td>{@link android.os.Build.VERSION_CODES#LOLLIPOP}</td>
+ * <td rowspan=3>{@link MediaFormat#MIMETYPE_AUDIO_VORBIS Vorbis},<br>
+ * {@link MediaFormat#MIMETYPE_VIDEO_VP8 VP8}</td>
+ * </tr><tr>
+ * <td>{@link android.os.Build.VERSION_CODES#LOLLIPOP_MR1}</td>
+ * </tr><tr>
+ * <td>{@link android.os.Build.VERSION_CODES#M}</td>
+ * </tr><tr>
+ * <td>{@link android.os.Build.VERSION_CODES#N}</td>
+ * <td>as above, plus<br>
+ * {@link MediaFormat#MIMETYPE_VIDEO_HEVC HEVC} (H.265)</td>
+ * <td>as above, plus<br>
+ * {@link MediaFormat#MIMETYPE_VIDEO_VP9 VP9}</td>
+ * </tr>
+ * </tbody>
+ * </table>
+ *
+ * @param format The media format for the track. This must not be an empty
+ * MediaFormat.
+ * @return The track index for this newly added track, and it should be used
+ * in the {@link #writeSampleData}.
+ * @throws IllegalArgumentException if format is invalid.
+ * @throws IllegalStateException if muxer is in the wrong state.
+ */
+    public int addTrack(@NonNull MediaFormat format) {
+        if (format == null) {
+            throw new IllegalArgumentException("format must not be null.");
+        }
+        // Tracks can only be added between setup and start().
+        if (mState != MUXER_STATE_INITIALIZED) {
+            throw new IllegalStateException("Muxer is not initialized.");
+        }
+        if (mNativeObject == 0) {
+            throw new IllegalStateException("Muxer has been released!");
+        }
+        int trackIndex = -1;
+        // Convert the MediaFormat into key-value pairs and send to the native.
+        Map<String, Object> formatMap = format.getMap();
+
+        // Parallel arrays: keys[i] corresponds to values[i], filled in a
+        // single pass over the map's entries.
+        String[] keys = null;
+        Object[] values = null;
+        int mapSize = formatMap.size();
+        if (mapSize > 0) {
+            keys = new String[mapSize];
+            values = new Object[mapSize];
+            int i = 0;
+            for (Map.Entry<String, Object> entry : formatMap.entrySet()) {
+                keys[i] = entry.getKey();
+                values[i] = entry.getValue();
+                ++i;
+            }
+            trackIndex = nativeAddTrack(mNativeObject, keys, values);
+        } else {
+            throw new IllegalArgumentException("format must not be empty.");
+        }
+
+        // Track index numbers are expected to increase as tracks are added
+        // successfully. However, if the format is invalid, the native layer
+        // returns a negative trackIndex.
+        if (mLastTrackIndex >= trackIndex) {
+            throw new IllegalArgumentException("Invalid format.");
+        }
+        mLastTrackIndex = trackIndex;
+        return trackIndex;
+    }
+
+    /**
+     * Writes an encoded sample into the muxer.
+     * <p>The application needs to make sure that the samples are written into
+     * the right tracks. Also, it needs to make sure the samples for each track
+     * are written in chronological order (e.g. in the order they are provided
+     * by the encoder.)</p>
+     * @param trackIndex The track index for this sample.
+     * @param byteBuf The encoded sample.
+     * @param bufferInfo The buffer information related to this sample.
+     * @throws IllegalArgumentException if trackIndex, byteBuf or bufferInfo is invalid.
+     * @throws IllegalStateException if muxer is in wrong state.
+     * MediaMuxer uses the flags provided in {@link MediaCodec.BufferInfo},
+     * to signal sync frames.
+     */
+    public void writeSampleData(int trackIndex, @NonNull ByteBuffer byteBuf,
+            @NonNull BufferInfo bufferInfo) {
+        // A valid index is one previously returned by addTrack().
+        if (trackIndex < 0 || trackIndex > mLastTrackIndex) {
+            throw new IllegalArgumentException("trackIndex is invalid");
+        }
+
+        if (byteBuf == null) {
+            throw new IllegalArgumentException("byteBuffer must not be null");
+        }
+
+        if (bufferInfo == null) {
+            throw new IllegalArgumentException("bufferInfo must not be null");
+        }
+        // The described [offset, offset + size) window must lie inside the
+        // buffer, and the timestamp must be non-negative.
+        if (bufferInfo.size < 0 || bufferInfo.offset < 0
+                || (bufferInfo.offset + bufferInfo.size) > byteBuf.capacity()
+                || bufferInfo.presentationTimeUs < 0) {
+            throw new IllegalArgumentException("bufferInfo must specify a" +
+                    " valid buffer offset, size and presentation time");
+        }
+
+        if (mNativeObject == 0) {
+            throw new IllegalStateException("Muxer has been released!");
+        }
+
+        if (mState != MUXER_STATE_STARTED) {
+            throw new IllegalStateException("Can't write, muxer is not started");
+        }
+
+        nativeWriteSampleData(mNativeObject, trackIndex, byteBuf,
+                bufferInfo.offset, bufferInfo.size,
+                bufferInfo.presentationTimeUs, bufferInfo.flags);
+    }
+
+    /**
+     * Make sure you call this when you're done to free up any resources
+     * instead of relying on the garbage collector to do this for you at
+     * some point in the future.
+     */
+    public void release() {
+        // A started muxer must be stopped first so the output is finalized.
+        if (mState == MUXER_STATE_STARTED) {
+            stop();
+        }
+        if (mNativeObject != 0) {
+            nativeRelease(mNativeObject);
+            mNativeObject = 0;
+            // Disarm the leak detector now that native resources are freed.
+            mCloseGuard.close();
+        }
+        mState = MUXER_STATE_UNINITIALIZED;
+    }
+}
diff --git a/android/media/MediaPlayer.java b/android/media/MediaPlayer.java
new file mode 100644
index 00000000..0d99473c
--- /dev/null
+++ b/android/media/MediaPlayer.java
@@ -0,0 +1,5642 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.app.ActivityThread;
+import android.content.ContentProvider;
+import android.content.ContentResolver;
+import android.content.Context;
+import android.content.res.AssetFileDescriptor;
+import android.net.Uri;
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.IBinder;
+import android.os.Looper;
+import android.os.Message;
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.os.PersistableBundle;
+import android.os.Process;
+import android.os.PowerManager;
+import android.os.SystemProperties;
+import android.provider.Settings;
+import android.system.ErrnoException;
+import android.system.OsConstants;
+import android.util.Log;
+import android.util.Pair;
+import android.view.Surface;
+import android.view.SurfaceHolder;
+import android.widget.VideoView;
+import android.graphics.SurfaceTexture;
+import android.media.AudioManager;
+import android.media.MediaDrm;
+import android.media.MediaFormat;
+import android.media.MediaTimeProvider;
+import android.media.PlaybackParams;
+import android.media.SubtitleController;
+import android.media.SubtitleController.Anchor;
+import android.media.SubtitleData;
+import android.media.SubtitleTrack.RenderingWidget;
+import android.media.SyncParams;
+
+import com.android.internal.util.Preconditions;
+
+import libcore.io.IoBridge;
+import libcore.io.Libcore;
+import libcore.io.Streams;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileDescriptor;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.lang.Runnable;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.ref.WeakReference;
+import java.net.CookieHandler;
+import java.net.CookieManager;
+import java.net.HttpCookie;
+import java.net.HttpURLConnection;
+import java.net.InetSocketAddress;
+import java.net.URL;
+import java.nio.ByteOrder;
+import java.util.Arrays;
+import java.util.BitSet;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Scanner;
+import java.util.Set;
+import java.util.UUID;
+import java.util.Vector;
+
+
+/**
+ * MediaPlayer class can be used to control playback
+ * of audio/video files and streams. An example on how to use the methods in
+ * this class can be found in {@link android.widget.VideoView}.
+ *
+ * <p>Topics covered here are:
+ * <ol>
+ * <li><a href="#StateDiagram">State Diagram</a>
+ * <li><a href="#Valid_and_Invalid_States">Valid and Invalid States</a>
+ * <li><a href="#Permissions">Permissions</a>
+ * <li><a href="#Callbacks">Register informational and error callbacks</a>
+ * </ol>
+ *
+ * <div class="special reference">
+ * <h3>Developer Guides</h3>
+ * <p>For more information about how to use MediaPlayer, read the
+ * <a href="{@docRoot}guide/topics/media/mediaplayer.html">Media Playback</a> developer guide.</p>
+ * </div>
+ *
+ * <a name="StateDiagram"></a>
+ * <h3>State Diagram</h3>
+ *
+ * <p>Playback control of audio/video files and streams is managed as a state
+ * machine. The following diagram shows the life cycle and the states of a
+ * MediaPlayer object driven by the supported playback control operations.
+ * The ovals represent the states a MediaPlayer object may reside
+ * in. The arcs represent the playback control operations that drive the object
+ * state transition. There are two types of arcs. The arcs with a single arrow
+ * head represent synchronous method calls, while those with
+ * a double arrow head represent asynchronous method calls.</p>
+ *
+ * <p><img src="../../../images/mediaplayer_state_diagram.gif"
+ * alt="MediaPlayer State diagram"
+ * border="0" /></p>
+ *
+ * <p>From this state diagram, one can see that a MediaPlayer object has the
+ * following states:</p>
+ * <ul>
+ * <li>When a MediaPlayer object is just created using <code>new</code> or
+ * after {@link #reset()} is called, it is in the <em>Idle</em> state; and after
+ * {@link #release()} is called, it is in the <em>End</em> state. Between these
+ * two states is the life cycle of the MediaPlayer object.
+ * <ul>
+ * <li>There is a subtle but important difference between a newly constructed
+ * MediaPlayer object and the MediaPlayer object after {@link #reset()}
+ * is called. It is a programming error to invoke methods such
+ * as {@link #getCurrentPosition()},
+ * {@link #getDuration()}, {@link #getVideoHeight()},
+ * {@link #getVideoWidth()}, {@link #setAudioAttributes(AudioAttributes)},
+ * {@link #setLooping(boolean)},
+ * {@link #setVolume(float, float)}, {@link #pause()}, {@link #start()},
+ * {@link #stop()}, {@link #seekTo(long, int)}, {@link #prepare()} or
+ * {@link #prepareAsync()} in the <em>Idle</em> state for both cases. If any of these
+ * methods is called right after a MediaPlayer object is constructed,
+ * the user supplied callback method OnErrorListener.onError() won't be
+ * called by the internal player engine and the object state remains
+ * unchanged; but if these methods are called right after {@link #reset()},
+ * the user supplied callback method OnErrorListener.onError() will be
+ * invoked by the internal player engine and the object will be
+ *         transferred to the <em>Error</em> state. </li>
+ * <li>It is also recommended that once
+ * a MediaPlayer object is no longer being used, call {@link #release()} immediately
+ * so that resources used by the internal player engine associated with the
+ * MediaPlayer object can be released immediately. Resource may include
+ * singleton resources such as hardware acceleration components and
+ * failure to call {@link #release()} may cause subsequent instances of
+ * MediaPlayer objects to fallback to software implementations or fail
+ * altogether. Once the MediaPlayer
+ * object is in the <em>End</em> state, it can no longer be used and
+ * there is no way to bring it back to any other state. </li>
+ * <li>Furthermore,
+ * the MediaPlayer objects created using <code>new</code> is in the
+ * <em>Idle</em> state, while those created with one
+ * of the overloaded convenient <code>create</code> methods are <em>NOT</em>
+ * in the <em>Idle</em> state. In fact, the objects are in the <em>Prepared</em>
+ * state if the creation using <code>create</code> method is successful.
+ * </li>
+ * </ul>
+ * </li>
+ * <li>In general, some playback control operation may fail due to various
+ * reasons, such as unsupported audio/video format, poorly interleaved
+ * audio/video, resolution too high, streaming timeout, and the like.
+ * Thus, error reporting and recovery is an important concern under
+ * these circumstances. Sometimes, due to programming errors, invoking a playback
+ * control operation in an invalid state may also occur. Under all these
+ * error conditions, the internal player engine invokes a user supplied
+ * OnErrorListener.onError() method if an OnErrorListener has been
+ * registered beforehand via
+ * {@link #setOnErrorListener(android.media.MediaPlayer.OnErrorListener)}.
+ * <ul>
+ * <li>It is important to note that once an error occurs, the
+ * MediaPlayer object enters the <em>Error</em> state (except as noted
+ * above), even if an error listener has not been registered by the application.</li>
+ * <li>In order to reuse a MediaPlayer object that is in the <em>
+ * Error</em> state and recover from the error,
+ * {@link #reset()} can be called to restore the object to its <em>Idle</em>
+ * state.</li>
+ * <li>It is good programming practice to have your application
+ * register a OnErrorListener to look out for error notifications from
+ * the internal player engine.</li>
+ * <li>IllegalStateException is
+ * thrown to prevent programming errors such as calling {@link #prepare()},
+ * {@link #prepareAsync()}, or one of the overloaded <code>setDataSource
+ * </code> methods in an invalid state. </li>
+ * </ul>
+ * </li>
+ * <li>Calling
+ * {@link #setDataSource(FileDescriptor)}, or
+ * {@link #setDataSource(String)}, or
+ * {@link #setDataSource(Context, Uri)}, or
+ * {@link #setDataSource(FileDescriptor, long, long)}, or
+ * {@link #setDataSource(MediaDataSource)} transfers a
+ * MediaPlayer object in the <em>Idle</em> state to the
+ * <em>Initialized</em> state.
+ * <ul>
+ * <li>An IllegalStateException is thrown if
+ * setDataSource() is called in any other state.</li>
+ * <li>It is good programming
+ * practice to always look out for <code>IllegalArgumentException</code>
+ * and <code>IOException</code> that may be thrown from the overloaded
+ * <code>setDataSource</code> methods.</li>
+ * </ul>
+ * </li>
+ * <li>A MediaPlayer object must first enter the <em>Prepared</em> state
+ * before playback can be started.
+ * <ul>
+ * <li>There are two ways (synchronous vs.
+ * asynchronous) that the <em>Prepared</em> state can be reached:
+ * either a call to {@link #prepare()} (synchronous) which
+ * transfers the object to the <em>Prepared</em> state once the method call
+ * returns, or a call to {@link #prepareAsync()} (asynchronous) which
+ * first transfers the object to the <em>Preparing</em> state after the
+ *         call returns (which occurs almost right away) while the internal
+ * player engine continues working on the rest of preparation work
+ * until the preparation work completes. When the preparation completes or when {@link #prepare()} call returns,
+ * the internal player engine then calls a user supplied callback method,
+ * onPrepared() of the OnPreparedListener interface, if an
+ * OnPreparedListener is registered beforehand via {@link
+ * #setOnPreparedListener(android.media.MediaPlayer.OnPreparedListener)}.</li>
+ * <li>It is important to note that
+ * the <em>Preparing</em> state is a transient state, and the behavior
+ * of calling any method with side effect while a MediaPlayer object is
+ * in the <em>Preparing</em> state is undefined.</li>
+ * <li>An IllegalStateException is
+ * thrown if {@link #prepare()} or {@link #prepareAsync()} is called in
+ * any other state.</li>
+ * <li>While in the <em>Prepared</em> state, properties
+ * such as audio/sound volume, screenOnWhilePlaying, looping can be
+ * adjusted by invoking the corresponding set methods.</li>
+ * </ul>
+ * </li>
+ * <li>To start the playback, {@link #start()} must be called. After
+ * {@link #start()} returns successfully, the MediaPlayer object is in the
+ * <em>Started</em> state. {@link #isPlaying()} can be called to test
+ * whether the MediaPlayer object is in the <em>Started</em> state.
+ * <ul>
+ * <li>While in the <em>Started</em> state, the internal player engine calls
+ * a user supplied OnBufferingUpdateListener.onBufferingUpdate() callback
+ * method if a OnBufferingUpdateListener has been registered beforehand
+ * via {@link #setOnBufferingUpdateListener(OnBufferingUpdateListener)}.
+ * This callback allows applications to keep track of the buffering status
+ * while streaming audio/video.</li>
+ *         <li>Calling {@link #start()} has no effect
+ * on a MediaPlayer object that is already in the <em>Started</em> state.</li>
+ * </ul>
+ * </li>
+ * <li>Playback can be paused and stopped, and the current playback position
+ * can be adjusted. Playback can be paused via {@link #pause()}. When the call to
+ * {@link #pause()} returns, the MediaPlayer object enters the
+ * <em>Paused</em> state. Note that the transition from the <em>Started</em>
+ * state to the <em>Paused</em> state and vice versa happens
+ * asynchronously in the player engine. It may take some time before
+ * the state is updated in calls to {@link #isPlaying()}, and it can be
+ * a number of seconds in the case of streamed content.
+ * <ul>
+ * <li>Calling {@link #start()} to resume playback for a paused
+ * MediaPlayer object, and the resumed playback
+ * position is the same as where it was paused. When the call to
+ * {@link #start()} returns, the paused MediaPlayer object goes back to
+ * the <em>Started</em> state.</li>
+ * <li>Calling {@link #pause()} has no effect on
+ * a MediaPlayer object that is already in the <em>Paused</em> state.</li>
+ * </ul>
+ * </li>
+ * <li>Calling {@link #stop()} stops playback and causes a
+ * MediaPlayer in the <em>Started</em>, <em>Paused</em>, <em>Prepared
+ * </em> or <em>PlaybackCompleted</em> state to enter the
+ * <em>Stopped</em> state.
+ * <ul>
+ * <li>Once in the <em>Stopped</em> state, playback cannot be started
+ * until {@link #prepare()} or {@link #prepareAsync()} are called to set
+ * the MediaPlayer object to the <em>Prepared</em> state again.</li>
+ * <li>Calling {@link #stop()} has no effect on a MediaPlayer
+ * object that is already in the <em>Stopped</em> state.</li>
+ * </ul>
+ * </li>
+ * <li>The playback position can be adjusted with a call to
+ * {@link #seekTo(long, int)}.
+ * <ul>
+ *         <li>Although the asynchronous {@link #seekTo(long, int)}
+ * call returns right away, the actual seek operation may take a while to
+ * finish, especially for audio/video being streamed. When the actual
+ * seek operation completes, the internal player engine calls a user
+ * supplied OnSeekComplete.onSeekComplete() if an OnSeekCompleteListener
+ * has been registered beforehand via
+ * {@link #setOnSeekCompleteListener(OnSeekCompleteListener)}.</li>
+ * <li>Please
+ * note that {@link #seekTo(long, int)} can also be called in the other states,
+ * such as <em>Prepared</em>, <em>Paused</em> and <em>PlaybackCompleted
+ * </em> state. When {@link #seekTo(long, int)} is called in those states,
+ * one video frame will be displayed if the stream has video and the requested
+ * position is valid.
+ * </li>
+ * <li>Furthermore, the actual current playback position
+ * can be retrieved with a call to {@link #getCurrentPosition()}, which
+ * is helpful for applications such as a Music player that need to keep
+ * track of the playback progress.</li>
+ * </ul>
+ * </li>
+ * <li>When the playback reaches the end of stream, the playback completes.
+ * <ul>
+ *         <li>If the looping mode was set to <var>true</var> with
+ * {@link #setLooping(boolean)}, the MediaPlayer object shall remain in
+ * the <em>Started</em> state.</li>
+ * <li>If the looping mode was set to <var>false
+ * </var>, the player engine calls a user supplied callback method,
+ * OnCompletion.onCompletion(), if a OnCompletionListener is registered
+ * beforehand via {@link #setOnCompletionListener(OnCompletionListener)}.
+ * The invoke of the callback signals that the object is now in the <em>
+ * PlaybackCompleted</em> state.</li>
+ * <li>While in the <em>PlaybackCompleted</em>
+ * state, calling {@link #start()} can restart the playback from the
+ * beginning of the audio/video source.</li>
+ * </ul>
+ *
+ *
+ * <a name="Valid_and_Invalid_States"></a>
+ * <h3>Valid and invalid states</h3>
+ *
+ * <table border="0" cellspacing="0" cellpadding="0">
+ * <tr><td>Method Name </p></td>
+ *     <td>Valid States </p></td>
+ * <td>Invalid States </p></td>
+ * <td>Comments </p></td></tr>
+ * <tr><td>attachAuxEffect </p></td>
+ * <td>{Initialized, Prepared, Started, Paused, Stopped, PlaybackCompleted} </p></td>
+ * <td>{Idle, Error} </p></td>
+ * <td>This method must be called after setDataSource.
+ * Calling it does not change the object state. </p></td></tr>
+ * <tr><td>getAudioSessionId </p></td>
+ * <td>any </p></td>
+ * <td>{} </p></td>
+ * <td>This method can be called in any state and calling it does not change
+ * the object state. </p></td></tr>
+ * <tr><td>getCurrentPosition </p></td>
+ * <td>{Idle, Initialized, Prepared, Started, Paused, Stopped,
+ * PlaybackCompleted} </p></td>
+ * <td>{Error}</p></td>
+ * <td>Successful invoke of this method in a valid state does not change the
+ * state. Calling this method in an invalid state transfers the object
+ * to the <em>Error</em> state. </p></td></tr>
+ * <tr><td>getDuration </p></td>
+ * <td>{Prepared, Started, Paused, Stopped, PlaybackCompleted} </p></td>
+ * <td>{Idle, Initialized, Error} </p></td>
+ * <td>Successful invoke of this method in a valid state does not change the
+ * state. Calling this method in an invalid state transfers the object
+ * to the <em>Error</em> state. </p></td></tr>
+ * <tr><td>getVideoHeight </p></td>
+ * <td>{Idle, Initialized, Prepared, Started, Paused, Stopped,
+ * PlaybackCompleted}</p></td>
+ * <td>{Error}</p></td>
+ * <td>Successful invoke of this method in a valid state does not change the
+ * state. Calling this method in an invalid state transfers the object
+ * to the <em>Error</em> state. </p></td></tr>
+ * <tr><td>getVideoWidth </p></td>
+ * <td>{Idle, Initialized, Prepared, Started, Paused, Stopped,
+ * PlaybackCompleted}</p></td>
+ * <td>{Error}</p></td>
+ * <td>Successful invoke of this method in a valid state does not change
+ * the state. Calling this method in an invalid state transfers the
+ * object to the <em>Error</em> state. </p></td></tr>
+ * <tr><td>isPlaying </p></td>
+ * <td>{Idle, Initialized, Prepared, Started, Paused, Stopped,
+ * PlaybackCompleted}</p></td>
+ * <td>{Error}</p></td>
+ * <td>Successful invoke of this method in a valid state does not change
+ * the state. Calling this method in an invalid state transfers the
+ * object to the <em>Error</em> state. </p></td></tr>
+ * <tr><td>pause </p></td>
+ * <td>{Started, Paused, PlaybackCompleted}</p></td>
+ * <td>{Idle, Initialized, Prepared, Stopped, Error}</p></td>
+ * <td>Successful invoke of this method in a valid state transfers the
+ * object to the <em>Paused</em> state. Calling this method in an
+ * invalid state transfers the object to the <em>Error</em> state.</p></td></tr>
+ * <tr><td>prepare </p></td>
+ * <td>{Initialized, Stopped} </p></td>
+ * <td>{Idle, Prepared, Started, Paused, PlaybackCompleted, Error} </p></td>
+ * <td>Successful invoke of this method in a valid state transfers the
+ * object to the <em>Prepared</em> state. Calling this method in an
+ * invalid state throws an IllegalStateException.</p></td></tr>
+ * <tr><td>prepareAsync </p></td>
+ * <td>{Initialized, Stopped} </p></td>
+ * <td>{Idle, Prepared, Started, Paused, PlaybackCompleted, Error} </p></td>
+ * <td>Successful invoke of this method in a valid state transfers the
+ * object to the <em>Preparing</em> state. Calling this method in an
+ * invalid state throws an IllegalStateException.</p></td></tr>
+ * <tr><td>release </p></td>
+ * <td>any </p></td>
+ * <td>{} </p></td>
+ * <td>After {@link #release()}, the object is no longer available. </p></td></tr>
+ * <tr><td>reset </p></td>
+ * <td>{Idle, Initialized, Prepared, Started, Paused, Stopped,
+ * PlaybackCompleted, Error}</p></td>
+ * <td>{}</p></td>
+ * <td>After {@link #reset()}, the object is like being just created.</p></td></tr>
+ * <tr><td>seekTo </p></td>
+ * <td>{Prepared, Started, Paused, PlaybackCompleted} </p></td>
+ * <td>{Idle, Initialized, Stopped, Error}</p></td>
+ * <td>Successful invoke of this method in a valid state does not change
+ * the state. Calling this method in an invalid state transfers the
+ * object to the <em>Error</em> state. </p></td></tr>
+ * <tr><td>setAudioAttributes </p></td>
+ * <td>{Idle, Initialized, Stopped, Prepared, Started, Paused,
+ * PlaybackCompleted}</p></td>
+ * <td>{Error}</p></td>
+ * <td>Successful invoke of this method does not change the state. In order for the
+ * target audio attributes type to become effective, this method must be called before
+ * prepare() or prepareAsync().</p></td></tr>
+ * <tr><td>setAudioSessionId </p></td>
+ * <td>{Idle} </p></td>
+ * <td>{Initialized, Prepared, Started, Paused, Stopped, PlaybackCompleted,
+ * Error} </p></td>
+ * <td>This method must be called in idle state as the audio session ID must be known before
+ * calling setDataSource. Calling it does not change the object state. </p></td></tr>
+ * <tr><td>setAudioStreamType (deprecated)</p></td>
+ * <td>{Idle, Initialized, Stopped, Prepared, Started, Paused,
+ * PlaybackCompleted}</p></td>
+ * <td>{Error}</p></td>
+ * <td>Successful invoke of this method does not change the state. In order for the
+ * target audio stream type to become effective, this method must be called before
+ * prepare() or prepareAsync().</p></td></tr>
+ * <tr><td>setAuxEffectSendLevel </p></td>
+ * <td>any</p></td>
+ * <td>{} </p></td>
+ * <td>Calling this method does not change the object state. </p></td></tr>
+ * <tr><td>setDataSource </p></td>
+ * <td>{Idle} </p></td>
+ * <td>{Initialized, Prepared, Started, Paused, Stopped, PlaybackCompleted,
+ * Error} </p></td>
+ * <td>Successful invoke of this method in a valid state transfers the
+ * object to the <em>Initialized</em> state. Calling this method in an
+ * invalid state throws an IllegalStateException.</p></td></tr>
+ * <tr><td>setDisplay </p></td>
+ * <td>any </p></td>
+ * <td>{} </p></td>
+ * <td>This method can be called in any state and calling it does not change
+ * the object state. </p></td></tr>
+ * <tr><td>setSurface </p></td>
+ * <td>any </p></td>
+ * <td>{} </p></td>
+ * <td>This method can be called in any state and calling it does not change
+ * the object state. </p></td></tr>
+ * <tr><td>setVideoScalingMode </p></td>
+ * <td>{Initialized, Prepared, Started, Paused, Stopped, PlaybackCompleted} </p></td>
+ * <td>{Idle, Error}</p></td>
+ * <td>Successful invoke of this method does not change the state.</p></td></tr>
+ * <tr><td>setLooping </p></td>
+ * <td>{Idle, Initialized, Stopped, Prepared, Started, Paused,
+ * PlaybackCompleted}</p></td>
+ * <td>{Error}</p></td>
+ * <td>Successful invoke of this method in a valid state does not change
+ * the state. Calling this method in an
+ * invalid state transfers the object to the <em>Error</em> state.</p></td></tr>
+ * <tr><td>isLooping </p></td>
+ * <td>any </p></td>
+ * <td>{} </p></td>
+ * <td>This method can be called in any state and calling it does not change
+ * the object state. </p></td></tr>
+ * <tr><td>setOnBufferingUpdateListener </p></td>
+ * <td>any </p></td>
+ * <td>{} </p></td>
+ * <td>This method can be called in any state and calling it does not change
+ * the object state. </p></td></tr>
+ * <tr><td>setOnCompletionListener </p></td>
+ * <td>any </p></td>
+ * <td>{} </p></td>
+ * <td>This method can be called in any state and calling it does not change
+ * the object state. </p></td></tr>
+ * <tr><td>setOnErrorListener </p></td>
+ * <td>any </p></td>
+ * <td>{} </p></td>
+ * <td>This method can be called in any state and calling it does not change
+ * the object state. </p></td></tr>
+ * <tr><td>setOnPreparedListener </p></td>
+ * <td>any </p></td>
+ * <td>{} </p></td>
+ * <td>This method can be called in any state and calling it does not change
+ * the object state. </p></td></tr>
+ * <tr><td>setOnSeekCompleteListener </p></td>
+ * <td>any </p></td>
+ * <td>{} </p></td>
+ * <td>This method can be called in any state and calling it does not change
+ * the object state. </p></td></tr>
+ * <tr><td>setPlaybackParams</p></td>
+ * <td>{Initialized, Prepared, Started, Paused, PlaybackCompleted, Error}</p></td>
+ * <td>{Idle, Stopped} </p></td>
+ * <td>This method will change state in some cases, depending on when it's called.
+ * </p></td></tr>
+ *     <tr><td>setScreenOnWhilePlaying </p></td>
+ * <td>any </p></td>
+ * <td>{} </p></td>
+ * <td>This method can be called in any state and calling it does not change
+ * the object state. </p></td></tr>
+ * <tr><td>setVolume </p></td>
+ * <td>{Idle, Initialized, Stopped, Prepared, Started, Paused,
+ * PlaybackCompleted}</p></td>
+ * <td>{Error}</p></td>
+ *         <td>Successful invoke of this method does not change the state.</p></td></tr>
+ * <tr><td>setWakeMode </p></td>
+ * <td>any </p></td>
+ * <td>{} </p></td>
+ * <td>This method can be called in any state and calling it does not change
+ * the object state.</p></td></tr>
+ * <tr><td>start </p></td>
+ * <td>{Prepared, Started, Paused, PlaybackCompleted}</p></td>
+ * <td>{Idle, Initialized, Stopped, Error}</p></td>
+ * <td>Successful invoke of this method in a valid state transfers the
+ * object to the <em>Started</em> state. Calling this method in an
+ * invalid state transfers the object to the <em>Error</em> state.</p></td></tr>
+ * <tr><td>stop </p></td>
+ * <td>{Prepared, Started, Stopped, Paused, PlaybackCompleted}</p></td>
+ * <td>{Idle, Initialized, Error}</p></td>
+ * <td>Successful invoke of this method in a valid state transfers the
+ * object to the <em>Stopped</em> state. Calling this method in an
+ * invalid state transfers the object to the <em>Error</em> state.</p></td></tr>
+ * <tr><td>getTrackInfo </p></td>
+ * <td>{Prepared, Started, Stopped, Paused, PlaybackCompleted}</p></td>
+ * <td>{Idle, Initialized, Error}</p></td>
+ * <td>Successful invoke of this method does not change the state.</p></td></tr>
+ * <tr><td>addTimedTextSource </p></td>
+ * <td>{Prepared, Started, Stopped, Paused, PlaybackCompleted}</p></td>
+ * <td>{Idle, Initialized, Error}</p></td>
+ * <td>Successful invoke of this method does not change the state.</p></td></tr>
+ * <tr><td>selectTrack </p></td>
+ * <td>{Prepared, Started, Stopped, Paused, PlaybackCompleted}</p></td>
+ * <td>{Idle, Initialized, Error}</p></td>
+ * <td>Successful invoke of this method does not change the state.</p></td></tr>
+ * <tr><td>deselectTrack </p></td>
+ * <td>{Prepared, Started, Stopped, Paused, PlaybackCompleted}</p></td>
+ * <td>{Idle, Initialized, Error}</p></td>
+ * <td>Successful invoke of this method does not change the state.</p></td></tr>
+ *
+ * </table>
+ *
+ * <a name="Permissions"></a>
+ * <h3>Permissions</h3>
+ * <p>One may need to declare a corresponding WAKE_LOCK permission {@link
+ * android.R.styleable#AndroidManifestUsesPermission &lt;uses-permission&gt;}
+ * element.
+ *
+ * <p>This class requires the {@link android.Manifest.permission#INTERNET} permission
+ * when used with network-based content.
+ *
+ * <a name="Callbacks"></a>
+ * <h3>Callbacks</h3>
+ * <p>Applications may want to register for informational and error
+ * events in order to be informed of some internal state update and
+ * possible runtime errors during playback or streaming. Registration for
+ * these events is done by properly setting the appropriate listeners (via calls
+ * to
+ * {@link #setOnPreparedListener(OnPreparedListener)}setOnPreparedListener,
+ * {@link #setOnVideoSizeChangedListener(OnVideoSizeChangedListener)}setOnVideoSizeChangedListener,
+ * {@link #setOnSeekCompleteListener(OnSeekCompleteListener)}setOnSeekCompleteListener,
+ * {@link #setOnCompletionListener(OnCompletionListener)}setOnCompletionListener,
+ * {@link #setOnBufferingUpdateListener(OnBufferingUpdateListener)}setOnBufferingUpdateListener,
+ * {@link #setOnInfoListener(OnInfoListener)}setOnInfoListener,
+ * {@link #setOnErrorListener(OnErrorListener)}setOnErrorListener, etc).
+ * In order to receive the respective callback
+ * associated with these listeners, applications are required to create
+ * MediaPlayer objects on a thread with its own Looper running (main UI
+ * thread by default has a Looper running).
+ *
+ */
+public class MediaPlayer extends PlayerBase
+ implements SubtitleController.Listener
+ , VolumeAutomation
+{
+    /**
+     * Constant to retrieve only the new metadata since the last call.
+     * Presumably passed as an argument of getMetadata(boolean, boolean) — confirm once unhidden.
+     * // FIXME: unhide.
+     * // FIXME: add link to getMetadata(boolean, boolean)
+     * {@hide}
+     */
+    public static final boolean METADATA_UPDATE_ONLY = true;
+
+    /**
+     * Constant to retrieve all the metadata (complement of {@code METADATA_UPDATE_ONLY}).
+     * // FIXME: unhide.
+     * // FIXME: add link to getMetadata(boolean, boolean)
+     * {@hide}
+     */
+    public static final boolean METADATA_ALL = false;
+
+    /**
+     * Constant to enable the metadata filter during retrieval.
+     * // FIXME: unhide.
+     * // FIXME: add link to getMetadata(boolean, boolean)
+     * {@hide}
+     */
+    public static final boolean APPLY_METADATA_FILTER = true;
+
+    /**
+     * Constant to disable the metadata filter during retrieval
+     * (complement of {@code APPLY_METADATA_FILTER}).
+     * // FIXME: unhide.
+     * // FIXME: add link to getMetadata(boolean, boolean)
+     * {@hide}
+     */
+    public static final boolean BYPASS_METADATA_FILTER = false;
+
+    // One-time class initialization: load the media JNI library and run the
+    // native-side static setup before any instance can be created.
+    static {
+        System.loadLibrary("media_jni");
+        native_init();
+    }
+
+    private final static String TAG = "MediaPlayer";
+    // Name of the remote interface for the media player. Must be kept
+    // in sync with the 2nd parameter of the IMPLEMENT_META_INTERFACE
+    // macro invocation in IMediaPlayer.cpp
+    private final static String IMEDIA_PLAYER = "android.media.IMediaPlayer";
+
+    // NOTE: the three fields marked below are read/written by native code by
+    // name; do not rename them without updating the JNI side.
+    private long mNativeContext; // accessed by native methods
+    private long mNativeSurfaceTexture;  // accessed by native methods
+    private int mListenerContext; // accessed by native methods
+    private SurfaceHolder mSurfaceHolder; // set by setDisplay(); cleared (null) by setSurface()
+    private EventHandler mEventHandler; // delivers native events; null when no Looper is available
+    private PowerManager.WakeLock mWakeLock = null; // presumably managed by setWakeMode — confirm
+    private boolean mScreenOnWhilePlaying; // checked by setSurface(); setter not visible here
+    private boolean mStayAwake;
+    private int mStreamType = AudioManager.USE_DEFAULT_STREAM_TYPE; // legacy stream type default
+    private int mUsage = -1; // -1 appears to be an "unset" sentinel — confirm against usages
+    private boolean mBypassInterruptionPolicy;
+
+    // Modular DRM
+    private UUID mDrmUUID;
+    // NOTE(review): looks like mDrmLock guards the mDrm* state below — confirm against usages.
+    private final Object mDrmLock = new Object();
+    private DrmInfo mDrmInfo;
+    private MediaDrm mDrmObj;
+    private byte[] mDrmSessionId;
+    private boolean mDrmInfoResolved;
+    private boolean mActiveDrmScheme;
+    private boolean mDrmConfigAllowed;
+    private boolean mDrmProvisioningInProgress;
+    private boolean mPrepareDrmInProgress;
+    private ProvisioningThread mDrmProvisioningThread;
+
+    /**
+     * Default constructor. The create() factory methods may be more convenient
+     * when a MediaPlayer should be instantiated synchronously from a Uri or
+     * resource.
+     * <p>When done with the MediaPlayer, you should call {@link #release()} to
+     * free its resources. If not released, too many MediaPlayer instances may
+     * result in an exception.</p>
+     */
+    public MediaPlayer() {
+        super(new AudioAttributes.Builder().build(),
+                AudioPlaybackConfiguration.PLAYER_TYPE_JAM_MEDIAPLAYER);
+
+        // Prefer the creating thread's Looper for event delivery, falling back
+        // to the main Looper; with neither available, events go undelivered.
+        Looper looper = Looper.myLooper();
+        if (looper == null) {
+            looper = Looper.getMainLooper();
+        }
+        mEventHandler = (looper == null) ? null : new EventHandler(this, looper);
+
+        mTimeProvider = new TimeProvider(this);
+        mOpenSubtitleSources = new Vector<InputStream>();
+
+        /* Native setup requires a weak reference to our object.
+         * It's easier to create it here than in C++.
+         */
+        native_setup(new WeakReference<MediaPlayer>(this));
+
+        baseRegisterPlayer();
+    }
+
+    /*
+     * Updates the native MediaPlayer's video sink (SurfaceTexture/Surface).
+     * Called after a new display surface is set via setDisplay() or
+     * setSurface(); a null surface results in audio-only playback.
+     */
+    private native void _setVideoSurface(Surface surface);
+
+    /* Opcodes written as the first int of an invoke() request parcel
+     * (see e.g. setVideoScalingMode below).
+     * Do not change these values (starting with INVOKE_ID) without updating
+     * their counterparts in include/media/mediaplayer.h!
+     */
+    private static final int INVOKE_ID_GET_TRACK_INFO = 1;
+    private static final int INVOKE_ID_ADD_EXTERNAL_SOURCE = 2;
+    private static final int INVOKE_ID_ADD_EXTERNAL_SOURCE_FD = 3;
+    private static final int INVOKE_ID_SELECT_TRACK = 4;
+    private static final int INVOKE_ID_DESELECT_TRACK = 5;
+    private static final int INVOKE_ID_SET_VIDEO_SCALE_MODE = 6;
+    private static final int INVOKE_ID_GET_SELECTED_TRACK = 7;
+
+    /**
+     * Creates a request parcel pre-loaded with the media player interface
+     * token, suitable for routing to the native player via
+     * {@link #invoke(Parcel, Parcel)}. The caller must not overwrite the
+     * token, i.e. it can only append data to the parcel.
+     *
+     * @return A parcel suitable to hold a request for the native player.
+     * {@hide}
+     */
+    public Parcel newRequest() {
+        final Parcel request = Parcel.obtain();
+        request.writeInterfaceToken(IMEDIA_PLAYER);
+        return request;
+    }
+
+    /**
+     * Invokes a generic method on the native player using opaque request and
+     * reply parcels. Both payloads' format is a convention between the Java
+     * caller and the native player. Must be called after setDataSource to make
+     * sure a native player exists.
+     *
+     * @param request Parcel with the data for the extension; the caller must
+     *                use {@link #newRequest()} to get one.
+     * @param reply Output parcel with the data returned by the native player.
+     * @throws RuntimeException if the native call reports a non-zero status.
+     * {@hide}
+     */
+    public void invoke(Parcel request, Parcel reply) {
+        final int status = native_invoke(request, reply);
+        // Rewind so the caller reads the reply from the beginning.
+        reply.setDataPosition(0);
+        if (status != 0) {
+            throw new RuntimeException("failure code: " + status);
+        }
+    }
+
+    /**
+     * Sets the {@link SurfaceHolder} to use for displaying the video portion
+     * of the media.
+     *
+     * Either a surface holder or surface must be set if a display or video
+     * sink is needed. Not calling this method or {@link #setSurface(Surface)}
+     * when playing back a video — or passing null — results in only the audio
+     * track being played.
+     *
+     * @param sh the SurfaceHolder to use for video display
+     * @throws IllegalStateException if the internal player engine has not been
+     *         initialized or has been released.
+     */
+    public void setDisplay(SurfaceHolder sh) {
+        mSurfaceHolder = sh;
+        final Surface surface = (sh == null) ? null : sh.getSurface();
+        _setVideoSurface(surface);
+        updateSurfaceScreenOn();
+    }
+
+    /**
+     * Sets the {@link Surface} to be used as the sink for the video portion of
+     * the media. Similar to {@link #setDisplay(SurfaceHolder)}, but does not
+     * support {@link #setScreenOnWhilePlaying(boolean)}. Setting a Surface
+     * un-sets any Surface or SurfaceHolder that was previously set. A null
+     * surface results in only the audio track being played.
+     *
+     * If the Surface sends frames to a {@link SurfaceTexture}, the timestamps
+     * returned from {@link SurfaceTexture#getTimestamp()} have an unspecified
+     * zero point and cannot be directly compared between different media
+     * sources, instances, or runs. The timestamp is normally monotonically
+     * increasing, is unaffected by time-of-day adjustments, and is reset when
+     * the position is set.
+     *
+     * @param surface The {@link Surface} to be used for the video portion of
+     *                the media.
+     * @throws IllegalStateException if the internal player engine has not been
+     *         initialized or has been released.
+     */
+    public void setSurface(Surface surface) {
+        if (surface != null && mScreenOnWhilePlaying) {
+            Log.w(TAG, "setScreenOnWhilePlaying(true) is ineffective for Surface");
+        }
+        mSurfaceHolder = null;
+        _setVideoSurface(surface);
+        updateSurfaceScreenOn();
+    }
+
+    /* Do not change these video scaling mode values below without updating
+     * their counterparts in system/window.h! Please do not forget to update
+     * {@link #isVideoScalingModeSupported} when new video scaling modes
+     * are added.
+     */
+    /**
+     * Specifies a video scaling mode. The content is stretched to the
+     * surface rendering area. When the surface has the same aspect ratio
+     * as the content, the aspect ratio of the content is maintained;
+     * otherwise, the aspect ratio of the content is not maintained when video
+     * is being rendered. Unlike {@link #VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING},
+     * there is no content cropping with this video scaling mode.
+     * <p>Value must match its counterpart in system/window.h.
+     */
+    public static final int VIDEO_SCALING_MODE_SCALE_TO_FIT = 1;
+
+    /**
+     * Specifies a video scaling mode. The content is scaled, maintaining
+     * its aspect ratio. The whole surface area is always used. When the
+     * aspect ratio of the content is the same as the surface, no content
+     * is cropped; otherwise, content is cropped to fit the surface.
+     * <p>Value must match its counterpart in system/window.h.
+     */
+    public static final int VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING = 2;
+    /**
+     * Sets the video scaling mode. To make the target mode effective during
+     * playback, this method must be called after the data source is set. If
+     * not called, the default is {@link #VIDEO_SCALING_MODE_SCALE_TO_FIT}.
+     *
+     * <p> The supported video scaling modes are:
+     * <ul>
+     * <li> {@link #VIDEO_SCALING_MODE_SCALE_TO_FIT}
+     * <li> {@link #VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING}
+     * </ul>
+     *
+     * @param mode target video scaling mode. Must be one of the supported
+     * video scaling modes; otherwise, IllegalArgumentException will be thrown.
+     *
+     * @see MediaPlayer#VIDEO_SCALING_MODE_SCALE_TO_FIT
+     * @see MediaPlayer#VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING
+     */
+    public void setVideoScalingMode(int mode) {
+        if (!isVideoScalingModeSupported(mode)) {
+            throw new IllegalArgumentException("Scaling mode " + mode + " is not supported");
+        }
+        final Parcel request = Parcel.obtain();
+        final Parcel reply = Parcel.obtain();
+        // Always recycle both parcels, even if the native invoke throws.
+        try {
+            request.writeInterfaceToken(IMEDIA_PLAYER);
+            request.writeInt(INVOKE_ID_SET_VIDEO_SCALE_MODE);
+            request.writeInt(mode);
+            invoke(request, reply);
+        } finally {
+            request.recycle();
+            reply.recycle();
+        }
+    }
+
+    /**
+     * Convenience factory that builds a MediaPlayer for a given Uri without a
+     * display surface; equivalent to {@link #create(Context, Uri, SurfaceHolder)}
+     * with a null holder.
+     * On success, {@link #prepare()} will already have been called and must not
+     * be called again.
+     * <p>When done with the MediaPlayer, call {@link #release()} to free the
+     * resources; too many unreleased instances will result in an exception.</p>
+     * <p>Note that since {@link #prepare()} is called automatically in this
+     * method, you cannot change the audio session ID (see
+     * {@link #setAudioSessionId(int)}) or audio attributes (see
+     * {@link #setAudioAttributes(AudioAttributes)} of the new MediaPlayer.</p>
+     *
+     * @param context the Context to use
+     * @param uri the Uri from which to get the datasource
+     * @return a MediaPlayer object, or null if creation failed
+     */
+    public static MediaPlayer create(Context context, Uri uri) {
+        return create(context, uri, null);
+    }
+
+    /**
+     * Convenience factory that builds a MediaPlayer for a given Uri with an
+     * optional display surface, using a freshly allocated audio session.
+     * On success, {@link #prepare()} will already have been called and must not
+     * be called again.
+     * <p>When done with the MediaPlayer, call {@link #release()} to free the
+     * resources; too many unreleased instances will result in an exception.</p>
+     * <p>Note that since {@link #prepare()} is called automatically in this
+     * method, you cannot change the audio session ID (see
+     * {@link #setAudioSessionId(int)}) or audio attributes (see
+     * {@link #setAudioAttributes(AudioAttributes)} of the new MediaPlayer.</p>
+     *
+     * @param context the Context to use
+     * @param uri the Uri from which to get the datasource
+     * @param holder the SurfaceHolder to use for displaying the video
+     * @return a MediaPlayer object, or null if creation failed
+     */
+    public static MediaPlayer create(Context context, Uri uri, SurfaceHolder holder) {
+        // Non-positive session IDs are clamped to 0 ("unassigned").
+        final int sessionId = AudioSystem.newAudioSessionId();
+        return create(context, uri, holder, null, Math.max(sessionId, 0));
+    }
+
+    /**
+     * Same factory method as {@link #create(Context, Uri, SurfaceHolder)} but that lets you specify
+     * the audio attributes and session ID to be used by the new MediaPlayer instance.
+     *
+     * @param context the Context to use
+     * @param uri the Uri from which to get the datasource
+     * @param holder the SurfaceHolder to use for displaying the video, may be null.
+     * @param audioAttributes the {@link AudioAttributes} to be used by the media player.
+     * @param audioSessionId the audio session ID to be used by the media player,
+     *     see {@link AudioManager#generateAudioSessionId()} to obtain a new session.
+     * @return a MediaPlayer object, or null if creation failed
+     */
+    public static MediaPlayer create(Context context, Uri uri, SurfaceHolder holder,
+            AudioAttributes audioAttributes, int audioSessionId) {
+
+        try {
+            MediaPlayer mp = new MediaPlayer();
+            // Fall back to default attributes when the caller passed none.
+            final AudioAttributes aa = audioAttributes != null ? audioAttributes :
+                new AudioAttributes.Builder().build();
+            mp.setAudioAttributes(aa);
+            mp.setAudioSessionId(audioSessionId);
+            mp.setDataSource(context, uri);
+            if (holder != null) {
+                mp.setDisplay(holder);
+            }
+            mp.prepare();
+            return mp;
+        } catch (IOException | IllegalArgumentException | SecurityException ex) {
+            // All three failure types get identical best-effort handling:
+            // log and fall through to the documented null return.
+            Log.d(TAG, "create failed:", ex);
+        }
+
+        return null;
+    }
+
+ // Note no convenience method to create a MediaPlayer with SurfaceTexture sink.
+
+    /**
+     * Convenience factory that builds a MediaPlayer for a given raw resource
+     * id, using a freshly allocated audio session.
+     * On success, {@link #prepare()} will already have been called and must not
+     * be called again.
+     * <p>When done with the MediaPlayer, call {@link #release()} to free the
+     * resources; too many unreleased instances will result in an exception.</p>
+     * <p>Note that since {@link #prepare()} is called automatically in this
+     * method, you cannot change the audio session ID (see
+     * {@link #setAudioSessionId(int)}) or audio attributes (see
+     * {@link #setAudioAttributes(AudioAttributes)} of the new MediaPlayer.</p>
+     *
+     * @param context the Context to use
+     * @param resid the raw resource id (<var>R.raw.&lt;something></var>) for
+     *              the resource to use as the datasource
+     * @return a MediaPlayer object, or null if creation failed
+     */
+    public static MediaPlayer create(Context context, int resid) {
+        // Non-positive session IDs are clamped to 0 ("unassigned").
+        final int sessionId = AudioSystem.newAudioSessionId();
+        return create(context, resid, null, Math.max(sessionId, 0));
+    }
+
+    /**
+     * Same factory method as {@link #create(Context, int)} but that lets you specify the audio
+     * attributes and session ID to be used by the new MediaPlayer instance.
+     *
+     * @param context the Context to use
+     * @param resid the raw resource id (<var>R.raw.&lt;something></var>) for
+     *              the resource to use as the datasource
+     * @param audioAttributes the {@link AudioAttributes} to be used by the media player.
+     * @param audioSessionId the audio session ID to be used by the media player,
+     *     see {@link AudioManager#generateAudioSessionId()} to obtain a new session.
+     * @return a MediaPlayer object, or null if creation failed
+     */
+    public static MediaPlayer create(Context context, int resid,
+            AudioAttributes audioAttributes, int audioSessionId) {
+        // try-with-resources closes the AssetFileDescriptor even when
+        // setDataSource()/prepare() throws; the previous code closed it only on
+        // the success path and leaked it on failure. Closing after prepare() is
+        // safe: the fd may be closed as soon as setDataSource returns.
+        try (AssetFileDescriptor afd = context.getResources().openRawResourceFd(resid)) {
+            if (afd == null) return null;
+
+            MediaPlayer mp = new MediaPlayer();
+
+            // Fall back to default attributes when the caller passed none.
+            final AudioAttributes aa = audioAttributes != null ? audioAttributes :
+                new AudioAttributes.Builder().build();
+            mp.setAudioAttributes(aa);
+            mp.setAudioSessionId(audioSessionId);
+
+            mp.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
+            mp.prepare();
+            return mp;
+        } catch (IOException | IllegalArgumentException | SecurityException ex) {
+            // Creation is best-effort: log and return null per the contract.
+            Log.d(TAG, "create failed:", ex);
+        }
+        return null;
+    }
+
+    /**
+     * Sets the data source as a content Uri; equivalent to
+     * {@link #setDataSource(Context, Uri, Map, List)} with no headers and no
+     * cookies.
+     *
+     * @param context the Context to use when resolving the Uri
+     * @param uri the Content URI of the data you want to play
+     * @throws IllegalStateException if it is called in an invalid state
+     */
+    public void setDataSource(@NonNull Context context, @NonNull Uri uri)
+            throws IOException, IllegalArgumentException, SecurityException, IllegalStateException {
+        setDataSource(context, uri, null, null);
+    }
+
+    /**
+     * Sets the data source as a content Uri, optionally supplying request
+     * headers and cookies for network-based sources.
+     *
+     * <p>If {@code cookies} are provided and the app has not installed a
+     * default {@link CookieHandler}, a CookieManager is created and its
+     * CookieStore is populated with them. If the app has installed its own
+     * handler already, it must be of CookieManager type so this API can update
+     * the manager's CookieStore. To provide cookies any other way, install
+     * your own default cookie handler and use other setDataSource variants.
+     *
+     * <p><strong>Note</strong> that the cross domain redirection is allowed by
+     * default, but that can be changed with key/value pairs through the
+     * headers parameter with "android-allow-cross-domain-redirect" as the key
+     * and "0" or "1" as the value to disallow or allow cross domain
+     * redirection.
+     *
+     * @param context the Context to use when resolving the Uri
+     * @param uri the Content URI of the data you want to play
+     * @param headers the headers to be sent together with the request for the
+     *                data. Must not include cookies; use the cookies param.
+     * @param cookies the cookies to be sent together with the request
+     * @throws IllegalArgumentException if cookies are provided and the
+     *         installed handler is not a CookieManager
+     * @throws IllegalStateException if it is called in an invalid state
+     * @throws NullPointerException if context or uri is null
+     * @throws IOException if uri has a file scheme and an I/O error occurs
+     */
+    public void setDataSource(@NonNull Context context, @NonNull Uri uri,
+            @Nullable Map<String, String> headers, @Nullable List<HttpCookie> cookies)
+            throws IOException {
+        Preconditions.checkNotNull(context, "context param can not be null.");
+        Preconditions.checkNotNull(uri, "uri param can not be null.");
+
+        if (cookies != null) {
+            final CookieHandler cookieHandler = CookieHandler.getDefault();
+            if (cookieHandler != null && !(cookieHandler instanceof CookieManager)) {
+                throw new IllegalArgumentException(
+                        "The cookie handler has to be of CookieManager type when cookies are provided.");
+            }
+        }
+
+        // The context and URI usually belong to the calling user. Get a resolver
+        // for that user and strip out the userId from the URI if present.
+        final ContentResolver resolver = context.getContentResolver();
+        final String scheme = uri.getScheme();
+        final String authority = ContentProvider.getAuthorityWithoutUserId(uri.getAuthority());
+
+        if (ContentResolver.SCHEME_FILE.equals(scheme)) {
+            setDataSource(uri.getPath());
+            return;
+        }
+
+        if (ContentResolver.SCHEME_CONTENT.equals(scheme)
+                && Settings.AUTHORITY.equals(authority)) {
+            // Try the cached ringtone first since the actual provider may not be
+            // encryption aware, or it may be stored on CE media storage.
+            final int type = RingtoneManager.getDefaultType(uri);
+            final Uri cacheUri = RingtoneManager.getCacheForType(type, context.getUserId());
+            final Uri actualUri = RingtoneManager.getActualDefaultRingtoneUri(context, type);
+            if (attemptDataSource(resolver, cacheUri)
+                    || attemptDataSource(resolver, actualUri)) {
+                return;
+            }
+            setDataSource(uri.toString(), headers, cookies);
+            return;
+        }
+
+        // Try the requested Uri locally first, falling back to the media server.
+        if (!attemptDataSource(resolver, uri)) {
+            setDataSource(uri.toString(), headers, cookies);
+        }
+    }
+
+    /**
+     * Sets the data source as a content Uri with optional request headers;
+     * equivalent to {@link #setDataSource(Context, Uri, Map, List)} with no
+     * cookies.
+     *
+     * <p><strong>Note</strong> that the cross domain redirection is allowed by
+     * default, but that can be changed with key/value pairs through the
+     * headers parameter with "android-allow-cross-domain-redirect" as the key
+     * and "0" or "1" as the value to disallow or allow cross domain
+     * redirection.
+     *
+     * @param context the Context to use when resolving the Uri
+     * @param uri the Content URI of the data you want to play
+     * @param headers the headers to be sent together with the request for the data
+     * @throws IllegalStateException if it is called in an invalid state
+     */
+    public void setDataSource(@NonNull Context context, @NonNull Uri uri,
+            @Nullable Map<String, String> headers)
+            throws IOException, IllegalArgumentException, SecurityException, IllegalStateException {
+        setDataSource(context, uri, headers, null);
+    }
+
+    /**
+     * Attempts to open {@code uri} through the given resolver and install it
+     * as the data source.
+     *
+     * @return true if the descriptor was opened and accepted as the data
+     *         source; false (after logging) if opening or setting it failed.
+     */
+    private boolean attemptDataSource(ContentResolver resolver, Uri uri) {
+        try (AssetFileDescriptor descriptor = resolver.openAssetFileDescriptor(uri, "r")) {
+            setDataSource(descriptor);
+            return true;
+        } catch (NullPointerException | SecurityException | IOException ex) {
+            Log.w(TAG, "Couldn't open " + uri + ": " + ex);
+            return false;
+        }
+    }
+
+    /**
+     * Sets the data source (file-path or http/rtsp URL) to use; equivalent to
+     * the headers/cookies variant with both omitted.
+     *
+     * <p>When <code>path</code> refers to a local file, the file may actually
+     * be opened by a process other than the calling application. This implies
+     * that the pathname should be an absolute path (as any other process runs
+     * with unspecified current working directory), and that the pathname
+     * should reference a world-readable file. As an alternative, the
+     * application could first open the file for reading, and then use the file
+     * descriptor form {@link #setDataSource(FileDescriptor)}.
+     *
+     * @param path the path of the file, or the http/rtsp URL of the stream you want to play
+     * @throws IllegalStateException if it is called in an invalid state
+     */
+    public void setDataSource(String path)
+            throws IOException, IllegalArgumentException, SecurityException, IllegalStateException {
+        setDataSource(path, null, null);
+    }
+
+    /**
+     * Sets the data source (file-path or http/rtsp URL) to use, with http
+     * request headers; equivalent to the headers/cookies variant with no
+     * cookies.
+     *
+     * @param path the path of the file, or the http/rtsp URL of the stream you want to play
+     * @param headers the headers associated with the http request for the stream you want to play
+     * @throws IllegalStateException if it is called in an invalid state
+     * @hide pending API council
+     */
+    public void setDataSource(String path, Map<String, String> headers)
+            throws IOException, IllegalArgumentException, SecurityException, IllegalStateException {
+        setDataSource(path, headers, null);
+    }
+
+ private void setDataSource(String path, Map<String, String> headers, List<HttpCookie> cookies)
+ throws IOException, IllegalArgumentException, SecurityException, IllegalStateException
+ {
+ String[] keys = null;
+ String[] values = null;
+
+ if (headers != null) {
+ keys = new String[headers.size()];
+ values = new String[headers.size()];
+
+ int i = 0;
+ for (Map.Entry<String, String> entry: headers.entrySet()) {
+ keys[i] = entry.getKey();
+ values[i] = entry.getValue();
+ ++i;
+ }
+ }
+ setDataSource(path, keys, values, cookies);
+ }
+
+ private void setDataSource(String path, String[] keys, String[] values,
+ List<HttpCookie> cookies)
+ throws IOException, IllegalArgumentException, SecurityException, IllegalStateException {
+ final Uri uri = Uri.parse(path);
+ final String scheme = uri.getScheme();
+ if ("file".equals(scheme)) {
+ path = uri.getPath();
+ } else if (scheme != null) {
+ // handle non-file sources
+ nativeSetDataSource(
+ MediaHTTPService.createHttpServiceBinderIfNecessary(path, cookies),
+ path,
+ keys,
+ values);
+ return;
+ }
+
+ final File file = new File(path);
+ if (file.exists()) {
+ FileInputStream is = new FileInputStream(file);
+ FileDescriptor fd = is.getFD();
+ setDataSource(fd);
+ is.close();
+ } else {
+ throw new IOException("setDataSource failed.");
+ }
+ }
+
    // Native entry point for non-file (e.g. http/rtsp) sources. The binder, when
    // non-null, supplies the framework HTTP service used to fetch the stream; keys
    // and values are the flattened request headers.
    private native void nativeSetDataSource(
        IBinder httpServiceBinder, String path, String[] keys, String[] values)
        throws IOException, IllegalArgumentException, SecurityException, IllegalStateException;
+
+ /**
+ * Sets the data source (AssetFileDescriptor) to use. It is the caller's
+ * responsibility to close the file descriptor. It is safe to do so as soon
+ * as this call returns.
+ *
+ * @param afd the AssetFileDescriptor for the file you want to play
+ * @throws IllegalStateException if it is called in an invalid state
+ * @throws IllegalArgumentException if afd is not a valid AssetFileDescriptor
+ * @throws IOException if afd can not be read
+ */
+ public void setDataSource(@NonNull AssetFileDescriptor afd)
+ throws IOException, IllegalArgumentException, IllegalStateException {
+ Preconditions.checkNotNull(afd);
+ // Note: using getDeclaredLength so that our behavior is the same
+ // as previous versions when the content provider is returning
+ // a full file.
+ if (afd.getDeclaredLength() < 0) {
+ setDataSource(afd.getFileDescriptor());
+ } else {
+ setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getDeclaredLength());
+ }
+ }
+
    /**
     * Sets the data source (FileDescriptor) to use. It is the caller's responsibility
     * to close the file descriptor. It is safe to do so as soon as this call returns.
     *
     * @param fd the FileDescriptor for the file you want to play
     * @throws IllegalStateException if it is called in an invalid state
     * @throws IllegalArgumentException if fd is not a valid FileDescriptor
     * @throws IOException if fd can not be read
     */
    public void setDataSource(FileDescriptor fd)
            throws IOException, IllegalArgumentException, IllegalStateException {
        // Delegates with offset 0 and a near-LONG_MAX length, i.e. "play the whole file".
        // intentionally less than LONG_MAX
        setDataSource(fd, 0, 0x7ffffffffffffffL);
    }
+
    /**
     * Sets the data source (FileDescriptor) to use. The FileDescriptor must be
     * seekable (N.B. a LocalSocket is not seekable). It is the caller's responsibility
     * to close the file descriptor. It is safe to do so as soon as this call returns.
     *
     * @param fd the FileDescriptor for the file you want to play
     * @param offset the offset into the file where the data to be played starts, in bytes
     * @param length the length in bytes of the data to be played
     * @throws IllegalStateException if it is called in an invalid state
     * @throws IllegalArgumentException if fd is not a valid FileDescriptor
     * @throws IOException if fd can not be read
     */
    public void setDataSource(FileDescriptor fd, long offset, long length)
            throws IOException, IllegalArgumentException, IllegalStateException {
        // Thin wrapper over the native implementation.
        _setDataSource(fd, offset, length);
    }

    // Native counterpart of setDataSource(FileDescriptor, long, long).
    private native void _setDataSource(FileDescriptor fd, long offset, long length)
            throws IOException, IllegalArgumentException, IllegalStateException;
+
    /**
     * Sets the data source (MediaDataSource) to use.
     *
     * @param dataSource the MediaDataSource for the media you want to play
     * @throws IllegalStateException if it is called in an invalid state
     * @throws IllegalArgumentException if dataSource is not a valid MediaDataSource
     */
    public void setDataSource(MediaDataSource dataSource)
            throws IllegalArgumentException, IllegalStateException {
        // Thin wrapper over the native implementation.
        _setDataSource(dataSource);
    }

    // Native counterpart of setDataSource(MediaDataSource).
    private native void _setDataSource(MediaDataSource dataSource)
            throws IllegalArgumentException, IllegalStateException;
+
    /**
     * Prepares the player for playback, synchronously.
     *
     * After setting the datasource and the display surface, you need to either
     * call prepare() or prepareAsync(). For files, it is OK to call prepare(),
     * which blocks until MediaPlayer is ready for playback.
     *
     * @throws IllegalStateException if it is called in an invalid state
     */
    public void prepare() throws IOException, IllegalStateException {
        _prepare();
        // Synchronous prepare has parsed the media, so in-band subtitle tracks
        // can be scanned immediately.
        scanInternalSubtitleTracks();

        // DrmInfo, if any, has been resolved by now.
        synchronized (mDrmLock) {
            mDrmInfoResolved = true;
        }
    }

    // Native counterpart of prepare(); blocks until the player is ready.
    private native void _prepare() throws IOException, IllegalStateException;
+
    /**
     * Prepares the player for playback, asynchronously.
     *
     * After setting the datasource and the display surface, you need to either
     * call prepare() or prepareAsync(). For streams, you should call prepareAsync(),
     * which returns immediately, rather than blocking until enough data has been
     * buffered.
     *
     * @throws IllegalStateException if it is called in an invalid state
     */
    public native void prepareAsync() throws IllegalStateException;
+
+ /**
+ * Starts or resumes playback. If playback had previously been paused,
+ * playback will continue from where it was paused. If playback had
+ * been stopped, or never started before, playback will start at the
+ * beginning.
+ *
+ * @throws IllegalStateException if it is called in an invalid state
+ */
+ public void start() throws IllegalStateException {
+ //FIXME use lambda to pass startImpl to superclass
+ final int delay = getStartDelayMs();
+ if (delay == 0) {
+ startImpl();
+ } else {
+ new Thread() {
+ public void run() {
+ try {
+ Thread.sleep(delay);
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ baseSetStartDelayMs(0);
+ try {
+ startImpl();
+ } catch (IllegalStateException e) {
+ // fail silently for a state exception when it is happening after
+ // a delayed start, as the player state could have changed between the
+ // call to start() and the execution of startImpl()
+ }
+ }
+ }.start();
+ }
+ }
+
    // Common start path: notify the base player, hold the screen/wake state,
    // then start the native engine.
    private void startImpl() {
        baseStart();
        stayAwake(true);
        _start();
    }

    // Native counterpart of start().
    private native void _start() throws IllegalStateException;


    // Returns the effective stream type, lazily resolving USE_DEFAULT_STREAM_TYPE
    // from the native engine on first use (the result is cached in mStreamType).
    private int getAudioStreamType() {
        if (mStreamType == AudioManager.USE_DEFAULT_STREAM_TYPE) {
            mStreamType = _getAudioStreamType();
        }
        return mStreamType;
    }

    // Native query for the engine's current stream type.
    private native int _getAudioStreamType() throws IllegalStateException;
+
    /**
     * Stops playback after playback has been started or paused.
     *
     * @throws IllegalStateException if the internal player engine has not been
     * initialized.
     */
    public void stop() throws IllegalStateException {
        // Release the wake/screen hold before stopping the native engine,
        // then notify the base player.
        stayAwake(false);
        _stop();
        baseStop();
    }

    // Native counterpart of stop().
    private native void _stop() throws IllegalStateException;

    /**
     * Pauses playback. Call start() to resume.
     *
     * @throws IllegalStateException if the internal player engine has not been
     * initialized.
     */
    public void pause() throws IllegalStateException {
        // Same ordering as stop(): drop wake hold, pause engine, notify base.
        stayAwake(false);
        _pause();
        basePause();
    }

    // Native counterpart of pause().
    private native void _pause() throws IllegalStateException;
+
    // Superclass hook: framework-initiated start delegates to the public start().
    @Override
    void playerStart() {
        start();
    }

    // Superclass hook: framework-initiated pause delegates to the public pause().
    @Override
    void playerPause() {
        pause();
    }

    // Superclass hook: framework-initiated stop delegates to the public stop().
    @Override
    void playerStop() {
        stop();
    }

    // Applies a volume-shaper operation via the native engine and returns its status/id.
    @Override
    /* package */ int playerApplyVolumeShaper(
            @NonNull VolumeShaper.Configuration configuration,
            @NonNull VolumeShaper.Operation operation) {
        return native_applyVolumeShaper(configuration, operation);
    }

    // Queries the native engine for the state of the shaper with the given id.
    @Override
    /* package */ @Nullable VolumeShaper.State playerGetVolumeShaperState(int id) {
        return native_getVolumeShaperState(id);
    }

    @Override
    public @NonNull VolumeShaper createVolumeShaper(
            @NonNull VolumeShaper.Configuration configuration) {
        // The returned shaper is bound to this player instance.
        return new VolumeShaper(configuration, this);
    }

    private native int native_applyVolumeShaper(
            @NonNull VolumeShaper.Configuration configuration,
            @NonNull VolumeShaper.Operation operation);

    private native @Nullable VolumeShaper.State native_getVolumeShaperState(int id);
+
+ /**
+ * Set the low-level power management behavior for this MediaPlayer. This
+ * can be used when the MediaPlayer is not playing through a SurfaceHolder
+ * set with {@link #setDisplay(SurfaceHolder)} and thus can use the
+ * high-level {@link #setScreenOnWhilePlaying(boolean)} feature.
+ *
+ * <p>This function has the MediaPlayer access the low-level power manager
+ * service to control the device's power usage while playing is occurring.
+ * The parameter is a combination of {@link android.os.PowerManager} wake flags.
+ * Use of this method requires {@link android.Manifest.permission#WAKE_LOCK}
+ * permission.
+ * By default, no attempt is made to keep the device awake during playback.
+ *
+ * @param context the Context to use
+ * @param mode the power/wake mode to set
+ * @see android.os.PowerManager
+ */
+ public void setWakeMode(Context context, int mode) {
+ boolean washeld = false;
+
+ /* Disable persistant wakelocks in media player based on property */
+ if (SystemProperties.getBoolean("audio.offload.ignore_setawake", false) == true) {
+ Log.w(TAG, "IGNORING setWakeMode " + mode);
+ return;
+ }
+
+ if (mWakeLock != null) {
+ if (mWakeLock.isHeld()) {
+ washeld = true;
+ mWakeLock.release();
+ }
+ mWakeLock = null;
+ }
+
+ PowerManager pm = (PowerManager)context.getSystemService(Context.POWER_SERVICE);
+ mWakeLock = pm.newWakeLock(mode|PowerManager.ON_AFTER_RELEASE, MediaPlayer.class.getName());
+ mWakeLock.setReferenceCounted(false);
+ if (washeld) {
+ mWakeLock.acquire();
+ }
+ }
+
+ /**
+ * Control whether we should use the attached SurfaceHolder to keep the
+ * screen on while video playback is occurring. This is the preferred
+ * method over {@link #setWakeMode} where possible, since it doesn't
+ * require that the application have permission for low-level wake lock
+ * access.
+ *
+ * @param screenOn Supply true to keep the screen on, false to allow it
+ * to turn off.
+ */
+ public void setScreenOnWhilePlaying(boolean screenOn) {
+ if (mScreenOnWhilePlaying != screenOn) {
+ if (screenOn && mSurfaceHolder == null) {
+ Log.w(TAG, "setScreenOnWhilePlaying(true) is ineffective without a SurfaceHolder");
+ }
+ mScreenOnWhilePlaying = screenOn;
+ updateSurfaceScreenOn();
+ }
+ }
+
+ private void stayAwake(boolean awake) {
+ if (mWakeLock != null) {
+ if (awake && !mWakeLock.isHeld()) {
+ mWakeLock.acquire();
+ } else if (!awake && mWakeLock.isHeld()) {
+ mWakeLock.release();
+ }
+ }
+ mStayAwake = awake;
+ updateSurfaceScreenOn();
+ }
+
+ private void updateSurfaceScreenOn() {
+ if (mSurfaceHolder != null) {
+ mSurfaceHolder.setKeepScreenOn(mScreenOnWhilePlaying && mStayAwake);
+ }
+ }
+
    /**
     * Returns the width of the video.
     *
     * @return the width of the video, or 0 if there is no video,
     * no display surface was set, or the width has not been determined
     * yet. The OnVideoSizeChangedListener can be registered via
     * {@link #setOnVideoSizeChangedListener(OnVideoSizeChangedListener)}
     * to provide a notification when the width is available.
     */
    public native int getVideoWidth();

    /**
     * Returns the height of the video.
     *
     * @return the height of the video, or 0 if there is no video,
     * no display surface was set, or the height has not been determined
     * yet. The OnVideoSizeChangedListener can be registered via
     * {@link #setOnVideoSizeChangedListener(OnVideoSizeChangedListener)}
     * to provide a notification when the height is available.
     */
    public native int getVideoHeight();

    /**
     * Return Metrics data about the current player.
     *
     * @return a {@link PersistableBundle} containing the set of attributes and values
     * available for the media being handled by this instance of MediaPlayer
     * The attributes are described in {@link MetricsConstants}.
     *
     * Additional vendor-specific fields may also be present in
     * the return value.
     */
    public PersistableBundle getMetrics() {
        // Simple pass-through to the native metrics snapshot.
        PersistableBundle bundle = native_getMetrics();
        return bundle;
    }

    // Native provider of the metrics bundle returned by getMetrics().
    private native PersistableBundle native_getMetrics();
+
    /**
     * Checks whether the MediaPlayer is playing.
     *
     * @return true if currently playing, false otherwise
     * @throws IllegalStateException if the internal player engine has not been
     * initialized or has been released.
     */
    public native boolean isPlaying();

    /**
     * Gets the default buffering management params.
     * Calling it only after {@code setDataSource} has been called.
     * Each type of data source might have different set of default params.
     *
     * @return the default buffering management params supported by the source component.
     * @throws IllegalStateException if the internal player engine has not been
     * initialized, or {@code setDataSource} has not been called.
     * @hide
     */
    @NonNull
    public native BufferingParams getDefaultBufferingParams();

    /**
     * Gets the current buffering management params used by the source component.
     * Calling it only after {@code setDataSource} has been called.
     *
     * @return the current buffering management params used by the source component.
     * @throws IllegalStateException if the internal player engine has not been
     * initialized, or {@code setDataSource} has not been called.
     * @hide
     */
    @NonNull
    public native BufferingParams getBufferingParams();

    /**
     * Sets buffering management params.
     * The object sets its internal BufferingParams to the input, unless the input is
     * invalid or not supported.
     * Call it only after {@code setDataSource} has been called.
     * Users should only use supported mode returned by {@link #getDefaultBufferingParams()}
     * or its downsized version as described in {@link BufferingParams}.
     *
     * @param params the buffering management params.
     *
     * @throws IllegalStateException if the internal player engine has not been
     * initialized or has been released, or {@code setDataSource} has not been called.
     * @throws IllegalArgumentException if params is invalid or not supported.
     * @hide
     */
    public native void setBufferingParams(@NonNull BufferingParams params);
+
    /**
     * Change playback speed of audio by resampling the audio.
     * <p>
     * Specifies resampling as audio mode for variable rate playback, i.e.,
     * resample the waveform based on the requested playback rate to get
     * a new waveform, and play back the new waveform at the original sampling
     * frequency.
     * When rate is larger than 1.0, pitch becomes higher.
     * When rate is smaller than 1.0, pitch becomes lower.
     *
     * @hide
     */
    public static final int PLAYBACK_RATE_AUDIO_MODE_RESAMPLE = 2;

    /**
     * Change playback speed of audio without changing its pitch.
     * <p>
     * Specifies time stretching as audio mode for variable rate playback.
     * Time stretching changes the duration of the audio samples without
     * affecting its pitch.
     * <p>
     * This mode is only supported for a limited range of playback speed factors,
     * e.g. between 1/2x and 2x.
     *
     * @hide
     */
    public static final int PLAYBACK_RATE_AUDIO_MODE_STRETCH = 1;

    /**
     * Change playback speed of audio without changing its pitch, and
     * possibly mute audio if time stretching is not supported for the playback
     * speed.
     * <p>
     * Try to keep audio pitch when changing the playback rate, but allow the
     * system to determine how to change audio playback if the rate is out
     * of range.
     *
     * @hide
     */
    public static final int PLAYBACK_RATE_AUDIO_MODE_DEFAULT = 0;

    /** @hide Restricts easyPlaybackParams() to the three modes above. */
    @IntDef(
        value = {
            PLAYBACK_RATE_AUDIO_MODE_DEFAULT,
            PLAYBACK_RATE_AUDIO_MODE_STRETCH,
            PLAYBACK_RATE_AUDIO_MODE_RESAMPLE,
    })
    @Retention(RetentionPolicy.SOURCE)
    public @interface PlaybackRateAudioMode {}
+
+ /**
+ * Sets playback rate and audio mode.
+ *
+ * @param rate the ratio between desired playback rate and normal one.
+ * @param audioMode audio playback mode. Must be one of the supported
+ * audio modes.
+ *
+ * @throws IllegalStateException if the internal player engine has not been
+ * initialized.
+ * @throws IllegalArgumentException if audioMode is not supported.
+ *
+ * @hide
+ */
+ @NonNull
+ public PlaybackParams easyPlaybackParams(float rate, @PlaybackRateAudioMode int audioMode) {
+ PlaybackParams params = new PlaybackParams();
+ params.allowDefaults();
+ switch (audioMode) {
+ case PLAYBACK_RATE_AUDIO_MODE_DEFAULT:
+ params.setSpeed(rate).setPitch(1.0f);
+ break;
+ case PLAYBACK_RATE_AUDIO_MODE_STRETCH:
+ params.setSpeed(rate).setPitch(1.0f)
+ .setAudioFallbackMode(params.AUDIO_FALLBACK_MODE_FAIL);
+ break;
+ case PLAYBACK_RATE_AUDIO_MODE_RESAMPLE:
+ params.setSpeed(rate).setPitch(rate);
+ break;
+ default:
+ final String msg = "Audio playback mode " + audioMode + " is not supported";
+ throw new IllegalArgumentException(msg);
+ }
+ return params;
+ }
+
    /**
     * Sets playback rate using {@link PlaybackParams}. The object sets its internal
     * PlaybackParams to the input, except that the object remembers previous speed
     * when input speed is zero. This allows the object to resume at previous speed
     * when start() is called. Calling it before the object is prepared does not change
     * the object state. After the object is prepared, calling it with zero speed is
     * equivalent to calling pause(). After the object is prepared, calling it with
     * non-zero speed is equivalent to calling start().
     *
     * @param params the playback params.
     *
     * @throws IllegalStateException if the internal player engine has not been
     * initialized or has been released.
     * @throws IllegalArgumentException if params is not supported.
     */
    public native void setPlaybackParams(@NonNull PlaybackParams params);

    /**
     * Gets the playback params, containing the current playback rate.
     *
     * @return the playback params.
     * @throws IllegalStateException if the internal player engine has not been
     * initialized.
     */
    @NonNull
    public native PlaybackParams getPlaybackParams();

    /**
     * Sets A/V sync mode.
     *
     * @param params the A/V sync params to apply
     *
     * @throws IllegalStateException if the internal player engine has not been
     * initialized.
     * @throws IllegalArgumentException if params are not supported.
     */
    public native void setSyncParams(@NonNull SyncParams params);

    /**
     * Gets the A/V sync mode.
     *
     * @return the A/V sync params
     *
     * @throws IllegalStateException if the internal player engine has not been
     * initialized.
     */
    @NonNull
    public native SyncParams getSyncParams();
+
    /**
     * Seek modes used in method seekTo(long, int) to move media position
     * to a specified location.
     *
     * Do not change these mode values without updating their counterparts
     * in include/media/IMediaSource.h!
     */
    /**
     * This mode is used with {@link #seekTo(long, int)} to move media position to
     * a sync (or key) frame associated with a data source that is located
     * right before or at the given time.
     *
     * @see #seekTo(long, int)
     */
    public static final int SEEK_PREVIOUS_SYNC = 0x00;
    /**
     * This mode is used with {@link #seekTo(long, int)} to move media position to
     * a sync (or key) frame associated with a data source that is located
     * right after or at the given time.
     *
     * @see #seekTo(long, int)
     */
    public static final int SEEK_NEXT_SYNC = 0x01;
    /**
     * This mode is used with {@link #seekTo(long, int)} to move media position to
     * a sync (or key) frame associated with a data source that is located
     * closest to (in time) or at the given time.
     *
     * @see #seekTo(long, int)
     */
    public static final int SEEK_CLOSEST_SYNC = 0x02;
    /**
     * This mode is used with {@link #seekTo(long, int)} to move media position to
     * a frame (not necessarily a key frame) associated with a data source that
     * is located closest to or at the given time.
     *
     * @see #seekTo(long, int)
     */
    public static final int SEEK_CLOSEST = 0x03;

    /** @hide Restricts seekTo(long, int) to the four modes above. */
    @IntDef(
        value = {
            SEEK_PREVIOUS_SYNC,
            SEEK_NEXT_SYNC,
            SEEK_CLOSEST_SYNC,
            SEEK_CLOSEST,
    })
    @Retention(RetentionPolicy.SOURCE)
    public @interface SeekMode {}

    // Native seek; msec has already been range-checked/capped by seekTo(long, int).
    private native final void _seekTo(long msec, int mode);
+
+ /**
+ * Moves the media to specified time position by considering the given mode.
+ * <p>
+ * When seekTo is finished, the user will be notified via OnSeekComplete supplied by the user.
+ * There is at most one active seekTo processed at any time. If there is a to-be-completed
+ * seekTo, new seekTo requests will be queued in such a way that only the last request
+ * is kept. When current seekTo is completed, the queued request will be processed if
+ * that request is different from just-finished seekTo operation, i.e., the requested
+ * position or mode is different.
+ *
+ * @param msec the offset in milliseconds from the start to seek to.
+ * When seeking to the given time position, there is no guarantee that the data source
+ * has a frame located at the position. When this happens, a frame nearby will be rendered.
+ * If msec is negative, time position zero will be used.
+ * If msec is larger than duration, duration will be used.
+ * @param mode the mode indicating where exactly to seek to.
+ * Use {@link #SEEK_PREVIOUS_SYNC} if one wants to seek to a sync frame
+ * that has a timestamp earlier than or the same as msec. Use
+ * {@link #SEEK_NEXT_SYNC} if one wants to seek to a sync frame
+ * that has a timestamp later than or the same as msec. Use
+ * {@link #SEEK_CLOSEST_SYNC} if one wants to seek to a sync frame
+ * that has a timestamp closest to or the same as msec. Use
+ * {@link #SEEK_CLOSEST} if one wants to seek to a frame that may
+ * or may not be a sync frame but is closest to or the same as msec.
+ * {@link #SEEK_CLOSEST} often has larger performance overhead compared
+ * to the other options if there is no sync frame located at msec.
+ * @throws IllegalStateException if the internal player engine has not been
+ * initialized
+ * @throws IllegalArgumentException if the mode is invalid.
+ */
+ public void seekTo(long msec, @SeekMode int mode) {
+ if (mode < SEEK_PREVIOUS_SYNC || mode > SEEK_CLOSEST) {
+ final String msg = "Illegal seek mode: " + mode;
+ throw new IllegalArgumentException(msg);
+ }
+ // TODO: pass long to native, instead of truncating here.
+ if (msec > Integer.MAX_VALUE) {
+ Log.w(TAG, "seekTo offset " + msec + " is too large, cap to " + Integer.MAX_VALUE);
+ msec = Integer.MAX_VALUE;
+ } else if (msec < Integer.MIN_VALUE) {
+ Log.w(TAG, "seekTo offset " + msec + " is too small, cap to " + Integer.MIN_VALUE);
+ msec = Integer.MIN_VALUE;
+ }
+ _seekTo(msec, mode);
+ }
+
    /**
     * Seeks to specified time position.
     * Same as {@link #seekTo(long, int)} with {@code mode = SEEK_PREVIOUS_SYNC}.
     *
     * @param msec the offset in milliseconds from the start to seek to
     * @throws IllegalStateException if the internal player engine has not been
     * initialized
     */
    public void seekTo(int msec) throws IllegalStateException {
        seekTo(msec, SEEK_PREVIOUS_SYNC /* mode */);
    }
+
+ /**
+ * Get current playback position as a {@link MediaTimestamp}.
+ * <p>
+ * The MediaTimestamp represents how the media time correlates to the system time in
+ * a linear fashion using an anchor and a clock rate. During regular playback, the media
+ * time moves fairly constantly (though the anchor frame may be rebased to a current
+ * system time, the linear correlation stays steady). Therefore, this method does not
+ * need to be called often.
+ * <p>
+ * To help users get current playback position, this method always anchors the timestamp
+ * to the current {@link System#nanoTime system time}, so
+ * {@link MediaTimestamp#getAnchorMediaTimeUs} can be used as current playback position.
+ *
+ * @return a MediaTimestamp object if a timestamp is available, or {@code null} if no timestamp
+ * is available, e.g. because the media player has not been initialized.
+ *
+ * @see MediaTimestamp
+ */
+ @Nullable
+ public MediaTimestamp getTimestamp()
+ {
+ try {
+ // TODO: get the timestamp from native side
+ return new MediaTimestamp(
+ getCurrentPosition() * 1000L,
+ System.nanoTime(),
+ isPlaying() ? getPlaybackParams().getSpeed() : 0.f);
+ } catch (IllegalStateException e) {
+ return null;
+ }
+ }
+
    /**
     * Gets the current playback position.
     *
     * @return the current position in milliseconds
     */
    public native int getCurrentPosition();

    /**
     * Gets the duration of the file.
     *
     * @return the duration in milliseconds, if no duration is available
     * (for example, if streaming live content), -1 is returned.
     */
    public native int getDuration();
+
+ /**
+ * Gets the media metadata.
+ *
+ * @param update_only controls whether the full set of available
+ * metadata is returned or just the set that changed since the
+ * last call. See {@see #METADATA_UPDATE_ONLY} and {@see
+ * #METADATA_ALL}.
+ *
+ * @param apply_filter if true only metadata that matches the
+ * filter is returned. See {@see #APPLY_METADATA_FILTER} and {@see
+ * #BYPASS_METADATA_FILTER}.
+ *
+ * @return The metadata, possibly empty. null if an error occured.
+ // FIXME: unhide.
+ * {@hide}
+ */
+ public Metadata getMetadata(final boolean update_only,
+ final boolean apply_filter) {
+ Parcel reply = Parcel.obtain();
+ Metadata data = new Metadata();
+
+ if (!native_getMetadata(update_only, apply_filter, reply)) {
+ reply.recycle();
+ return null;
+ }
+
+ // Metadata takes over the parcel, don't recycle it unless
+ // there is an error.
+ if (!data.parse(reply)) {
+ reply.recycle();
+ return null;
+ }
+ return data;
+ }
+
+ /**
+ * Set a filter for the metadata update notification and update
+ * retrieval. The caller provides 2 set of metadata keys, allowed
+ * and blocked. The blocked set always takes precedence over the
+ * allowed one.
+ * Metadata.MATCH_ALL and Metadata.MATCH_NONE are 2 sets available as
+ * shorthands to allow/block all or no metadata.
+ *
+ * By default, there is no filter set.
+ *
+ * @param allow Is the set of metadata the client is interested
+ * in receiving new notifications for.
+ * @param block Is the set of metadata the client is not interested
+ * in receiving new notifications for.
+ * @return The call status code.
+ *
+ // FIXME: unhide.
+ * {@hide}
+ */
+ public int setMetadataFilter(Set<Integer> allow, Set<Integer> block) {
+ // Do our serialization manually instead of calling
+ // Parcel.writeArray since the sets are made of the same type
+ // we avoid paying the price of calling writeValue (used by
+ // writeArray) which burns an extra int per element to encode
+ // the type.
+ Parcel request = newRequest();
+
+ // The parcel starts already with an interface token. There
+ // are 2 filters. Each one starts with a 4bytes number to
+ // store the len followed by a number of int (4 bytes as well)
+ // representing the metadata type.
+ int capacity = request.dataSize() + 4 * (1 + allow.size() + 1 + block.size());
+
+ if (request.dataCapacity() < capacity) {
+ request.setDataCapacity(capacity);
+ }
+
+ request.writeInt(allow.size());
+ for(Integer t: allow) {
+ request.writeInt(t);
+ }
+ request.writeInt(block.size());
+ for(Integer t: block) {
+ request.writeInt(t);
+ }
+ return native_setMetadataFilter(request);
+ }
+
    /**
     * Set the MediaPlayer to start when this MediaPlayer finishes playback
     * (i.e. reaches the end of the stream).
     * The media framework will attempt to transition from this player to
     * the next as seamlessly as possible. The next player can be set at
     * any time before completion, but shall be after setDataSource has been
     * called successfully. The next player must be prepared by the
     * app, and the application should not call start() on it.
     * The next MediaPlayer must be different from 'this'. An exception
     * will be thrown if next == this.
     * The application may call setNextMediaPlayer(null) to indicate no
     * next player should be started at the end of playback.
     * If the current player is looping, it will keep looping and the next
     * player will not be started.
     *
     * @param next the player to start after this one completes playback.
     *
     */
    public native void setNextMediaPlayer(MediaPlayer next);
+
    /**
     * Releases resources associated with this MediaPlayer object.
     * It is considered good practice to call this method when you're
     * done using the MediaPlayer. In particular, whenever an Activity
     * of an application is paused (its onPause() method is called),
     * or stopped (its onStop() method is called), this method should be
     * invoked to release the MediaPlayer object, unless the application
     * has a special need to keep the object around. In addition to
     * unnecessary resources (such as memory and instances of codecs)
     * being held, failure to call this method immediately if a
     * MediaPlayer object is no longer needed may also lead to
     * continuous battery consumption for mobile devices, and playback
     * failure for other applications if no multiple instances of the
     * same codec are supported on a device. Even if multiple instances
     * of the same codec are supported, some performance degradation
     * may be expected when unnecessary multiple instances are used
     * at the same time.
     */
    public void release() {
        baseRelease();
        stayAwake(false);
        updateSurfaceScreenOn();
        // Drop all listener references so no further callbacks are delivered
        // after release.
        mOnPreparedListener = null;
        mOnBufferingUpdateListener = null;
        mOnCompletionListener = null;
        mOnSeekCompleteListener = null;
        mOnErrorListener = null;
        mOnInfoListener = null;
        mOnVideoSizeChangedListener = null;
        mOnTimedTextListener = null;
        if (mTimeProvider != null) {
            mTimeProvider.close();
            mTimeProvider = null;
        }
        mOnSubtitleDataListener = null;

        // Modular DRM clean up
        mOnDrmConfigHelper = null;
        mOnDrmInfoHandlerDelegate = null;
        mOnDrmPreparedHandlerDelegate = null;
        resetDrmState();

        // Native teardown happens last, after all Java-side state is cleared.
        _release();
    }

    // Native counterpart of release().
    private native void _release();
+
+ /**
+ * Resets the MediaPlayer to its uninitialized state. After calling
+ * this method, you will have to initialize it again by setting the
+ * data source and calling prepare().
+ */
+ public void reset() {
+ mSelectedSubtitleTrackIndex = -1;
+ synchronized(mOpenSubtitleSources) {
+ for (final InputStream is: mOpenSubtitleSources) {
+ try {
+ is.close();
+ } catch (IOException e) {
+ }
+ }
+ mOpenSubtitleSources.clear();
+ }
+ if (mSubtitleController != null) {
+ mSubtitleController.reset();
+ }
+ if (mTimeProvider != null) {
+ mTimeProvider.close();
+ mTimeProvider = null;
+ }
+
+ stayAwake(false);
+ _reset();
+ // make sure none of the listeners get called anymore
+ if (mEventHandler != null) {
+ mEventHandler.removeCallbacksAndMessages(null);
+ }
+
+ synchronized (mIndexTrackPairs) {
+ mIndexTrackPairs.clear();
+ mInbandTrackIndices.clear();
+ };
+
+ resetDrmState();
+ }
+
+ private native void _reset();
+
    /**
     * Sets the audio stream type for this MediaPlayer. See {@link AudioManager}
     * for a list of stream types. Must call this method before prepare() or
     * prepareAsync() in order for the target stream type to become effective
     * thereafter.
     *
     * @param streamtype the audio stream type
     * @deprecated use {@link #setAudioAttributes(AudioAttributes)}
     * @see android.media.AudioManager
     */
    public void setAudioStreamType(int streamtype) {
        // Log the deprecation, then mirror the stream type into the base player's
        // audio attributes, the native engine, and the cached field.
        deprecateStreamTypeForPlayback(streamtype, "MediaPlayer", "setAudioStreamType()");
        baseUpdateAudioAttributes(
                new AudioAttributes.Builder().setInternalLegacyStreamType(streamtype).build());
        _setAudioStreamType(streamtype);
        mStreamType = streamtype;
    }

    // Native counterpart of setAudioStreamType().
    private native void _setAudioStreamType(int streamtype);
+
    // Keep KEY_PARAMETER_* in sync with include/media/mediaplayer.h
    private final static int KEY_PARAMETER_AUDIO_ATTRIBUTES = 1400;
    /**
     * Sets the parameter indicated by key.
     * @param key key indicates the parameter to be set.
     * @param value value of the parameter to be set.
     * @return true if the parameter is set successfully, false otherwise
     * {@hide}
     */
    private native boolean setParameter(int key, Parcel value);
+
+ /**
+ * Sets the audio attributes for this MediaPlayer.
+ * See {@link AudioAttributes} for how to build and configure an instance of this class.
+ * You must call this method before {@link #prepare()} or {@link #prepareAsync()} in order
+ * for the audio attributes to become effective thereafter.
+ * @param attributes a non-null set of audio attributes
+ */
+ public void setAudioAttributes(AudioAttributes attributes) throws IllegalArgumentException {
+ if (attributes == null) {
+ final String msg = "Cannot set AudioAttributes to null";
+ throw new IllegalArgumentException(msg);
+ }
+ baseUpdateAudioAttributes(attributes);
+ mUsage = attributes.getUsage();
+ mBypassInterruptionPolicy = (attributes.getAllFlags()
+ & AudioAttributes.FLAG_BYPASS_INTERRUPTION_POLICY) != 0;
+ Parcel pattributes = Parcel.obtain();
+ attributes.writeToParcel(pattributes, AudioAttributes.FLATTEN_TAGS);
+ setParameter(KEY_PARAMETER_AUDIO_ATTRIBUTES, pattributes);
+ pattributes.recycle();
+ }
+
    /**
     * Sets the player to be looping or non-looping. Implemented natively.
     *
     * @param looping whether to loop or not
     */
    public native void setLooping(boolean looping);

    /**
     * Checks whether the MediaPlayer is looping or non-looping. Implemented natively.
     *
     * @return true if the MediaPlayer is currently looping, false otherwise
     */
    public native boolean isLooping();
+
+ /**
+ * Sets the volume on this player.
+ * This API is recommended for balancing the output of audio streams
+ * within an application. Unless you are writing an application to
+ * control user settings, this API should be used in preference to
+ * {@link AudioManager#setStreamVolume(int, int, int)} which sets the volume of ALL streams of
+ * a particular type. Note that the passed volume values are raw scalars in range 0.0 to 1.0.
+ * UI controls should be scaled logarithmically.
+ *
+ * @param leftVolume left volume scalar
+ * @param rightVolume right volume scalar
+ */
+ /*
+ * FIXME: Merge this into javadoc comment above when setVolume(float) is not @hide.
+ * The single parameter form below is preferred if the channel volumes don't need
+ * to be set independently.
+ */
+ public void setVolume(float leftVolume, float rightVolume) {
+ baseSetVolume(leftVolume, rightVolume);
+ }
+
+ @Override
+ void playerSetVolume(boolean muting, float leftVolume, float rightVolume) {
+ _setVolume(muting ? 0.0f : leftVolume, muting ? 0.0f : rightVolume);
+ }
+
    // Native counterpart applying per-channel volume scalars.
    private native void _setVolume(float leftVolume, float rightVolume);
+
+ /**
+ * Similar, excepts sets volume of all channels to same value.
+ * @hide
+ */
+ public void setVolume(float volume) {
+ setVolume(volume, volume);
+ }
+
    /**
     * Sets the audio session ID. Implemented natively.
     *
     * @param sessionId the audio session ID.
     * The audio session ID is a system wide unique identifier for the audio stream played by
     * this MediaPlayer instance.
     * The primary use of the audio session ID is to associate audio effects to a particular
     * instance of MediaPlayer: if an audio session ID is provided when creating an audio effect,
     * this effect will be applied only to the audio content of media players within the same
     * audio session and not to the output mix.
     * When created, a MediaPlayer instance automatically generates its own audio session ID.
     * However, it is possible to force this player to be part of an already existing audio session
     * by calling this method.
     * This method must be called before one of the overloaded <code> setDataSource </code> methods.
     * @throws IllegalStateException if it is called in an invalid state
     */
    public native void setAudioSessionId(int sessionId) throws IllegalArgumentException, IllegalStateException;

    /**
     * Returns the audio session ID. Implemented natively.
     *
     * @return the audio session ID. {@see #setAudioSessionId(int)}
     * Note that the audio session ID is 0 only if a problem occurred when the MediaPlayer was constructed.
     */
    public native int getAudioSessionId();
+
    /**
     * Attaches an auxiliary effect to the player. A typical auxiliary effect is a reverberation
     * effect which can be applied on any sound source that directs a certain amount of its
     * energy to this effect. This amount is defined by setAuxEffectSendLevel().
     * See {@link #setAuxEffectSendLevel(float)}. Implemented natively.
     * <p>After creating an auxiliary effect (e.g.
     * {@link android.media.audiofx.EnvironmentalReverb}), retrieve its ID with
     * {@link android.media.audiofx.AudioEffect#getId()} and use it when calling this method
     * to attach the player to the effect.
     * <p>To detach the effect from the player, call this method with a null effect id.
     * <p>This method must be called after one of the overloaded <code> setDataSource </code>
     * methods.
     * @param effectId system wide unique id of the effect to attach
     */
    public native void attachAuxEffect(int effectId);
+
+
+ /**
+ * Sets the send level of the player to the attached auxiliary effect.
+ * See {@link #attachAuxEffect(int)}. The level value range is 0 to 1.0.
+ * <p>By default the send level is 0, so even if an effect is attached to the player
+ * this method must be called for the effect to be applied.
+ * <p>Note that the passed level value is a raw scalar. UI controls should be scaled
+ * logarithmically: the gain applied by audio framework ranges from -72dB to 0dB,
+ * so an appropriate conversion from linear UI input x to level is:
+ * x == 0 -> level = 0
+ * 0 < x <= R -> level = 10^(72*(x-R)/20/R)
+ * @param level send level scalar
+ */
+ public void setAuxEffectSendLevel(float level) {
+ baseSetAuxEffectSendLevel(level);
+ }
+
+ @Override
+ int playerSetAuxEffectSendLevel(boolean muting, float level) {
+ _setAuxEffectSendLevel(muting ? 0.0f : level);
+ return AudioSystem.SUCCESS;
+ }
+
    // Native counterpart of setAuxEffectSendLevel().
    private native void _setAuxEffectSendLevel(float level);

    /*
     * Invokes a generic request on the native player.
     *
     * @param request Parcel destined to the media player. The
     *                Interface token must be set to the IMediaPlayer
     *                one to be routed correctly through the system.
     * @param reply[out] Parcel that will contain the reply.
     * @return The status code.
     */
    private native final int native_invoke(Parcel request, Parcel reply);


    /*
     * Fetches (optionally filtered) metadata from the native player.
     *
     * @param update_only If true fetch only the set of metadata that have
     *                    changed since the last invocation of getMetadata.
     *                    The set is built using the unfiltered
     *                    notifications the native player sent to the
     *                    MediaPlayerService during that period of
     *                    time. If false, all the metadata are considered.
     * @param apply_filter If true, once the metadata set has been built based on
     *                     the value update_only, the current filter is applied.
     * @param reply[out] On return contains the serialized
     *                   metadata. Valid only if the call was successful.
     * @return The status code.
     */
    private native final boolean native_getMetadata(boolean update_only,
                                                    boolean apply_filter,
                                                    Parcel reply);

    /*
     * Installs the metadata filter on the native side.
     *
     * @param request Parcel with the 2 serialized lists of allowed
     *                metadata types followed by the one to be
     *                dropped. Each list starts with an integer
     *                indicating the number of metadata type elements.
     * @return The status code.
     */
    private native final int native_setMetadataFilter(Parcel request);

    // One-time static initialization of the native class.
    private static native final void native_init();
    // Per-instance native setup; mediaplayer_this is used by native code to
    // post events back to this Java instance.
    private native final void native_setup(Object mediaplayer_this);
    // Native cleanup invoked from finalize().
    private native final void native_finalize();
+
+ /**
+ * Class for MediaPlayer to return each audio/video/subtitle track's metadata.
+ *
+ * @see android.media.MediaPlayer#getTrackInfo
+ */
+ static public class TrackInfo implements Parcelable {
+ /**
+ * Gets the track type.
+ * @return TrackType which indicates if the track is video, audio, timed text.
+ */
+ public int getTrackType() {
+ return mTrackType;
+ }
+
+ /**
+ * Gets the language code of the track.
+ * @return a language code in either way of ISO-639-1 or ISO-639-2.
+ * When the language is unknown or could not be determined,
+ * ISO-639-2 language code, "und", is returned.
+ */
+ public String getLanguage() {
+ String language = mFormat.getString(MediaFormat.KEY_LANGUAGE);
+ return language == null ? "und" : language;
+ }
+
+ /**
+ * Gets the {@link MediaFormat} of the track. If the format is
+ * unknown or could not be determined, null is returned.
+ */
+ public MediaFormat getFormat() {
+ if (mTrackType == MEDIA_TRACK_TYPE_TIMEDTEXT
+ || mTrackType == MEDIA_TRACK_TYPE_SUBTITLE) {
+ return mFormat;
+ }
+ return null;
+ }
+
+ public static final int MEDIA_TRACK_TYPE_UNKNOWN = 0;
+ public static final int MEDIA_TRACK_TYPE_VIDEO = 1;
+ public static final int MEDIA_TRACK_TYPE_AUDIO = 2;
+ public static final int MEDIA_TRACK_TYPE_TIMEDTEXT = 3;
+ public static final int MEDIA_TRACK_TYPE_SUBTITLE = 4;
+ public static final int MEDIA_TRACK_TYPE_METADATA = 5;
+
+ final int mTrackType;
+ final MediaFormat mFormat;
+
+ TrackInfo(Parcel in) {
+ mTrackType = in.readInt();
+ // TODO: parcel in the full MediaFormat; currently we are using createSubtitleFormat
+ // even for audio/video tracks, meaning we only set the mime and language.
+ String mime = in.readString();
+ String language = in.readString();
+ mFormat = MediaFormat.createSubtitleFormat(mime, language);
+
+ if (mTrackType == MEDIA_TRACK_TYPE_SUBTITLE) {
+ mFormat.setInteger(MediaFormat.KEY_IS_AUTOSELECT, in.readInt());
+ mFormat.setInteger(MediaFormat.KEY_IS_DEFAULT, in.readInt());
+ mFormat.setInteger(MediaFormat.KEY_IS_FORCED_SUBTITLE, in.readInt());
+ }
+ }
+
+ /** @hide */
+ TrackInfo(int type, MediaFormat format) {
+ mTrackType = type;
+ mFormat = format;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public void writeToParcel(Parcel dest, int flags) {
+ dest.writeInt(mTrackType);
+ dest.writeString(getLanguage());
+
+ if (mTrackType == MEDIA_TRACK_TYPE_SUBTITLE) {
+ dest.writeString(mFormat.getString(MediaFormat.KEY_MIME));
+ dest.writeInt(mFormat.getInteger(MediaFormat.KEY_IS_AUTOSELECT));
+ dest.writeInt(mFormat.getInteger(MediaFormat.KEY_IS_DEFAULT));
+ dest.writeInt(mFormat.getInteger(MediaFormat.KEY_IS_FORCED_SUBTITLE));
+ }
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder out = new StringBuilder(128);
+ out.append(getClass().getName());
+ out.append('{');
+ switch (mTrackType) {
+ case MEDIA_TRACK_TYPE_VIDEO:
+ out.append("VIDEO");
+ break;
+ case MEDIA_TRACK_TYPE_AUDIO:
+ out.append("AUDIO");
+ break;
+ case MEDIA_TRACK_TYPE_TIMEDTEXT:
+ out.append("TIMEDTEXT");
+ break;
+ case MEDIA_TRACK_TYPE_SUBTITLE:
+ out.append("SUBTITLE");
+ break;
+ default:
+ out.append("UNKNOWN");
+ break;
+ }
+ out.append(", " + mFormat.toString());
+ out.append("}");
+ return out.toString();
+ }
+
+ /**
+ * Used to read a TrackInfo from a Parcel.
+ */
+ static final Parcelable.Creator<TrackInfo> CREATOR
+ = new Parcelable.Creator<TrackInfo>() {
+ @Override
+ public TrackInfo createFromParcel(Parcel in) {
+ return new TrackInfo(in);
+ }
+
+ @Override
+ public TrackInfo[] newArray(int size) {
+ return new TrackInfo[size];
+ }
+ };
+
+ };
+
    // We would like domain specific classes with more informative names than the `first` and `second`
    // in generic Pair, but we would also like to avoid creating new/trivial classes. As a compromise
    // we document the meanings of `first` and `second` here:
    //
    // Pair.first - inband track index; non-null iff representing an inband track.
    // Pair.second - a SubtitleTrack registered with mSubtitleController; non-null iff representing
    //               an inband subtitle track or any out-of-band track (subtitle or timedtext).
    //
    // Both fields below are guarded by synchronizing on mIndexTrackPairs.
    private Vector<Pair<Integer, SubtitleTrack>> mIndexTrackPairs = new Vector<>();
    // Bit i is set once inband track i has been registered in mIndexTrackPairs.
    private BitSet mInbandTrackIndices = new BitSet();
+
+ /**
+ * Returns an array of track information.
+ *
+ * @return Array of track info. The total number of tracks is the array length.
+ * Must be called again if an external timed text source has been added after any of the
+ * addTimedTextSource methods are called.
+ * @throws IllegalStateException if it is called in an invalid state.
+ */
+ public TrackInfo[] getTrackInfo() throws IllegalStateException {
+ TrackInfo trackInfo[] = getInbandTrackInfo();
+ // add out-of-band tracks
+ synchronized (mIndexTrackPairs) {
+ TrackInfo allTrackInfo[] = new TrackInfo[mIndexTrackPairs.size()];
+ for (int i = 0; i < allTrackInfo.length; i++) {
+ Pair<Integer, SubtitleTrack> p = mIndexTrackPairs.get(i);
+ if (p.first != null) {
+ // inband track
+ allTrackInfo[i] = trackInfo[p.first];
+ } else {
+ SubtitleTrack track = p.second;
+ allTrackInfo[i] = new TrackInfo(track.getTrackType(), track.getFormat());
+ }
+ }
+ return allTrackInfo;
+ }
+ }
+
+ private TrackInfo[] getInbandTrackInfo() throws IllegalStateException {
+ Parcel request = Parcel.obtain();
+ Parcel reply = Parcel.obtain();
+ try {
+ request.writeInterfaceToken(IMEDIA_PLAYER);
+ request.writeInt(INVOKE_ID_GET_TRACK_INFO);
+ invoke(request, reply);
+ TrackInfo trackInfo[] = reply.createTypedArray(TrackInfo.CREATOR);
+ return trackInfo;
+ } finally {
+ request.recycle();
+ reply.recycle();
+ }
+ }
+
    /* Do not change these values without updating their counterparts
     * in include/media/stagefright/MediaDefs.h and media/libstagefright/MediaDefs.cpp!
     */
    /**
     * MIME type for SubRip (SRT) container. Used in addTimedTextSource APIs.
     */
    public static final String MEDIA_MIMETYPE_TEXT_SUBRIP = "application/x-subrip";

    /**
     * MIME type for WebVTT subtitle data.
     * @hide
     */
    public static final String MEDIA_MIMETYPE_TEXT_VTT = "text/vtt";

    /**
     * MIME type for CEA-608 closed caption data.
     * @hide
     */
    public static final String MEDIA_MIMETYPE_TEXT_CEA_608 = "text/cea-608";

    /**
     * MIME type for CEA-708 closed caption data.
     * @hide
     */
    public static final String MEDIA_MIMETYPE_TEXT_CEA_708 = "text/cea-708";
+
+ /*
+ * A helper function to check if the mime type is supported by media framework.
+ */
+ private static boolean availableMimeTypeForExternalSource(String mimeType) {
+ if (MEDIA_MIMETYPE_TEXT_SUBRIP.equals(mimeType)) {
+ return true;
+ }
+ return false;
+ }
+
    // Manages in- and out-of-band subtitle tracks; may be supplied by the client
    // (e.g. VideoView) via setSubtitleAnchor(controller, anchor) or created lazily
    // by the private setSubtitleAnchor().
    private SubtitleController mSubtitleController;

    /** @hide */
    public void setSubtitleAnchor(
            SubtitleController controller,
            SubtitleController.Anchor anchor) {
        // TODO: create SubtitleController in MediaPlayer
        mSubtitleController = controller;
        mSubtitleController.setAnchor(anchor);
    }
+
    /**
     * The private version of setSubtitleAnchor is used internally to set mSubtitleController if
     * necessary when clients don't provide their own SubtitleControllers using the public version
     * {@link #setSubtitleAnchor(SubtitleController, Anchor)} (e.g. {@link VideoView} provides one).
     *
     * The controller is constructed on a short-lived HandlerThread so that it is
     * bound to a Looper other than the caller's; the calling thread blocks (join)
     * until construction finishes.
     */
    private synchronized void setSubtitleAnchor() {
        // Only build a default controller once, and only when an application
        // context is available.
        if ((mSubtitleController == null) && (ActivityThread.currentApplication() != null)) {
            final HandlerThread thread = new HandlerThread("SetSubtitleAnchorThread");
            thread.start();
            Handler handler = new Handler(thread.getLooper());
            handler.post(new Runnable() {
                @Override
                public void run() {
                    Context context = ActivityThread.currentApplication();
                    mSubtitleController = new SubtitleController(context, mTimeProvider, MediaPlayer.this);
                    mSubtitleController.setAnchor(new Anchor() {
                        @Override
                        public void setSubtitleWidget(RenderingWidget subtitleWidget) {
                            // default anchor renders nothing
                        }

                        @Override
                        public Looper getSubtitleLooper() {
                            return Looper.getMainLooper();
                        }
                    });
                    // Tear the worker thread down once construction completes.
                    thread.getLooper().quitSafely();
                }
            });
            try {
                thread.join();
            } catch (InterruptedException e) {
                // Preserve the caller's interrupt status.
                Thread.currentThread().interrupt();
                Log.w(TAG, "failed to join SetSubtitleAnchorThread");
            }
        }
    }
+
    // Inband index (as used by the native player) of the currently selected
    // subtitle track, or -1 when no inband subtitle track is selected.
    private int mSelectedSubtitleTrackIndex = -1;
    // InputStreams passed to addSubtitleSource() that have not yet been fully
    // read and closed; guarded by synchronizing on the Vector itself.
    private Vector<InputStream> mOpenSubtitleSources;

    // Forwards inband subtitle data from the native player to the SubtitleTrack
    // registered for the matching inband index.
    private OnSubtitleDataListener mSubtitleDataListener = new OnSubtitleDataListener() {
        @Override
        public void onSubtitleData(MediaPlayer mp, SubtitleData data) {
            int index = data.getTrackIndex();
            synchronized (mIndexTrackPairs) {
                for (Pair<Integer, SubtitleTrack> p : mIndexTrackPairs) {
                    if (p.first != null && p.first == index && p.second != null) {
                        // inband subtitle track that owns data
                        SubtitleTrack track = p.second;
                        track.onData(data);
                    }
                }
            }
        }
    };
+
    /** @hide
     * SubtitleController callback: deselects the previously selected inband
     * subtitle track (if any), then, if {@code track} maps to an inband track,
     * selects it natively and re-attaches the data listener. Out-of-band tracks
     * need no native selection.
     */
    @Override
    public void onSubtitleTrackSelected(SubtitleTrack track) {
        if (mSelectedSubtitleTrackIndex >= 0) {
            try {
                selectOrDeselectInbandTrack(mSelectedSubtitleTrackIndex, false);
            } catch (IllegalStateException e) {
                // best-effort deselect; player may already be torn down
            }
            mSelectedSubtitleTrackIndex = -1;
        }
        setOnSubtitleDataListener(null);
        if (track == null) {
            return;
        }

        // Find the inband index (if any) backing the selected track.
        synchronized (mIndexTrackPairs) {
            for (Pair<Integer, SubtitleTrack> p : mIndexTrackPairs) {
                if (p.first != null && p.second == track) {
                    // inband subtitle track that is selected
                    mSelectedSubtitleTrackIndex = p.first;
                    break;
                }
            }
        }

        if (mSelectedSubtitleTrackIndex >= 0) {
            try {
                selectOrDeselectInbandTrack(mSelectedSubtitleTrackIndex, true);
            } catch (IllegalStateException e) {
                // best-effort select; ignore if the player state disallows it
            }
            setOnSubtitleDataListener(mSubtitleDataListener);
        }
        // no need to select out-of-band tracks
    }
+
    /** @hide
     * Registers an out-of-band subtitle source. The stream is read in full on a
     * dedicated background thread, handed to the SubtitleController as a new
     * track, and the result (supported or not) is reported to the app through
     * a MEDIA_INFO event.
     */
    public void addSubtitleSource(InputStream is, MediaFormat format)
            throws IllegalStateException
    {
        final InputStream fIs = is;
        final MediaFormat fFormat = format;

        if (is != null) {
            // Ensure all input streams are closed. It is also a handy
            // way to implement timeouts in the future.
            synchronized(mOpenSubtitleSources) {
                mOpenSubtitleSources.add(is);
            }
        } else {
            Log.w(TAG, "addSubtitleSource called with null InputStream");
        }

        // Ensure mTimeProvider exists before the worker dereferences it below.
        getMediaTimeProvider();

        // process each subtitle in its own thread
        final HandlerThread thread = new HandlerThread("SubtitleReadThread",
              Process.THREAD_PRIORITY_BACKGROUND + Process.THREAD_PRIORITY_MORE_FAVORABLE);
        thread.start();
        Handler handler = new Handler(thread.getLooper());
        handler.post(new Runnable() {
            private int addTrack() {
                if (fIs == null || mSubtitleController == null) {
                    return MEDIA_INFO_UNSUPPORTED_SUBTITLE;
                }

                SubtitleTrack track = mSubtitleController.addTrack(fFormat);
                if (track == null) {
                    return MEDIA_INFO_UNSUPPORTED_SUBTITLE;
                }

                // TODO: do the conversion in the subtitle track
                // Slurp the whole stream as UTF-8 text.
                // NOTE(review): Scanner.next() throws NoSuchElementException on an
                // empty stream, and contents.getBytes() below uses the platform
                // default charset rather than UTF-8 — confirm both are intended.
                Scanner scanner = new Scanner(fIs, "UTF-8");
                String contents = scanner.useDelimiter("\\A").next();
                synchronized(mOpenSubtitleSources) {
                    mOpenSubtitleSources.remove(fIs);
                }
                scanner.close();
                synchronized (mIndexTrackPairs) {
                    mIndexTrackPairs.add(Pair.<Integer, SubtitleTrack>create(null, track));
                }
                // Deliver the raw bytes to the track via the time provider's handler.
                Handler h = mTimeProvider.mEventHandler;
                int what = TimeProvider.NOTIFY;
                int arg1 = TimeProvider.NOTIFY_TRACK_DATA;
                Pair<SubtitleTrack, byte[]> trackData = Pair.create(track, contents.getBytes());
                Message m = h.obtainMessage(what, arg1, 0, trackData);
                h.sendMessage(m);
                return MEDIA_INFO_EXTERNAL_METADATA_UPDATE;
            }

            public void run() {
                int res = addTrack();
                if (mEventHandler != null) {
                    Message m = mEventHandler.obtainMessage(MEDIA_INFO, res, 0, null);
                    mEventHandler.sendMessage(m);
                }
                // One-shot worker: quit the thread after processing this source.
                thread.getLooper().quitSafely();
            }
        });
    }
+
+ private void scanInternalSubtitleTracks() {
+ setSubtitleAnchor();
+
+ populateInbandTracks();
+
+ if (mSubtitleController != null) {
+ mSubtitleController.selectDefaultTrack();
+ }
+ }
+
+ private void populateInbandTracks() {
+ TrackInfo[] tracks = getInbandTrackInfo();
+ synchronized (mIndexTrackPairs) {
+ for (int i = 0; i < tracks.length; i++) {
+ if (mInbandTrackIndices.get(i)) {
+ continue;
+ } else {
+ mInbandTrackIndices.set(i);
+ }
+
+ // newly appeared inband track
+ if (tracks[i].getTrackType() == TrackInfo.MEDIA_TRACK_TYPE_SUBTITLE) {
+ SubtitleTrack track = mSubtitleController.addTrack(
+ tracks[i].getFormat());
+ mIndexTrackPairs.add(Pair.create(i, track));
+ } else {
+ mIndexTrackPairs.add(Pair.<Integer, SubtitleTrack>create(i, null));
+ }
+ }
+ }
+ }
+
+ /* TODO: Limit the total number of external timed text source to a reasonable number.
+ */
+ /**
+ * Adds an external timed text source file.
+ *
+ * Currently supported format is SubRip with the file extension .srt, case insensitive.
+ * Note that a single external timed text source may contain multiple tracks in it.
+ * One can find the total number of available tracks using {@link #getTrackInfo()} to see what
+ * additional tracks become available after this method call.
+ *
+ * @param path The file path of external timed text source file.
+ * @param mimeType The mime type of the file. Must be one of the mime types listed above.
+ * @throws IOException if the file cannot be accessed or is corrupted.
+ * @throws IllegalArgumentException if the mimeType is not supported.
+ * @throws IllegalStateException if called in an invalid state.
+ */
+ public void addTimedTextSource(String path, String mimeType)
+ throws IOException, IllegalArgumentException, IllegalStateException {
+ if (!availableMimeTypeForExternalSource(mimeType)) {
+ final String msg = "Illegal mimeType for timed text source: " + mimeType;
+ throw new IllegalArgumentException(msg);
+ }
+
+ File file = new File(path);
+ if (file.exists()) {
+ FileInputStream is = new FileInputStream(file);
+ FileDescriptor fd = is.getFD();
+ addTimedTextSource(fd, mimeType);
+ is.close();
+ } else {
+ // We do not support the case where the path is not a file.
+ throw new IOException(path);
+ }
+ }
+
    /**
     * Adds an external timed text source file (Uri).
     *
     * Currently supported format is SubRip with the file extension .srt, case insensitive.
     * Note that a single external timed text source may contain multiple tracks in it.
     * One can find the total number of available tracks using {@link #getTrackInfo()} to see what
     * additional tracks become available after this method call.
     *
     * @param context the Context to use when resolving the Uri
     * @param uri the Content URI of the data you want to play
     * @param mimeType The mime type of the file. Must be one of the mime types listed above.
     * @throws IOException if the file cannot be accessed or is corrupted.
     * @throws IllegalArgumentException if the mimeType is not supported.
     * @throws IllegalStateException if called in an invalid state.
     */
    public void addTimedTextSource(Context context, Uri uri, String mimeType)
            throws IOException, IllegalArgumentException, IllegalStateException {
        // file:// (or schemeless) URIs take the plain file path route.
        String scheme = uri.getScheme();
        if(scheme == null || scheme.equals("file")) {
            addTimedTextSource(uri.getPath(), mimeType);
            return;
        }

        AssetFileDescriptor fd = null;
        try {
            ContentResolver resolver = context.getContentResolver();
            fd = resolver.openAssetFileDescriptor(uri, "r");
            if (fd == null) {
                return;
            }
            addTimedTextSource(fd.getFileDescriptor(), mimeType);
            return;
        } catch (SecurityException ex) {
            // NOTE(review): resolver failures are silently ignored here — the
            // source is simply not added; confirm this best-effort contract.
        } catch (IOException ex) {
            // see note above: deliberately swallowed, method returns normally
        } finally {
            if (fd != null) {
                fd.close();
            }
        }
    }
+
    /**
     * Adds an external timed text source file (FileDescriptor).
     *
     * It is the caller's responsibility to close the file descriptor.
     * It is safe to do so as soon as this call returns.
     *
     * Currently supported format is SubRip. Note that a single external timed text source may
     * contain multiple tracks in it. One can find the total number of available tracks
     * using {@link #getTrackInfo()} to see what additional tracks become available
     * after this method call.
     *
     * @param fd the FileDescriptor for the file you want to play
     * @param mimeType The mime type of the file. Must be one of the mime types listed above.
     * @throws IllegalArgumentException if the mimeType is not supported.
     * @throws IllegalStateException if called in an invalid state.
     */
    public void addTimedTextSource(FileDescriptor fd, String mimeType)
            throws IllegalArgumentException, IllegalStateException {
        // intentionally less than LONG_MAX: 0x7ffffffffffffffL (2^59-1) means
        // "read to end of file" without risking offset+length overflow.
        addTimedTextSource(fd, 0, 0x7ffffffffffffffL, mimeType);
    }
+
    /**
     * Adds an external timed text file (FileDescriptor).
     *
     * It is the caller's responsibility to close the file descriptor.
     * It is safe to do so as soon as this call returns: the descriptor is
     * dup'ed below and the dup is owned (and closed) by the worker thread.
     *
     * Currently supported format is SubRip. Note that a single external timed text source may
     * contain multiple tracks in it. One can find the total number of available tracks
     * using {@link #getTrackInfo()} to see what additional tracks become available
     * after this method call.
     *
     * @param fd the FileDescriptor for the file you want to play
     * @param offset the offset into the file where the data to be played starts, in bytes
     * @param length the length in bytes of the data to be played
     * @param mime The mime type of the file. Must be one of the mime types listed above.
     * @throws IllegalArgumentException if the mimeType is not supported.
     * @throws IllegalStateException if called in an invalid state.
     */
    public void addTimedTextSource(FileDescriptor fd, long offset, long length, String mime)
            throws IllegalArgumentException, IllegalStateException {
        if (!availableMimeTypeForExternalSource(mime)) {
            throw new IllegalArgumentException("Illegal mimeType for timed text source: " + mime);
        }

        // Dup the caller's fd so they may close theirs immediately.
        final FileDescriptor dupedFd;
        try {
            dupedFd = Libcore.os.dup(fd);
        } catch (ErrnoException ex) {
            Log.e(TAG, ex.getMessage(), ex);
            throw new RuntimeException(ex);
        }

        final MediaFormat fFormat = new MediaFormat();
        fFormat.setString(MediaFormat.KEY_MIME, mime);
        fFormat.setInteger(MediaFormat.KEY_IS_TIMED_TEXT, 1);

        // A MediaPlayer created by a VideoView should already have its mSubtitleController set.
        if (mSubtitleController == null) {
            setSubtitleAnchor();
        }

        if (!mSubtitleController.hasRendererFor(fFormat)) {
            // test and add not atomic
            Context context = ActivityThread.currentApplication();
            mSubtitleController.registerRenderer(new SRTRenderer(context, mEventHandler));
        }
        final SubtitleTrack track = mSubtitleController.addTrack(fFormat);
        synchronized (mIndexTrackPairs) {
            mIndexTrackPairs.add(Pair.<Integer, SubtitleTrack>create(null, track));
        }

        // Ensure mTimeProvider exists before the worker dereferences it below.
        getMediaTimeProvider();

        final long offset2 = offset;
        final long length2 = length;
        // Read the file range on a dedicated background thread.
        final HandlerThread thread = new HandlerThread(
                "TimedTextReadThread",
                Process.THREAD_PRIORITY_BACKGROUND + Process.THREAD_PRIORITY_MORE_FAVORABLE);
        thread.start();
        Handler handler = new Handler(thread.getLooper());
        handler.post(new Runnable() {
            private int addTrack() {
                final ByteArrayOutputStream bos = new ByteArrayOutputStream();
                try {
                    // Copy [offset2, offset2+length2) from the dup'ed fd into memory.
                    Libcore.os.lseek(dupedFd, offset2, OsConstants.SEEK_SET);
                    byte[] buffer = new byte[4096];
                    for (long total = 0; total < length2;) {
                        int bytesToRead = (int) Math.min(buffer.length, length2 - total);
                        int bytes = IoBridge.read(dupedFd, buffer, 0, bytesToRead);
                        if (bytes < 0) {
                            // end of file before length2 bytes were read
                            break;
                        } else {
                            bos.write(buffer, 0, bytes);
                            total += bytes;
                        }
                    }
                    // Hand the collected bytes to the track via the time provider.
                    Handler h = mTimeProvider.mEventHandler;
                    int what = TimeProvider.NOTIFY;
                    int arg1 = TimeProvider.NOTIFY_TRACK_DATA;
                    Pair<SubtitleTrack, byte[]> trackData = Pair.create(track, bos.toByteArray());
                    Message m = h.obtainMessage(what, arg1, 0, trackData);
                    h.sendMessage(m);
                    return MEDIA_INFO_EXTERNAL_METADATA_UPDATE;
                } catch (Exception e) {
                    Log.e(TAG, e.getMessage(), e);
                    return MEDIA_INFO_TIMED_TEXT_ERROR;
                } finally {
                    // This thread owns the dup'ed descriptor; always close it.
                    try {
                        Libcore.os.close(dupedFd);
                    } catch (ErrnoException e) {
                        Log.e(TAG, e.getMessage(), e);
                    }
                }
            }

            public void run() {
                int res = addTrack();
                if (mEventHandler != null) {
                    Message m = mEventHandler.obtainMessage(MEDIA_INFO, res, 0, null);
                    mEventHandler.sendMessage(m);
                }
                // One-shot worker: quit after processing this source.
                thread.getLooper().quitSafely();
            }
        });
    }
+
    /**
     * Returns the index of the audio, video, or subtitle track currently selected for playback,
     * The return value is an index into the array returned by {@link #getTrackInfo()}, and can
     * be used in calls to {@link #selectTrack(int)} or {@link #deselectTrack(int)}.
     *
     * @param trackType should be one of {@link TrackInfo#MEDIA_TRACK_TYPE_VIDEO},
     * {@link TrackInfo#MEDIA_TRACK_TYPE_AUDIO}, or
     * {@link TrackInfo#MEDIA_TRACK_TYPE_SUBTITLE}
     * @return index of the audio, video, or subtitle track currently selected for playback;
     * a negative integer is returned when there is no selected track for {@code trackType} or
     * when {@code trackType} is not one of audio, video, or subtitle.
     * @throws IllegalStateException if called after {@link #release()}
     *
     * @see #getTrackInfo()
     * @see #selectTrack(int)
     * @see #deselectTrack(int)
     */
    public int getSelectedTrack(int trackType) throws IllegalStateException {
        // Subtitle/timed-text selection is tracked by the SubtitleController;
        // consult it first and map the track back to its merged index.
        if (mSubtitleController != null
                && (trackType == TrackInfo.MEDIA_TRACK_TYPE_SUBTITLE
                || trackType == TrackInfo.MEDIA_TRACK_TYPE_TIMEDTEXT)) {
            SubtitleTrack subtitleTrack = mSubtitleController.getSelectedTrack();
            if (subtitleTrack != null) {
                synchronized (mIndexTrackPairs) {
                    for (int i = 0; i < mIndexTrackPairs.size(); i++) {
                        Pair<Integer, SubtitleTrack> p = mIndexTrackPairs.get(i);
                        if (p.second == subtitleTrack && subtitleTrack.getTrackType() == trackType) {
                            return i;
                        }
                    }
                }
            }
        }

        // Otherwise ask the native player, then translate its inband index
        // into an index into the merged (inband + out-of-band) list.
        Parcel request = Parcel.obtain();
        Parcel reply = Parcel.obtain();
        try {
            request.writeInterfaceToken(IMEDIA_PLAYER);
            request.writeInt(INVOKE_ID_GET_SELECTED_TRACK);
            request.writeInt(trackType);
            invoke(request, reply);
            int inbandTrackIndex = reply.readInt();
            synchronized (mIndexTrackPairs) {
                for (int i = 0; i < mIndexTrackPairs.size(); i++) {
                    Pair<Integer, SubtitleTrack> p = mIndexTrackPairs.get(i);
                    if (p.first != null && p.first == inbandTrackIndex) {
                        return i;
                    }
                }
            }
            return -1;
        } finally {
            request.recycle();
            reply.recycle();
        }
    }
+
+ /**
+ * Selects a track.
+ * <p>
+ * If a MediaPlayer is in invalid state, it throws an IllegalStateException exception.
+ * If a MediaPlayer is in <em>Started</em> state, the selected track is presented immediately.
+ * If a MediaPlayer is not in Started state, it just marks the track to be played.
+ * </p>
+ * <p>
+ * In any valid state, if it is called multiple times on the same type of track (ie. Video,
+ * Audio, Timed Text), the most recent one will be chosen.
+ * </p>
+ * <p>
+ * The first audio and video tracks are selected by default if available, even though
+ * this method is not called. However, no timed text track will be selected until
+ * this function is called.
+ * </p>
+ * <p>
+ * Currently, only timed text tracks or audio tracks can be selected via this method.
+ * In addition, the support for selecting an audio track at runtime is pretty limited
+ * in that an audio track can only be selected in the <em>Prepared</em> state.
+ * </p>
+ * @param index the index of the track to be selected. The valid range of the index
+ * is 0..total number of track - 1. The total number of tracks as well as the type of
+ * each individual track can be found by calling {@link #getTrackInfo()} method.
+ * @throws IllegalStateException if called in an invalid state.
+ *
+ * @see android.media.MediaPlayer#getTrackInfo
+ */
+ public void selectTrack(int index) throws IllegalStateException {
+ selectOrDeselectTrack(index, true /* select */);
+ }
+
+    /**
+     * Deselects a track.
+     * <p>
+     * Only a timed text track may currently be deselected; audio and video
+     * tracks cannot be. Deselecting a timed text track identified by index
+     * that has never been selected throws an exception.
+     * </p>
+     * @param index the index of the track to be deselected. The valid range of the index
+     * is 0..total number of tracks - 1. The total number of tracks as well as the type of
+     * each individual track can be found by calling {@link #getTrackInfo()} method.
+     * @throws IllegalStateException if called in an invalid state.
+     *
+     * @see android.media.MediaPlayer#getTrackInfo
+     */
+    public void deselectTrack(int index) throws IllegalStateException {
+        // Delegate to the shared (de)select path in "deselect" mode.
+        final boolean select = false;
+        selectOrDeselectTrack(index, select);
+    }
+
+    /**
+     * Common implementation behind {@link #selectTrack} and {@link #deselectTrack}.
+     * Out-of-band tracks (those with a non-null SubtitleTrack) are routed to the
+     * SubtitleController; in-band tracks are forwarded to the native player.
+     *
+     * @param index index into the combined in-band/out-of-band track list
+     * @param select true to select the track, false to deselect it
+     * @throws IllegalStateException if the player is in an invalid state.
+     */
+    private void selectOrDeselectTrack(int index, boolean select)
+            throws IllegalStateException {
+        // handle subtitle track through subtitle controller
+        populateInbandTracks();
+
+        Pair<Integer,SubtitleTrack> p = null;
+        // Bounds-check explicitly instead of catching ArrayIndexOutOfBoundsException:
+        // an ArrayList-backed list throws plain IndexOutOfBoundsException for
+        // index >= size, which the old catch did not cover. Hold the list's lock
+        // for consistency with the synchronized access further below.
+        synchronized (mIndexTrackPairs) {
+            if (index < 0 || index >= mIndexTrackPairs.size()) {
+                // ignore bad index
+                return;
+            }
+            p = mIndexTrackPairs.get(index);
+        }
+
+        SubtitleTrack track = p.second;
+        if (track == null) {
+            // inband (de)select
+            selectOrDeselectInbandTrack(p.first, select);
+            return;
+        }
+
+        // Out-of-band tracks require the subtitle controller.
+        if (mSubtitleController == null) {
+            return;
+        }
+
+        if (!select) {
+            // out-of-band deselect
+            if (mSubtitleController.getSelectedTrack() == track) {
+                mSubtitleController.selectTrack(null);
+            } else {
+                Log.w(TAG, "trying to deselect track that was not selected");
+            }
+            return;
+        }
+
+        // out-of-band select
+        if (track.getTrackType() == TrackInfo.MEDIA_TRACK_TYPE_TIMEDTEXT) {
+            int ttIndex = getSelectedTrack(TrackInfo.MEDIA_TRACK_TYPE_TIMEDTEXT);
+            synchronized (mIndexTrackPairs) {
+                if (ttIndex >= 0 && ttIndex < mIndexTrackPairs.size()) {
+                    Pair<Integer,SubtitleTrack> p2 = mIndexTrackPairs.get(ttIndex);
+                    if (p2.first != null && p2.second == null) {
+                        // deselect inband counterpart
+                        selectOrDeselectInbandTrack(p2.first, false);
+                    }
+                }
+            }
+        }
+        mSubtitleController.selectTrack(track);
+    }
+
+ private void selectOrDeselectInbandTrack(int index, boolean select)
+ throws IllegalStateException {
+ Parcel request = Parcel.obtain();
+ Parcel reply = Parcel.obtain();
+ try {
+ request.writeInterfaceToken(IMEDIA_PLAYER);
+ request.writeInt(select? INVOKE_ID_SELECT_TRACK: INVOKE_ID_DESELECT_TRACK);
+ request.writeInt(index);
+ invoke(request, reply);
+ } finally {
+ request.recycle();
+ reply.recycle();
+ }
+ }
+
+
+    /**
+     * Pulls battery-usage statistics for media playback from the native media
+     * server into the supplied parcel.
+     *
+     * @param reply Parcel with audio/video duration info for battery
+     *              tracking usage
+     * @return The status code.
+     * {@hide}
+     */
+    public static native int native_pullBatteryData(Parcel reply);
+
+ /**
+ * Sets the target UDP re-transmit endpoint for the low level player.
+ * Generally, the address portion of the endpoint is an IP multicast
+ * address, although a unicast address would be equally valid. When a valid
+ * retransmit endpoint has been set, the media player will not decode and
+ * render the media presentation locally. Instead, the player will attempt
+ * to re-multiplex its media data using the Android@Home RTP profile and
+ * re-transmit to the target endpoint. Receiver devices (which may be
+ * either the same as the transmitting device or different devices) may
+ * instantiate, prepare, and start a receiver player using a setDataSource
+ * URL of the form...
+ *
+ * aahRX://&lt;multicastIP&gt;:&lt;port&gt;
+ *
+ * to receive, decode and render the re-transmitted content.
+ *
+ * setRetransmitEndpoint may only be called before setDataSource has been
+ * called; while the player is in the Idle state.
+ *
+ * @param endpoint the address and UDP port of the re-transmission target or
+ * null if no re-transmission is to be performed.
+ * @throws IllegalStateException if it is called in an invalid state
+ * @throws IllegalArgumentException if the retransmit endpoint is supplied,
+ * but invalid.
+ *
+ * {@hide} pending API council
+ */
+ public void setRetransmitEndpoint(InetSocketAddress endpoint)
+ throws IllegalStateException, IllegalArgumentException
+ {
+ String addrString = null;
+ int port = 0;
+
+ if (null != endpoint) {
+ addrString = endpoint.getAddress().getHostAddress();
+ port = endpoint.getPort();
+ }
+
+ int ret = native_setRetransmitEndpoint(addrString, port);
+ if (ret != 0) {
+ throw new IllegalArgumentException("Illegal re-transmit endpoint; native ret " + ret);
+ }
+ }
+
+    // JNI hook backing setRetransmitEndpoint(); a non-zero return indicates the
+    // native layer rejected the endpoint (the caller turns it into an
+    // IllegalArgumentException).
+    private native final int native_setRetransmitEndpoint(String addrString, int port);
+
+    @Override
+    protected void finalize() {
+        // Tear down the native peer even if releasing the PlayerBase
+        // bookkeeping throws.
+        try {
+            baseRelease();
+        } finally {
+            native_finalize();
+        }
+    }
+
+    /* Do not change these values without updating their counterparts
+     * in include/media/mediaplayer.h!
+     */
+    // Event codes posted by the native player and dispatched in
+    // EventHandler.handleMessage().
+    private static final int MEDIA_NOP = 0; // interface test message
+    private static final int MEDIA_PREPARED = 1;
+    private static final int MEDIA_PLAYBACK_COMPLETE = 2;
+    private static final int MEDIA_BUFFERING_UPDATE = 3;
+    private static final int MEDIA_SEEK_COMPLETE = 4;
+    private static final int MEDIA_SET_VIDEO_SIZE = 5;
+    private static final int MEDIA_STARTED = 6;
+    private static final int MEDIA_PAUSED = 7;
+    private static final int MEDIA_STOPPED = 8;
+    private static final int MEDIA_SKIPPED = 9;
+    private static final int MEDIA_TIMED_TEXT = 99;
+    private static final int MEDIA_ERROR = 100;
+    private static final int MEDIA_INFO = 200;
+    private static final int MEDIA_SUBTITLE_DATA = 201;
+    private static final int MEDIA_META_DATA = 202;
+    private static final int MEDIA_DRM_INFO = 210;
+
+    private TimeProvider mTimeProvider;
+
+    /** @hide */
+    public MediaTimeProvider getMediaTimeProvider() {
+        // Lazily create the shared TimeProvider on first use.
+        if (mTimeProvider != null) {
+            return mTimeProvider;
+        }
+        mTimeProvider = new TimeProvider(this);
+        return mTimeProvider;
+    }
+
+    /**
+     * Receives the events posted by postEventFromNative() on the
+     * application-selected looper and dispatches them to the registered
+     * listeners.
+     */
+    private class EventHandler extends Handler
+    {
+        private MediaPlayer mMediaPlayer;
+
+        public EventHandler(MediaPlayer mp, Looper looper) {
+            super(looper);
+            mMediaPlayer = mp;
+        }
+
+        @Override
+        public void handleMessage(Message msg) {
+            // A zero native context means the native player has been released;
+            // drop any late events.
+            if (mMediaPlayer.mNativeContext == 0) {
+                Log.w(TAG, "mediaplayer went away with unhandled events");
+                return;
+            }
+            switch(msg.what) {
+            case MEDIA_PREPARED:
+                try {
+                    scanInternalSubtitleTracks();
+                } catch (RuntimeException e) {
+                    // send error message instead of crashing;
+                    // send error message instead of inlining a call to onError
+                    // to avoid code duplication.
+                    Message msg2 = obtainMessage(
+                            MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, MEDIA_ERROR_UNSUPPORTED, null);
+                    sendMessage(msg2);
+                }
+
+                // Copy the listener field to a local so the null check and the
+                // callback see the same value.
+                OnPreparedListener onPreparedListener = mOnPreparedListener;
+                if (onPreparedListener != null)
+                    onPreparedListener.onPrepared(mMediaPlayer);
+                return;
+
+            case MEDIA_DRM_INFO:
+                Log.v(TAG, "MEDIA_DRM_INFO " + mOnDrmInfoHandlerDelegate);
+
+                if (msg.obj == null) {
+                    Log.w(TAG, "MEDIA_DRM_INFO msg.obj=NULL");
+                } else if (msg.obj instanceof Parcel) {
+                    // The parcel was parsed already in postEventFromNative
+                    DrmInfo drmInfo = null;
+
+                    OnDrmInfoHandlerDelegate onDrmInfoHandlerDelegate;
+                    synchronized (mDrmLock) {
+                        if (mOnDrmInfoHandlerDelegate != null && mDrmInfo != null) {
+                            drmInfo = mDrmInfo.makeCopy();
+                        }
+                        // local copy while keeping the lock
+                        onDrmInfoHandlerDelegate = mOnDrmInfoHandlerDelegate;
+                    }
+
+                    // notifying the client outside the lock
+                    if (onDrmInfoHandlerDelegate != null) {
+                        onDrmInfoHandlerDelegate.notifyClient(drmInfo);
+                    }
+                } else {
+                    Log.w(TAG, "MEDIA_DRM_INFO msg.obj of unexpected type " + msg.obj);
+                }
+                return;
+
+            case MEDIA_PLAYBACK_COMPLETE:
+                {
+                    // Internal listener first (updates PlayerBase state), then
+                    // the client's listener.
+                    mOnCompletionInternalListener.onCompletion(mMediaPlayer);
+                    OnCompletionListener onCompletionListener = mOnCompletionListener;
+                    if (onCompletionListener != null)
+                        onCompletionListener.onCompletion(mMediaPlayer);
+                }
+                stayAwake(false);
+                return;
+
+            case MEDIA_STOPPED:
+                {
+                    TimeProvider timeProvider = mTimeProvider;
+                    if (timeProvider != null) {
+                        timeProvider.onStopped();
+                    }
+                }
+                break;
+
+            case MEDIA_STARTED:
+            case MEDIA_PAUSED:
+                {
+                    TimeProvider timeProvider = mTimeProvider;
+                    if (timeProvider != null) {
+                        timeProvider.onPaused(msg.what == MEDIA_PAUSED);
+                    }
+                }
+                break;
+
+            case MEDIA_BUFFERING_UPDATE:
+                OnBufferingUpdateListener onBufferingUpdateListener = mOnBufferingUpdateListener;
+                if (onBufferingUpdateListener != null)
+                    onBufferingUpdateListener.onBufferingUpdate(mMediaPlayer, msg.arg1);
+                return;
+
+            case MEDIA_SEEK_COMPLETE:
+                OnSeekCompleteListener onSeekCompleteListener = mOnSeekCompleteListener;
+                if (onSeekCompleteListener != null) {
+                    onSeekCompleteListener.onSeekComplete(mMediaPlayer);
+                }
+                // fall through
+
+            case MEDIA_SKIPPED:
+                {
+                    // Deliberate fall-through: both events notify the
+                    // TimeProvider of a completed seek.
+                    TimeProvider timeProvider = mTimeProvider;
+                    if (timeProvider != null) {
+                        timeProvider.onSeekComplete(mMediaPlayer);
+                    }
+                }
+                return;
+
+            case MEDIA_SET_VIDEO_SIZE:
+                OnVideoSizeChangedListener onVideoSizeChangedListener = mOnVideoSizeChangedListener;
+                if (onVideoSizeChangedListener != null) {
+                    onVideoSizeChangedListener.onVideoSizeChanged(
+                            mMediaPlayer, msg.arg1, msg.arg2);
+                }
+                return;
+
+            case MEDIA_ERROR:
+                Log.e(TAG, "Error (" + msg.arg1 + "," + msg.arg2 + ")");
+                boolean error_was_handled = false;
+                OnErrorListener onErrorListener = mOnErrorListener;
+                if (onErrorListener != null) {
+                    error_was_handled = onErrorListener.onError(mMediaPlayer, msg.arg1, msg.arg2);
+                }
+                {
+                    // Unhandled errors additionally complete playback.
+                    mOnCompletionInternalListener.onCompletion(mMediaPlayer);
+                    OnCompletionListener onCompletionListener = mOnCompletionListener;
+                    if (onCompletionListener != null && ! error_was_handled) {
+                        onCompletionListener.onCompletion(mMediaPlayer);
+                    }
+                }
+                stayAwake(false);
+                return;
+
+            case MEDIA_INFO:
+                switch (msg.arg1) {
+                case MEDIA_INFO_VIDEO_TRACK_LAGGING:
+                    Log.i(TAG, "Info (" + msg.arg1 + "," + msg.arg2 + ")");
+                    break;
+                case MEDIA_INFO_METADATA_UPDATE:
+                    try {
+                        scanInternalSubtitleTracks();
+                    } catch (RuntimeException e) {
+                        Message msg2 = obtainMessage(
+                                MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, MEDIA_ERROR_UNSUPPORTED, null);
+                        sendMessage(msg2);
+                    }
+                    // fall through
+
+                case MEDIA_INFO_EXTERNAL_METADATA_UPDATE:
+                    // The hidden external-only code is rewritten to the public
+                    // one before reaching the client's OnInfoListener below.
+                    msg.arg1 = MEDIA_INFO_METADATA_UPDATE;
+                    // update default track selection
+                    if (mSubtitleController != null) {
+                        mSubtitleController.selectDefaultTrack();
+                    }
+                    break;
+                case MEDIA_INFO_BUFFERING_START:
+                case MEDIA_INFO_BUFFERING_END:
+                    TimeProvider timeProvider = mTimeProvider;
+                    if (timeProvider != null) {
+                        timeProvider.onBuffering(msg.arg1 == MEDIA_INFO_BUFFERING_START);
+                    }
+                    break;
+                }
+
+                OnInfoListener onInfoListener = mOnInfoListener;
+                if (onInfoListener != null) {
+                    onInfoListener.onInfo(mMediaPlayer, msg.arg1, msg.arg2);
+                }
+                // No real default action so far.
+                return;
+            case MEDIA_TIMED_TEXT:
+                OnTimedTextListener onTimedTextListener = mOnTimedTextListener;
+                if (onTimedTextListener == null)
+                    return;
+                if (msg.obj == null) {
+                    onTimedTextListener.onTimedText(mMediaPlayer, null);
+                } else {
+                    if (msg.obj instanceof Parcel) {
+                        Parcel parcel = (Parcel)msg.obj;
+                        TimedText text = new TimedText(parcel);
+                        parcel.recycle();
+                        onTimedTextListener.onTimedText(mMediaPlayer, text);
+                    }
+                }
+                return;
+
+            case MEDIA_SUBTITLE_DATA:
+                OnSubtitleDataListener onSubtitleDataListener = mOnSubtitleDataListener;
+                if (onSubtitleDataListener == null) {
+                    return;
+                }
+                if (msg.obj instanceof Parcel) {
+                    Parcel parcel = (Parcel) msg.obj;
+                    SubtitleData data = new SubtitleData(parcel);
+                    parcel.recycle();
+                    onSubtitleDataListener.onSubtitleData(mMediaPlayer, data);
+                }
+                return;
+
+            case MEDIA_META_DATA:
+                OnTimedMetaDataAvailableListener onTimedMetaDataAvailableListener =
+                    mOnTimedMetaDataAvailableListener;
+                if (onTimedMetaDataAvailableListener == null) {
+                    return;
+                }
+                if (msg.obj instanceof Parcel) {
+                    Parcel parcel = (Parcel) msg.obj;
+                    TimedMetaData data = TimedMetaData.createTimedMetaDataFromParcel(parcel);
+                    parcel.recycle();
+                    onTimedMetaDataAvailableListener.onTimedMetaDataAvailable(mMediaPlayer, data);
+                }
+                return;
+
+            case MEDIA_NOP: // interface test message - ignore
+                break;
+
+            default:
+                Log.e(TAG, "Unknown message type " + msg.what);
+                return;
+            }
+        }
+    }
+
+    /*
+     * Called from native code when an interesting event happens. This method
+     * just uses the EventHandler system to post the event back to the main app thread.
+     * We use a weak reference to the original MediaPlayer object so that the native
+     * code is safe from the object disappearing from underneath it. (This is
+     * the cookie passed to native_setup().)
+     */
+    private static void postEventFromNative(Object mediaplayer_ref,
+                                            int what, int arg1, int arg2, Object obj)
+    {
+        final MediaPlayer mp = (MediaPlayer)((WeakReference)mediaplayer_ref).get();
+        if (mp == null) {
+            // The Java-side player was garbage collected; nothing to notify.
+            return;
+        }
+
+        // A few events need processing on this (native callback) thread before
+        // the message is forwarded to the EventHandler.
+        switch (what) {
+        case MEDIA_INFO:
+            if (arg1 == MEDIA_INFO_STARTED_AS_NEXT) {
+                new Thread(new Runnable() {
+                    @Override
+                    public void run() {
+                        // this acquires the wakelock if needed, and sets the client side state
+                        mp.start();
+                    }
+                }).start();
+                // Scheduler hint so the just-started thread can run promptly.
+                Thread.yield();
+            }
+            break;
+
+        case MEDIA_DRM_INFO:
+            // We need to derive mDrmInfo before prepare() returns so processing it here
+            // before the notification is sent to EventHandler below. EventHandler runs in the
+            // notification looper so its handleMessage might process the event after prepare()
+            // has returned.
+            Log.v(TAG, "postEventFromNative MEDIA_DRM_INFO");
+            if (obj instanceof Parcel) {
+                Parcel parcel = (Parcel)obj;
+                DrmInfo drmInfo = new DrmInfo(parcel);
+                synchronized (mp.mDrmLock) {
+                    mp.mDrmInfo = drmInfo;
+                }
+            } else {
+                Log.w(TAG, "MEDIA_DRM_INFO msg.obj of unexpected type " + obj);
+            }
+            break;
+
+        case MEDIA_PREPARED:
+            // By this time, we've learned about DrmInfo's presence or absence. This is meant
+            // mainly for prepareAsync() use case. For prepare(), this still can run to a race
+            // condition b/c MediaPlayerNative releases the prepare() lock before calling notify
+            // so we also set mDrmInfoResolved in prepare().
+            synchronized (mp.mDrmLock) {
+                mp.mDrmInfoResolved = true;
+            }
+            break;
+
+        }
+
+        if (mp.mEventHandler != null) {
+            Message m = mp.mEventHandler.obtainMessage(what, arg1, arg2, obj);
+            mp.mEventHandler.sendMessage(m);
+        }
+    }
+
+    /**
+     * Interface definition for a callback to be invoked when the media
+     * source is ready for playback.
+     */
+    public interface OnPreparedListener
+    {
+        /**
+         * Called when the media file is ready for playback.
+         *
+         * @param mp the MediaPlayer that is ready for playback
+         */
+        void onPrepared(MediaPlayer mp);
+    }
+
+    /**
+     * Register a callback to be invoked when the media source is ready
+     * for playback.
+     *
+     * @param listener the callback that will be run
+     */
+    public void setOnPreparedListener(OnPreparedListener listener)
+    {
+        mOnPreparedListener = listener;
+    }
+
+    // Read by EventHandler when MEDIA_PREPARED is dispatched.
+    private OnPreparedListener mOnPreparedListener;
+
+    /**
+     * Interface definition for a callback to be invoked when playback of
+     * a media source has completed.
+     */
+    public interface OnCompletionListener
+    {
+        /**
+         * Called when the end of a media source is reached during playback.
+         *
+         * @param mp the MediaPlayer that reached the end of the file
+         */
+        void onCompletion(MediaPlayer mp);
+    }
+
+    /**
+     * Register a callback to be invoked when the end of a media source
+     * has been reached during playback.
+     *
+     * @param listener the callback that will be run
+     */
+    public void setOnCompletionListener(OnCompletionListener listener)
+    {
+        mOnCompletionListener = listener;
+    }
+
+    // Read by EventHandler on MEDIA_PLAYBACK_COMPLETE and unhandled MEDIA_ERROR.
+    private OnCompletionListener mOnCompletionListener;
+
+    /**
+     * @hide
+     * Internal completion listener to update PlayerBase of the play state. Always "registered".
+     */
+    private final OnCompletionListener mOnCompletionInternalListener = new OnCompletionListener() {
+        @Override
+        public void onCompletion(MediaPlayer mp) {
+            // Keep PlayerBase's play-state bookkeeping in sync.
+            baseStop();
+        }
+    };
+
+    /**
+     * Interface definition of a callback to be invoked indicating buffering
+     * status of a media resource being streamed over the network.
+     */
+    public interface OnBufferingUpdateListener
+    {
+        /**
+         * Called to update status in buffering a media stream received through
+         * progressive HTTP download. The received buffering percentage
+         * indicates how much of the content has been buffered or played.
+         * For example a buffering update of 80 percent when half the content
+         * has already been played indicates that the next 30 percent of the
+         * content to play has been buffered.
+         *
+         * @param mp the MediaPlayer the update pertains to
+         * @param percent the percentage (0-100) of the content
+         *                that has been buffered or played thus far
+         */
+        void onBufferingUpdate(MediaPlayer mp, int percent);
+    }
+
+    /**
+     * Register a callback to be invoked when the status of a network
+     * stream's buffer has changed.
+     *
+     * @param listener the callback that will be run.
+     */
+    public void setOnBufferingUpdateListener(OnBufferingUpdateListener listener)
+    {
+        mOnBufferingUpdateListener = listener;
+    }
+
+    // Read by EventHandler when MEDIA_BUFFERING_UPDATE is dispatched.
+    private OnBufferingUpdateListener mOnBufferingUpdateListener;
+
+    /**
+     * Interface definition of a callback to be invoked indicating
+     * the completion of a seek operation.
+     */
+    public interface OnSeekCompleteListener
+    {
+        /**
+         * Called to indicate the completion of a seek operation.
+         *
+         * @param mp the MediaPlayer that issued the seek operation
+         */
+        public void onSeekComplete(MediaPlayer mp);
+    }
+
+    /**
+     * Register a callback to be invoked when a seek operation has been
+     * completed.
+     *
+     * @param listener the callback that will be run
+     */
+    public void setOnSeekCompleteListener(OnSeekCompleteListener listener)
+    {
+        mOnSeekCompleteListener = listener;
+    }
+
+    // Read by EventHandler when MEDIA_SEEK_COMPLETE is dispatched.
+    private OnSeekCompleteListener mOnSeekCompleteListener;
+
+    /**
+     * Interface definition of a callback to be invoked when the
+     * video size is first known or updated
+     */
+    public interface OnVideoSizeChangedListener
+    {
+        /**
+         * Called to indicate the video size
+         *
+         * The video size (width and height) could be 0 if there was no video,
+         * no display surface was set, or the value was not determined yet.
+         *
+         * @param mp the MediaPlayer associated with this callback
+         * @param width the width of the video
+         * @param height the height of the video
+         */
+        public void onVideoSizeChanged(MediaPlayer mp, int width, int height);
+    }
+
+    /**
+     * Register a callback to be invoked when the video size is
+     * known or updated.
+     *
+     * @param listener the callback that will be run
+     */
+    public void setOnVideoSizeChangedListener(OnVideoSizeChangedListener listener)
+    {
+        mOnVideoSizeChangedListener = listener;
+    }
+
+    // Read by EventHandler when MEDIA_SET_VIDEO_SIZE is dispatched.
+    private OnVideoSizeChangedListener mOnVideoSizeChangedListener;
+
+    /**
+     * Interface definition of a callback to be invoked when a
+     * timed text is available for display.
+     */
+    public interface OnTimedTextListener
+    {
+        /**
+         * Called to indicate an available timed text
+         *
+         * @param mp the MediaPlayer associated with this callback
+         * @param text the timed text sample which contains the text
+         *             needed to be displayed and the display format.
+         */
+        public void onTimedText(MediaPlayer mp, TimedText text);
+    }
+
+    /**
+     * Register a callback to be invoked when a timed text is available
+     * for display.
+     *
+     * @param listener the callback that will be run
+     */
+    public void setOnTimedTextListener(OnTimedTextListener listener)
+    {
+        mOnTimedTextListener = listener;
+    }
+
+    // Read by EventHandler when MEDIA_TIMED_TEXT is dispatched.
+    private OnTimedTextListener mOnTimedTextListener;
+
+    /**
+     * Interface definition of a callback to be invoked when a
+     * track has data available.
+     *
+     * @hide
+     */
+    public interface OnSubtitleDataListener
+    {
+        /**
+         * Called when a subtitle data sample has been parsed from the stream.
+         *
+         * @param mp the MediaPlayer associated with this callback
+         * @param data the subtitle data sample
+         */
+        public void onSubtitleData(MediaPlayer mp, SubtitleData data);
+    }
+
+    /**
+     * Register a callback to be invoked when a track has data available.
+     *
+     * @param listener the callback that will be run
+     *
+     * @hide
+     */
+    public void setOnSubtitleDataListener(OnSubtitleDataListener listener)
+    {
+        mOnSubtitleDataListener = listener;
+    }
+
+    // Read by EventHandler when MEDIA_SUBTITLE_DATA is dispatched.
+    private OnSubtitleDataListener mOnSubtitleDataListener;
+
+    /**
+     * Interface definition of a callback to be invoked when a
+     * track has timed metadata available.
+     *
+     * @see MediaPlayer#setOnTimedMetaDataAvailableListener(OnTimedMetaDataAvailableListener)
+     */
+    public interface OnTimedMetaDataAvailableListener
+    {
+        /**
+         * Called to indicate available timed metadata
+         * <p>
+         * This method will be called as timed metadata is extracted from the media,
+         * in the same order as it occurs in the media. The timing of this event is
+         * not controlled by the associated timestamp.
+         *
+         * @param mp the MediaPlayer associated with this callback
+         * @param data the timed metadata sample associated with this event
+         */
+        public void onTimedMetaDataAvailable(MediaPlayer mp, TimedMetaData data);
+    }
+
+    /**
+     * Register a callback to be invoked when a selected track has timed metadata available.
+     * <p>
+     * Currently only HTTP live streaming data URI's embedded with timed ID3 tags generates
+     * {@link TimedMetaData}.
+     *
+     * @see MediaPlayer#selectTrack(int)
+     * @see MediaPlayer.OnTimedMetaDataAvailableListener
+     * @see TimedMetaData
+     *
+     * @param listener the callback that will be run
+     */
+    public void setOnTimedMetaDataAvailableListener(OnTimedMetaDataAvailableListener listener)
+    {
+        mOnTimedMetaDataAvailableListener = listener;
+    }
+
+    // Read by EventHandler when MEDIA_META_DATA is dispatched.
+    private OnTimedMetaDataAvailableListener mOnTimedMetaDataAvailableListener;
+
+    /* Do not change these values without updating their counterparts
+     * in include/media/mediaplayer.h!
+     */
+    /** Unspecified media player error.
+     * @see android.media.MediaPlayer.OnErrorListener
+     */
+    public static final int MEDIA_ERROR_UNKNOWN = 1;
+
+    /** Media server died. In this case, the application must release the
+     * MediaPlayer object and instantiate a new one.
+     * @see android.media.MediaPlayer.OnErrorListener
+     */
+    public static final int MEDIA_ERROR_SERVER_DIED = 100;
+
+    /** The video is streamed and its container is not valid for progressive
+     * playback i.e the video's index (e.g moov atom) is not at the start of the
+     * file.
+     * @see android.media.MediaPlayer.OnErrorListener
+     */
+    public static final int MEDIA_ERROR_NOT_VALID_FOR_PROGRESSIVE_PLAYBACK = 200;
+
+    /** File or network related operation errors. */
+    public static final int MEDIA_ERROR_IO = -1004;
+    /** Bitstream is not conforming to the related coding standard or file spec. */
+    public static final int MEDIA_ERROR_MALFORMED = -1007;
+    /** Bitstream is conforming to the related coding standard or file spec, but
+     * the media framework does not support the feature. */
+    public static final int MEDIA_ERROR_UNSUPPORTED = -1010;
+    /** Some operation takes too long to complete, usually more than 3-5 seconds. */
+    public static final int MEDIA_ERROR_TIMED_OUT = -110;
+
+    /** Unspecified low-level system error. This value originated from UNKNOWN_ERROR in
+     * system/core/include/utils/Errors.h
+     * @see android.media.MediaPlayer.OnErrorListener
+     * @hide
+     */
+    public static final int MEDIA_ERROR_SYSTEM = -2147483648;
+
+    /**
+     * Interface definition of a callback to be invoked when there
+     * has been an error during an asynchronous operation (other errors
+     * will throw exceptions at method call time).
+     */
+    public interface OnErrorListener
+    {
+        /**
+         * Called to indicate an error.
+         *
+         * @param mp the MediaPlayer the error pertains to
+         * @param what the type of error that has occurred:
+         * <ul>
+         * <li>{@link #MEDIA_ERROR_UNKNOWN}
+         * <li>{@link #MEDIA_ERROR_SERVER_DIED}
+         * </ul>
+         * @param extra an extra code, specific to the error. Typically
+         * implementation dependent.
+         * <ul>
+         * <li>{@link #MEDIA_ERROR_IO}
+         * <li>{@link #MEDIA_ERROR_MALFORMED}
+         * <li>{@link #MEDIA_ERROR_UNSUPPORTED}
+         * <li>{@link #MEDIA_ERROR_TIMED_OUT}
+         * <li><code>MEDIA_ERROR_SYSTEM (-2147483648)</code> - low-level system error.
+         * </ul>
+         * @return True if the method handled the error, false if it didn't.
+         * Returning false, or not having an OnErrorListener at all, will
+         * cause the OnCompletionListener to be called.
+         */
+        boolean onError(MediaPlayer mp, int what, int extra);
+    }
+
+    /**
+     * Register a callback to be invoked when an error has happened
+     * during an asynchronous operation.
+     *
+     * @param listener the callback that will be run
+     */
+    public void setOnErrorListener(OnErrorListener listener)
+    {
+        mOnErrorListener = listener;
+    }
+
+    // Read by EventHandler when MEDIA_ERROR is dispatched.
+    private OnErrorListener mOnErrorListener;
+
+
+    /* Do not change these values without updating their counterparts
+     * in include/media/mediaplayer.h!
+     */
+    /** Unspecified media player info.
+     * @see android.media.MediaPlayer.OnInfoListener
+     */
+    public static final int MEDIA_INFO_UNKNOWN = 1;
+
+    /** The player was started because it was used as the next player for another
+     * player, which just completed playback.
+     * @see android.media.MediaPlayer.OnInfoListener
+     * @hide
+     */
+    public static final int MEDIA_INFO_STARTED_AS_NEXT = 2;
+
+    /** The player just pushed the very first video frame for rendering.
+     * @see android.media.MediaPlayer.OnInfoListener
+     */
+    public static final int MEDIA_INFO_VIDEO_RENDERING_START = 3;
+
+    /** The video is too complex for the decoder: it can't decode frames fast
+     *  enough. Possibly only the audio plays fine at this stage.
+     * @see android.media.MediaPlayer.OnInfoListener
+     */
+    public static final int MEDIA_INFO_VIDEO_TRACK_LAGGING = 700;
+
+    /** MediaPlayer is temporarily pausing playback internally in order to
+     * buffer more data.
+     * @see android.media.MediaPlayer.OnInfoListener
+     */
+    public static final int MEDIA_INFO_BUFFERING_START = 701;
+
+    /** MediaPlayer is resuming playback after filling buffers.
+     * @see android.media.MediaPlayer.OnInfoListener
+     */
+    public static final int MEDIA_INFO_BUFFERING_END = 702;
+
+    /** Estimated network bandwidth information (kbps) is available; currently this event fires
+     * simultaneously as {@link #MEDIA_INFO_BUFFERING_START} and {@link #MEDIA_INFO_BUFFERING_END}
+     * when playing network files.
+     * @see android.media.MediaPlayer.OnInfoListener
+     * @hide
+     */
+    public static final int MEDIA_INFO_NETWORK_BANDWIDTH = 703;
+
+    /** Bad interleaving means that a media has been improperly interleaved or
+     * not interleaved at all, e.g has all the video samples first then all the
+     * audio ones. Video is playing but a lot of disk seeks may be happening.
+     * @see android.media.MediaPlayer.OnInfoListener
+     */
+    public static final int MEDIA_INFO_BAD_INTERLEAVING = 800;
+
+    /** The media cannot be seeked (e.g live stream)
+     * @see android.media.MediaPlayer.OnInfoListener
+     */
+    public static final int MEDIA_INFO_NOT_SEEKABLE = 801;
+
+    /** A new set of metadata is available.
+     * @see android.media.MediaPlayer.OnInfoListener
+     */
+    public static final int MEDIA_INFO_METADATA_UPDATE = 802;
+
+    /** A new set of external-only metadata is available.  Used by
+     *  JAVA framework to avoid triggering track scanning.
+     * @hide
+     */
+    public static final int MEDIA_INFO_EXTERNAL_METADATA_UPDATE = 803;
+
+    /** Informs that audio is not playing. Note that playback of the video
+     * is not interrupted.
+     * @see android.media.MediaPlayer.OnInfoListener
+     */
+    public static final int MEDIA_INFO_AUDIO_NOT_PLAYING = 804;
+
+    /** Informs that video is not playing. Note that playback of the audio
+     * is not interrupted.
+     * @see android.media.MediaPlayer.OnInfoListener
+     */
+    public static final int MEDIA_INFO_VIDEO_NOT_PLAYING = 805;
+
+    /** Failed to handle timed text track properly.
+     * @see android.media.MediaPlayer.OnInfoListener
+     *
+     * {@hide}
+     */
+    public static final int MEDIA_INFO_TIMED_TEXT_ERROR = 900;
+
+    /** Subtitle track was not supported by the media framework.
+     * @see android.media.MediaPlayer.OnInfoListener
+     */
+    public static final int MEDIA_INFO_UNSUPPORTED_SUBTITLE = 901;
+
+    /** Reading the subtitle track takes too long.
+     * @see android.media.MediaPlayer.OnInfoListener
+     */
+    public static final int MEDIA_INFO_SUBTITLE_TIMED_OUT = 902;
+
+    /**
+     * Interface definition of a callback to be invoked to communicate some
+     * info and/or warning about the media or its playback.
+     */
+    public interface OnInfoListener
+    {
+        /**
+         * Called to indicate an info or a warning.
+         *
+         * @param mp the MediaPlayer the info pertains to.
+         * @param what the type of info or warning.
+         * <ul>
+         * <li>{@link #MEDIA_INFO_UNKNOWN}
+         * <li>{@link #MEDIA_INFO_VIDEO_TRACK_LAGGING}
+         * <li>{@link #MEDIA_INFO_VIDEO_RENDERING_START}
+         * <li>{@link #MEDIA_INFO_BUFFERING_START}
+         * <li>{@link #MEDIA_INFO_BUFFERING_END}
+         * <li><code>MEDIA_INFO_NETWORK_BANDWIDTH (703)</code> -
+         *     bandwidth information is available (as <code>extra</code> kbps)
+         * <li>{@link #MEDIA_INFO_BAD_INTERLEAVING}
+         * <li>{@link #MEDIA_INFO_NOT_SEEKABLE}
+         * <li>{@link #MEDIA_INFO_METADATA_UPDATE}
+         * <li>{@link #MEDIA_INFO_UNSUPPORTED_SUBTITLE}
+         * <li>{@link #MEDIA_INFO_SUBTITLE_TIMED_OUT}
+         * </ul>
+         * @param extra an extra code, specific to the info. Typically
+         * implementation dependent.
+         * @return True if the method handled the info, false if it didn't.
+         * Returning false, or not having an OnInfoListener at all, will
+         * cause the info to be discarded.
+         */
+        boolean onInfo(MediaPlayer mp, int what, int extra);
+    }
+
+    /**
+     * Register a callback to be invoked when an info/warning is available.
+     *
+     * @param listener the callback that will be run
+     */
+    public void setOnInfoListener(OnInfoListener listener)
+    {
+        mOnInfoListener = listener;
+    }
+
+    // Read by EventHandler when MEDIA_INFO is dispatched.
+    private OnInfoListener mOnInfoListener;
+
+    // Modular DRM begin
+
+    /**
+     * Interface definition of a callback to be invoked when the app
+     * can do DRM configuration (get/set properties) before the session
+     * is opened. This facilitates configuration of the properties, like
+     * 'securityLevel', which has to be set after DRM scheme creation but
+     * before the DRM session is opened.
+     *
+     * The only allowed DRM calls in this listener are {@code getDrmPropertyString}
+     * and {@code setDrmPropertyString}.
+     *
+     */
+    public interface OnDrmConfigHelper
+    {
+        /**
+         * Called to give the app the opportunity to configure DRM before the session is created
+         *
+         * @param mp the {@code MediaPlayer} associated with this callback
+         */
+        public void onDrmConfig(MediaPlayer mp);
+    }
+
+    /**
+     * Register a callback to be invoked for configuration of the DRM object before
+     * the session is created.
+     * The callback will be invoked synchronously during the execution
+     * of {@link #prepareDrm(UUID uuid)}.
+     *
+     * @param listener the callback that will be run
+     */
+    public void setOnDrmConfigHelper(OnDrmConfigHelper listener)
+    {
+        synchronized (mDrmLock) {
+            mOnDrmConfigHelper = listener;
+        } // synchronized
+    }
+
+    // Written under mDrmLock.
+    private OnDrmConfigHelper mOnDrmConfigHelper;
+
+    /**
+     * Interface definition of a callback to be invoked when the
+     * DRM info becomes available
+     */
+    public interface OnDrmInfoListener
+    {
+        /**
+         * Called to indicate DRM info is available
+         *
+         * @param mp the {@code MediaPlayer} associated with this callback
+         * @param drmInfo DRM info of the source including PSSH, and subset
+         *                of crypto schemes supported by this device
+         */
+        public void onDrmInfo(MediaPlayer mp, DrmInfo drmInfo);
+    }
+
+    /**
+     * Register a callback to be invoked when the DRM info is
+     * known.
+     *
+     * @param listener the callback that will be run
+     */
+    public void setOnDrmInfoListener(OnDrmInfoListener listener)
+    {
+        setOnDrmInfoListener(listener, null);
+    }
+
+    /**
+     * Register a callback to be invoked when the DRM info is
+     * known.
+     *
+     * @param listener the callback that will be run
+     * @param handler the Handler that will receive the callback; when null the
+     *                callback is delivered through the player's event handler
+     */
+    public void setOnDrmInfoListener(OnDrmInfoListener listener, Handler handler)
+    {
+        synchronized (mDrmLock) {
+            if (listener != null) {
+                mOnDrmInfoHandlerDelegate = new OnDrmInfoHandlerDelegate(this, listener, handler);
+            } else {
+                mOnDrmInfoHandlerDelegate = null;
+            }
+        } // synchronized
+    }
+
+    // Guarded by mDrmLock; read by EventHandler on MEDIA_DRM_INFO.
+    private OnDrmInfoHandlerDelegate mOnDrmInfoHandlerDelegate;
+
+
+ /**
+ * The status codes for {@link OnDrmPreparedListener#onDrmPrepared} listener.
+ * <p>
+ *
+ * DRM preparation has succeeded.
+ */
+ public static final int PREPARE_DRM_STATUS_SUCCESS = 0;
+
+ /**
+ * The device required DRM provisioning but couldn't reach the provisioning server.
+ */
+ public static final int PREPARE_DRM_STATUS_PROVISIONING_NETWORK_ERROR = 1;
+
+ /**
+ * The device required DRM provisioning but the provisioning server denied the request.
+ */
+ public static final int PREPARE_DRM_STATUS_PROVISIONING_SERVER_ERROR = 2;
+
+ /**
+ * The DRM preparation has failed .
+ */
+ public static final int PREPARE_DRM_STATUS_PREPARATION_ERROR = 3;
+
+
+ /** @hide */
+ @IntDef({
+ PREPARE_DRM_STATUS_SUCCESS,
+ PREPARE_DRM_STATUS_PROVISIONING_NETWORK_ERROR,
+ PREPARE_DRM_STATUS_PROVISIONING_SERVER_ERROR,
+ PREPARE_DRM_STATUS_PREPARATION_ERROR,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface PrepareDrmStatusCode {}
+
    /**
     * Interface definition of a callback to notify the app when the
     * DRM is ready for key request/response.
     */
    public interface OnDrmPreparedListener
    {
        /**
         * Called to notify the app that prepareDrm is finished and ready for key request/response.
         *
         * @param mp the {@code MediaPlayer} associated with this callback
         * @param status the result of DRM preparation which can be
         * {@link #PREPARE_DRM_STATUS_SUCCESS},
         * {@link #PREPARE_DRM_STATUS_PROVISIONING_NETWORK_ERROR},
         * {@link #PREPARE_DRM_STATUS_PROVISIONING_SERVER_ERROR}, or
         * {@link #PREPARE_DRM_STATUS_PREPARATION_ERROR}.
         */
        public void onDrmPrepared(MediaPlayer mp, @PrepareDrmStatusCode int status);
    }
+
+ /**
+ * Register a callback to be invoked when the DRM object is prepared.
+ *
+ * @param listener the callback that will be run
+ */
+ public void setOnDrmPreparedListener(OnDrmPreparedListener listener)
+ {
+ setOnDrmPreparedListener(listener, null);
+ }
+
+ /**
+ * Register a callback to be invoked when the DRM object is prepared.
+ *
+ * @param listener the callback that will be run
+ * @param handler the Handler that will receive the callback
+ */
+ public void setOnDrmPreparedListener(OnDrmPreparedListener listener, Handler handler)
+ {
+ synchronized (mDrmLock) {
+ if (listener != null) {
+ mOnDrmPreparedHandlerDelegate = new OnDrmPreparedHandlerDelegate(this,
+ listener, handler);
+ } else {
+ mOnDrmPreparedHandlerDelegate = null;
+ }
+ } // synchronized
+ }
+
    // Delegate that routes prepareDrm completion callbacks to the registered listener;
    // guarded by mDrmLock. Non-null registration switches prepareDrm() to non-blocking mode.
    private OnDrmPreparedHandlerDelegate mOnDrmPreparedHandlerDelegate;
+
+
    /**
     * Bundles an {@link OnDrmInfoListener} with the Handler it should be invoked on.
     * When no Handler is supplied, the callback is intentionally left to be invoked
     * directly by EventHandler.handleMessage to preserve notification ordering.
     */
    private class OnDrmInfoHandlerDelegate {
        private MediaPlayer mMediaPlayer;
        private OnDrmInfoListener mOnDrmInfoListener;
        private Handler mHandler;   // null => direct call from the event handler thread

        OnDrmInfoHandlerDelegate(MediaPlayer mp, OnDrmInfoListener listener, Handler handler) {
            mMediaPlayer = mp;
            mOnDrmInfoListener = listener;

            // find the looper for our new event handler
            if (handler != null) {
                mHandler = handler;
            } else {
                // handler == null
                // Will let OnDrmInfoListener be called in mEventHandler similar to other
                // legacy notifications. This is because MEDIA_DRM_INFO's notification has to be
                // sent before MEDIA_PREPARED's (i.e., in the same order they are issued by
                // mediaserver). As a result, the callback has to be called directly by
                // EventHandler.handleMessage similar to onPrepared.
            }
        }

        // Invokes the listener, either posted to the app-supplied Handler or
        // synchronously on the caller's (event handler) thread when none was given.
        void notifyClient(DrmInfo drmInfo) {
            if (mHandler != null) {
                mHandler.post(new Runnable() {
                    @Override
                    public void run() {
                        mOnDrmInfoListener.onDrmInfo(mMediaPlayer, drmInfo);
                    }
                });
            }
            else { // no handler: direct call by mEventHandler
                mOnDrmInfoListener.onDrmInfo(mMediaPlayer, drmInfo);
            }
        }
    }
+
    /**
     * Bundles an {@link OnDrmPreparedListener} with the Handler it should be invoked on.
     * Unlike OnDrmInfoHandlerDelegate, this falls back to mEventHandler when the app
     * supplies no Handler; with neither available, notifications are dropped with an error.
     */
    private class OnDrmPreparedHandlerDelegate {
        private MediaPlayer mMediaPlayer;
        private OnDrmPreparedListener mOnDrmPreparedListener;
        private Handler mHandler;

        OnDrmPreparedHandlerDelegate(MediaPlayer mp, OnDrmPreparedListener listener,
                Handler handler) {
            mMediaPlayer = mp;
            mOnDrmPreparedListener = listener;

            // find the looper for our new event handler
            if (handler != null) {
                mHandler = handler;
            } else if (mEventHandler != null) {
                // Otherwise, use mEventHandler
                mHandler = mEventHandler;
            } else {
                // mHandler stays null; notifyClient() logs and drops the callback.
                Log.e(TAG, "OnDrmPreparedHandlerDelegate: Unexpected null mEventHandler");
            }
        }

        // Posts the prepare-status callback to the chosen Handler; never runs it inline.
        void notifyClient(int status) {
            if (mHandler != null) {
                mHandler.post(new Runnable() {
                    @Override
                    public void run() {
                        mOnDrmPreparedListener.onDrmPrepared(mMediaPlayer, status);
                    }
                });
            } else {
                Log.e(TAG, "OnDrmPreparedHandlerDelegate:notifyClient: Unexpected null mHandler");
            }
        }
    }
+
+ /**
+ * Retrieves the DRM Info associated with the current source
+ *
+ * @throws IllegalStateException if called before prepare()
+ */
+ public DrmInfo getDrmInfo()
+ {
+ DrmInfo drmInfo = null;
+
+ // there is not much point if the app calls getDrmInfo within an OnDrmInfoListenet;
+ // regardless below returns drmInfo anyway instead of raising an exception
+ synchronized (mDrmLock) {
+ if (!mDrmInfoResolved && mDrmInfo == null) {
+ final String msg = "The Player has not been prepared yet";
+ Log.v(TAG, msg);
+ throw new IllegalStateException(msg);
+ }
+
+ if (mDrmInfo != null) {
+ drmInfo = mDrmInfo.makeCopy();
+ }
+ } // synchronized
+
+ return drmInfo;
+ }
+
+
    /**
     * Prepares the DRM for the current source
     * <p>
     * If {@code OnDrmConfigHelper} is registered, it will be called during
     * preparation to allow configuration of the DRM properties before opening the
     * DRM session. Note that the callback is called synchronously in the thread that called
     * {@code prepareDrm}. It should be used only for a series of {@code getDrmPropertyString}
     * and {@code setDrmPropertyString} calls and refrain from any lengthy operation.
     * <p>
     * If the device has not been provisioned before, this call also provisions the device
     * which involves accessing the provisioning server and can take a variable time to
     * complete depending on the network connectivity.
     * If {@code OnDrmPreparedListener} is registered, prepareDrm() runs in non-blocking
     * mode by launching the provisioning in the background and returning. The listener
     * will be called when provisioning and preparation has finished. If a
     * {@code OnDrmPreparedListener} is not registered, prepareDrm() waits till provisioning
     * and preparation has finished, i.e., runs in blocking mode.
     * <p>
     * If {@code OnDrmPreparedListener} is registered, it is called to indicate the DRM
     * session being ready. The application should not make any assumption about its call
     * sequence (e.g., before or after prepareDrm returns), or the thread context that will
     * execute the listener (unless the listener is registered with a handler thread).
     * <p>
     *
     * @param uuid The UUID of the crypto scheme. If not known beforehand, it can be retrieved
     * from the source through {@code getDrmInfo} or registering a {@code onDrmInfoListener}.
     *
     * @throws IllegalStateException if called before prepare(), or the DRM was
     * prepared already
     * @throws UnsupportedSchemeException if the crypto scheme is not supported
     * @throws ResourceBusyException if required DRM resources are in use
     * @throws ProvisioningNetworkErrorException if provisioning is required but failed due to a
     * network error
     * @throws ProvisioningServerErrorException if provisioning is required but failed due to
     * the request denied by the provisioning server
     */
    public void prepareDrm(@NonNull UUID uuid)
            throws UnsupportedSchemeException, ResourceBusyException,
                   ProvisioningNetworkErrorException, ProvisioningServerErrorException
    {
        Log.v(TAG, "prepareDrm: uuid: " + uuid + " mOnDrmConfigHelper: " + mOnDrmConfigHelper);

        // Set when openSession succeeds without needing provisioning; drives the
        // success callback at the very bottom, outside the lock.
        boolean allDoneWithoutProvisioning = false;
        // get a snapshot as we'll use them outside the lock
        OnDrmPreparedHandlerDelegate onDrmPreparedHandlerDelegate = null;

        // Phase 1: validate state and create the MediaDrm object (pre-openSession).
        synchronized (mDrmLock) {

            // only allowing if tied to a protected source; might relax for releasing offline keys
            if (mDrmInfo == null) {
                final String msg = "prepareDrm(): Wrong usage: The player must be prepared and " +
                        "DRM info be retrieved before this call.";
                Log.e(TAG, msg);
                throw new IllegalStateException(msg);
            }

            if (mActiveDrmScheme) {
                final String msg = "prepareDrm(): Wrong usage: There is already " +
                        "an active DRM scheme with " + mDrmUUID;
                Log.e(TAG, msg);
                throw new IllegalStateException(msg);
            }

            if (mPrepareDrmInProgress) {
                final String msg = "prepareDrm(): Wrong usage: There is already " +
                        "a pending prepareDrm call.";
                Log.e(TAG, msg);
                throw new IllegalStateException(msg);
            }

            if (mDrmProvisioningInProgress) {
                final String msg = "prepareDrm(): Unexpectd: Provisioning is already in progress.";
                Log.e(TAG, msg);
                throw new IllegalStateException(msg);
            }

            // shouldn't need this; just for safeguard
            cleanDrmObj();

            mPrepareDrmInProgress = true;
            // local copy while the lock is held
            onDrmPreparedHandlerDelegate = mOnDrmPreparedHandlerDelegate;

            try {
                // only creating the DRM object to allow pre-openSession configuration
                prepareDrm_createDrmStep(uuid);
            } catch (Exception e) {
                Log.w(TAG, "prepareDrm(): Exception ", e);
                mPrepareDrmInProgress = false;
                throw e;
            }

            // Property get/set is only legal between this point and the end of onDrmConfig.
            mDrmConfigAllowed = true;
        } // synchronized


        // call the callback outside the lock
        if (mOnDrmConfigHelper != null) {
            mOnDrmConfigHelper.onDrmConfig(this);
        }

        // Phase 2: open the DRM session, provisioning the device first if required.
        synchronized (mDrmLock) {
            mDrmConfigAllowed = false;
            boolean earlyExit = false;

            try {
                prepareDrm_openSessionStep(uuid);

                mDrmUUID = uuid;
                mActiveDrmScheme = true;

                allDoneWithoutProvisioning = true;
            } catch (IllegalStateException e) {
                final String msg = "prepareDrm(): Wrong usage: The player must be " +
                        "in the prepared state to call prepareDrm().";
                Log.e(TAG, msg);
                earlyExit = true;
                throw new IllegalStateException(msg);
            } catch (NotProvisionedException e) {
                Log.w(TAG, "prepareDrm: NotProvisionedException");

                // handle provisioning internally; it'll reset mPrepareDrmInProgress
                int result = HandleProvisioninig(uuid);

                // if blocking mode, we're already done;
                // if non-blocking mode, we attempted to launch background provisioning
                if (result != PREPARE_DRM_STATUS_SUCCESS) {
                    earlyExit = true;
                    String msg;

                    switch (result) {
                        case PREPARE_DRM_STATUS_PROVISIONING_NETWORK_ERROR:
                            msg = "prepareDrm: Provisioning was required but failed " +
                                    "due to a network error.";
                            Log.e(TAG, msg);
                            throw new ProvisioningNetworkErrorException(msg);

                        case PREPARE_DRM_STATUS_PROVISIONING_SERVER_ERROR:
                            msg = "prepareDrm: Provisioning was required but the request " +
                                    "was denied by the server.";
                            Log.e(TAG, msg);
                            throw new ProvisioningServerErrorException(msg);

                        case PREPARE_DRM_STATUS_PREPARATION_ERROR:
                        default: // default for safeguard
                            msg = "prepareDrm: Post-provisioning preparation failed.";
                            Log.e(TAG, msg);
                            throw new IllegalStateException(msg);
                    }
                }
                // nothing else to do;
                // if blocking or non-blocking, HandleProvisioninig does the re-attempt & cleanup
            } catch (Exception e) {
                Log.e(TAG, "prepareDrm: Exception " + e);
                earlyExit = true;
                throw e;
            } finally {
                if (!mDrmProvisioningInProgress) { // if early exit other than provisioning exception
                    mPrepareDrmInProgress = false;
                }
                if (earlyExit) { // cleaning up object if didn't succeed
                    cleanDrmObj();
                }
            } // finally
        } // synchronized


        // if finished successfully without provisioning, call the callback outside the lock
        if (allDoneWithoutProvisioning) {
            if (onDrmPreparedHandlerDelegate != null)
                onDrmPreparedHandlerDelegate.notifyClient(PREPARE_DRM_STATUS_SUCCESS);
        }

    }
+
+
    // Native counterpart of releaseDrm(): tears down the mediaserver-side crypto object.
    // Per releaseDrm()'s usage, it raises an exception when the player is not in a
    // stopped/prepared state.
    private native void _releaseDrm();
+
+ /**
+ * Releases the DRM session
+ * <p>
+ * The player has to have an active DRM session and be in stopped, or prepared
+ * state before this call is made.
+ * A {@code reset()} call will release the DRM session implicitly.
+ *
+ * @throws NoDrmSchemeException if there is no active DRM session to release
+ */
+ public void releaseDrm()
+ throws NoDrmSchemeException
+ {
+ Log.v(TAG, "releaseDrm:");
+
+ synchronized (mDrmLock) {
+ if (!mActiveDrmScheme) {
+ Log.e(TAG, "releaseDrm(): No active DRM scheme to release.");
+ throw new NoDrmSchemeException("releaseDrm: No active DRM scheme to release.");
+ }
+
+ try {
+ // we don't have the player's state in this layer. The below call raises
+ // exception if we're in a non-stopped/prepared state.
+
+ // for cleaning native/mediaserver crypto object
+ _releaseDrm();
+
+ // for cleaning client-side MediaDrm object; only called if above has succeeded
+ cleanDrmObj();
+
+ mActiveDrmScheme = false;
+ } catch (IllegalStateException e) {
+ Log.w(TAG, "releaseDrm: Exception ", e);
+ throw new IllegalStateException("releaseDrm: The player is not in a valid state.");
+ } catch (Exception e) {
+ Log.e(TAG, "releaseDrm: Exception ", e);
+ }
+ } // synchronized
+ }
+
+
+ /**
+ * A key request/response exchange occurs between the app and a license server
+ * to obtain or release keys used to decrypt encrypted content.
+ * <p>
+ * getKeyRequest() is used to obtain an opaque key request byte array that is
+ * delivered to the license server. The opaque key request byte array is returned
+ * in KeyRequest.data. The recommended URL to deliver the key request to is
+ * returned in KeyRequest.defaultUrl.
+ * <p>
+ * After the app has received the key request response from the server,
+ * it should deliver to the response to the DRM engine plugin using the method
+ * {@link #provideKeyResponse}.
+ *
+ * @param keySetId is the key-set identifier of the offline keys being released when keyType is
+ * {@link MediaDrm#KEY_TYPE_RELEASE}. It should be set to null for other key requests, when
+ * keyType is {@link MediaDrm#KEY_TYPE_STREAMING} or {@link MediaDrm#KEY_TYPE_OFFLINE}.
+ *
+ * @param initData is the container-specific initialization data when the keyType is
+ * {@link MediaDrm#KEY_TYPE_STREAMING} or {@link MediaDrm#KEY_TYPE_OFFLINE}. Its meaning is
+ * interpreted based on the mime type provided in the mimeType parameter. It could
+ * contain, for example, the content ID, key ID or other data obtained from the content
+ * metadata that is required in generating the key request.
+ * When the keyType is {@link MediaDrm#KEY_TYPE_RELEASE}, it should be set to null.
+ *
+ * @param mimeType identifies the mime type of the content
+ *
+ * @param keyType specifies the type of the request. The request may be to acquire
+ * keys for streaming, {@link MediaDrm#KEY_TYPE_STREAMING}, or for offline content
+ * {@link MediaDrm#KEY_TYPE_OFFLINE}, or to release previously acquired
+ * keys ({@link MediaDrm#KEY_TYPE_RELEASE}), which are identified by a keySetId.
+ *
+ * @param optionalParameters are included in the key request message to
+ * allow a client application to provide additional message parameters to the server.
+ * This may be {@code null} if no additional parameters are to be sent.
+ *
+ * @throws NoDrmSchemeException if there is no active DRM session
+ */
+ @NonNull
+ public MediaDrm.KeyRequest getKeyRequest(@Nullable byte[] keySetId, @Nullable byte[] initData,
+ @Nullable String mimeType, @MediaDrm.KeyType int keyType,
+ @Nullable Map<String, String> optionalParameters)
+ throws NoDrmSchemeException
+ {
+ Log.v(TAG, "getKeyRequest: " +
+ " keySetId: " + keySetId + " initData:" + initData + " mimeType: " + mimeType +
+ " keyType: " + keyType + " optionalParameters: " + optionalParameters);
+
+ synchronized (mDrmLock) {
+ if (!mActiveDrmScheme) {
+ Log.e(TAG, "getKeyRequest NoDrmSchemeException");
+ throw new NoDrmSchemeException("getKeyRequest: Has to set a DRM scheme first.");
+ }
+
+ try {
+ byte[] scope = (keyType != MediaDrm.KEY_TYPE_RELEASE) ?
+ mDrmSessionId : // sessionId for KEY_TYPE_STREAMING/OFFLINE
+ keySetId; // keySetId for KEY_TYPE_RELEASE
+
+ HashMap<String, String> hmapOptionalParameters =
+ (optionalParameters != null) ?
+ new HashMap<String, String>(optionalParameters) :
+ null;
+
+ MediaDrm.KeyRequest request = mDrmObj.getKeyRequest(scope, initData, mimeType,
+ keyType, hmapOptionalParameters);
+ Log.v(TAG, "getKeyRequest: --> request: " + request);
+
+ return request;
+
+ } catch (NotProvisionedException e) {
+ Log.w(TAG, "getKeyRequest NotProvisionedException: " +
+ "Unexpected. Shouldn't have reached here.");
+ throw new IllegalStateException("getKeyRequest: Unexpected provisioning error.");
+ } catch (Exception e) {
+ Log.w(TAG, "getKeyRequest Exception " + e);
+ throw e;
+ }
+
+ } // synchronized
+ }
+
+
+ /**
+ * A key response is received from the license server by the app, then it is
+ * provided to the DRM engine plugin using provideKeyResponse. When the
+ * response is for an offline key request, a key-set identifier is returned that
+ * can be used to later restore the keys to a new session with the method
+ * {@ link # restoreKeys}.
+ * When the response is for a streaming or release request, null is returned.
+ *
+ * @param keySetId When the response is for a release request, keySetId identifies
+ * the saved key associated with the release request (i.e., the same keySetId
+ * passed to the earlier {@ link # getKeyRequest} call. It MUST be null when the
+ * response is for either streaming or offline key requests.
+ *
+ * @param response the byte array response from the server
+ *
+ * @throws NoDrmSchemeException if there is no active DRM session
+ * @throws DeniedByServerException if the response indicates that the
+ * server rejected the request
+ */
+ public byte[] provideKeyResponse(@Nullable byte[] keySetId, @NonNull byte[] response)
+ throws NoDrmSchemeException, DeniedByServerException
+ {
+ Log.v(TAG, "provideKeyResponse: keySetId: " + keySetId + " response: " + response);
+
+ synchronized (mDrmLock) {
+
+ if (!mActiveDrmScheme) {
+ Log.e(TAG, "getKeyRequest NoDrmSchemeException");
+ throw new NoDrmSchemeException("getKeyRequest: Has to set a DRM scheme first.");
+ }
+
+ try {
+ byte[] scope = (keySetId == null) ?
+ mDrmSessionId : // sessionId for KEY_TYPE_STREAMING/OFFLINE
+ keySetId; // keySetId for KEY_TYPE_RELEASE
+
+ byte[] keySetResult = mDrmObj.provideKeyResponse(scope, response);
+
+ Log.v(TAG, "provideKeyResponse: keySetId: " + keySetId + " response: " + response +
+ " --> " + keySetResult);
+
+
+ return keySetResult;
+
+ } catch (NotProvisionedException e) {
+ Log.w(TAG, "provideKeyResponse NotProvisionedException: " +
+ "Unexpected. Shouldn't have reached here.");
+ throw new IllegalStateException("provideKeyResponse: " +
+ "Unexpected provisioning error.");
+ } catch (Exception e) {
+ Log.w(TAG, "provideKeyResponse Exception " + e);
+ throw e;
+ }
+ } // synchronized
+ }
+
+
    /**
     * Restore persisted offline keys into a new session. keySetId identifies the
     * keys to load, obtained from a prior call to {@link #provideKeyResponse}.
     *
     * @param keySetId identifies the saved key set to restore
     *
     * @throws NoDrmSchemeException if there is no active DRM session
     */
    public void restoreKeys(@NonNull byte[] keySetId)
            throws NoDrmSchemeException
    {
        Log.v(TAG, "restoreKeys: keySetId: " + keySetId);

        synchronized (mDrmLock) {

            if (!mActiveDrmScheme) {
                Log.w(TAG, "restoreKeys NoDrmSchemeException");
                throw new NoDrmSchemeException("restoreKeys: Has to set a DRM scheme first.");
            }

            try {
                // Load the saved keys into the currently open DRM session.
                mDrmObj.restoreKeys(mDrmSessionId, keySetId);
            } catch (Exception e) {
                Log.w(TAG, "restoreKeys Exception " + e);
                throw e;
            }

        } // synchronized
    }
+
+
+ /**
+ * Read a DRM engine plugin String property value, given the property name string.
+ * <p>
+ * @param propertyName the property name
+ *
+ * Standard fields names are:
+ * {@link MediaDrm#PROPERTY_VENDOR}, {@link MediaDrm#PROPERTY_VERSION},
+ * {@link MediaDrm#PROPERTY_DESCRIPTION}, {@link MediaDrm#PROPERTY_ALGORITHMS}
+ */
+ @NonNull
+ public String getDrmPropertyString(@NonNull @MediaDrm.StringProperty String propertyName)
+ throws NoDrmSchemeException
+ {
+ Log.v(TAG, "getDrmPropertyString: propertyName: " + propertyName);
+
+ String value;
+ synchronized (mDrmLock) {
+
+ if (!mActiveDrmScheme && !mDrmConfigAllowed) {
+ Log.w(TAG, "getDrmPropertyString NoDrmSchemeException");
+ throw new NoDrmSchemeException("getDrmPropertyString: Has to prepareDrm() first.");
+ }
+
+ try {
+ value = mDrmObj.getPropertyString(propertyName);
+ } catch (Exception e) {
+ Log.w(TAG, "getDrmPropertyString Exception " + e);
+ throw e;
+ }
+ } // synchronized
+
+ Log.v(TAG, "getDrmPropertyString: propertyName: " + propertyName + " --> value: " + value);
+
+ return value;
+ }
+
+
+ /**
+ * Set a DRM engine plugin String property value.
+ * <p>
+ * @param propertyName the property name
+ * @param value the property value
+ *
+ * Standard fields names are:
+ * {@link MediaDrm#PROPERTY_VENDOR}, {@link MediaDrm#PROPERTY_VERSION},
+ * {@link MediaDrm#PROPERTY_DESCRIPTION}, {@link MediaDrm#PROPERTY_ALGORITHMS}
+ */
+ public void setDrmPropertyString(@NonNull @MediaDrm.StringProperty String propertyName,
+ @NonNull String value)
+ throws NoDrmSchemeException
+ {
+ Log.v(TAG, "setDrmPropertyString: propertyName: " + propertyName + " value: " + value);
+
+ synchronized (mDrmLock) {
+
+ if ( !mActiveDrmScheme && !mDrmConfigAllowed ) {
+ Log.w(TAG, "setDrmPropertyString NoDrmSchemeException");
+ throw new NoDrmSchemeException("setDrmPropertyString: Has to prepareDrm() first.");
+ }
+
+ try {
+ mDrmObj.setPropertyString(propertyName, value);
+ } catch ( Exception e ) {
+ Log.w(TAG, "setDrmPropertyString Exception " + e);
+ throw e;
+ }
+ } // synchronized
+ }
+
    /**
     * Encapsulates the DRM properties of the source.
     */
    public static final class DrmInfo {
        // PSSH data per DRM scheme UUID, as parsed out of the source.
        private Map<UUID, byte[]> mapPssh;
        // Schemes present in the source AND supported by this device.
        private UUID[] supportedSchemes;

        /**
         * Returns the PSSH info of the data source for each supported DRM scheme.
         */
        public Map<UUID, byte[]> getPssh() {
            return mapPssh;
        }

        /**
         * Returns the intersection of the data source and the device DRM schemes.
         * It effectively identifies the subset of the source's DRM schemes which
         * are supported by the device too.
         */
        public UUID[] getSupportedSchemes() {
            return supportedSchemes;
        }

        // Internal constructor; also used by makeCopy(). Note: shares (does not copy)
        // the provided map/array.
        private DrmInfo(Map<UUID, byte[]> Pssh, UUID[] SupportedSchemes) {
            mapPssh = Pssh;
            supportedSchemes = SupportedSchemes;
        }

        // Deserializes from a Parcel written by the native layer:
        // [int psshSize][pssh bytes][int schemeCount][16-byte UUID]*
        private DrmInfo(Parcel parcel) {
            Log.v(TAG, "DrmInfo(" + parcel + ") size " + parcel.dataSize());

            int psshsize = parcel.readInt();
            byte[] pssh = new byte[psshsize];
            parcel.readByteArray(pssh);

            Log.v(TAG, "DrmInfo() PSSH: " + arrToHex(pssh));
            mapPssh = parsePSSH(pssh, psshsize);
            Log.v(TAG, "DrmInfo() PSSH: " + mapPssh);

            int supportedDRMsCount = parcel.readInt();
            supportedSchemes = new UUID[supportedDRMsCount];
            for (int i = 0; i < supportedDRMsCount; i++) {
                byte[] uuid = new byte[16];
                parcel.readByteArray(uuid);

                supportedSchemes[i] = bytesToUUID(uuid);

                Log.v(TAG, "DrmInfo() supportedScheme[" + i + "]: " +
                        supportedSchemes[i]);
            }

            Log.v(TAG, "DrmInfo() Parcel psshsize: " + psshsize +
                    " supportedDRMsCount: " + supportedDRMsCount);
        }

        // Shallow copy handed to apps by getDrmInfo(); the map/array instances are shared.
        private DrmInfo makeCopy() {
            return new DrmInfo(this.mapPssh, this.supportedSchemes);
        }

        // Hex-dump helper for logging, e.g. "0x0011aabb".
        private String arrToHex(byte[] bytes) {
            String out = "0x";
            for (int i = 0; i < bytes.length; i++) {
                out += String.format("%02x", bytes[i]);
            }

            return out;
        }

        // Converts 16 big-endian bytes (MSB first) into a UUID.
        private UUID bytesToUUID(byte[] uuid) {
            long msb = 0, lsb = 0;
            for (int i = 0; i < 8; i++) {
                msb |= ( ((long)uuid[i]   & 0xff) << (8 * (7 - i)) );
                lsb |= ( ((long)uuid[i+8] & 0xff) << (8 * (7 - i)) );
            }

            return new UUID(msb, lsb);
        }

        // Parses a sequence of [16-byte UUID][4-byte native-order length][data] entries.
        // Returns null (not an exception) on any truncated/malformed input.
        private Map<UUID, byte[]> parsePSSH(byte[] pssh, int psshsize) {
            Map<UUID, byte[]> result = new HashMap<UUID, byte[]>();

            final int UUID_SIZE = 16;
            final int DATALEN_SIZE = 4;

            int len = psshsize;
            int numentries = 0;
            int i = 0;

            while (len > 0) {
                if (len < UUID_SIZE) {
                    Log.w(TAG, String.format("parsePSSH: len is too short to parse " +
                                             "UUID: (%d < 16) pssh: %d", len, psshsize));
                    return null;
                }

                byte[] subset = Arrays.copyOfRange(pssh, i, i + UUID_SIZE);
                UUID uuid = bytesToUUID(subset);
                i += UUID_SIZE;
                len -= UUID_SIZE;

                // get data length
                if (len < 4) {
                    Log.w(TAG, String.format("parsePSSH: len is too short to parse " +
                                             "datalen: (%d < 4) pssh: %d", len, psshsize));
                    return null;
                }

                subset = Arrays.copyOfRange(pssh, i, i+DATALEN_SIZE);
                // The length field was written by native code in host byte order.
                int datalen = (ByteOrder.nativeOrder() == ByteOrder.LITTLE_ENDIAN) ?
                    ((subset[3] & 0xff) << 24) | ((subset[2] & 0xff) << 16) |
                    ((subset[1] & 0xff) <<  8) |  (subset[0] & 0xff)          :
                    ((subset[0] & 0xff) << 24) | ((subset[1] & 0xff) << 16) |
                    ((subset[2] & 0xff) <<  8) |  (subset[3] & 0xff) ;
                i += DATALEN_SIZE;
                len -= DATALEN_SIZE;

                if (len < datalen) {
                    Log.w(TAG, String.format("parsePSSH: len is too short to parse " +
                                             "data: (%d < %d) pssh: %d", len, datalen, psshsize));
                    return null;
                }

                byte[] data = Arrays.copyOfRange(pssh, i, i+datalen);

                // skip the data
                i += datalen;
                len -= datalen;

                Log.v(TAG, String.format("parsePSSH[%d]: <%s, %s> pssh: %d",
                                         numentries, uuid, arrToHex(data), psshsize));
                numentries++;
                result.put(uuid, data);
            }

            return result;
        }

    };  // DrmInfo
+
    /**
     * Thrown when a DRM method is called before preparing a DRM scheme through prepareDrm().
     * Extends MediaDrm.MediaDrmException
     */
    public static final class NoDrmSchemeException extends MediaDrmException {
        /** @param detailMessage human-readable description of the misuse */
        public NoDrmSchemeException(String detailMessage) {
            super(detailMessage);
        }
    }
+
    /**
     * Thrown when the device requires DRM provisioning but the provisioning attempt has
     * failed due to a network error (Internet reachability, timeout, etc.).
     * Extends MediaDrm.MediaDrmException
     */
    public static final class ProvisioningNetworkErrorException extends MediaDrmException {
        /** @param detailMessage human-readable description of the network failure */
        public ProvisioningNetworkErrorException(String detailMessage) {
            super(detailMessage);
        }
    }
+
    /**
     * Thrown when the device requires DRM provisioning but the provisioning attempt has
     * failed due to the provisioning server denying the request.
     * Extends MediaDrm.MediaDrmException
     */
    public static final class ProvisioningServerErrorException extends MediaDrmException {
        /** @param detailMessage human-readable description of the server denial */
        public ProvisioningServerErrorException(String detailMessage) {
            super(detailMessage);
        }
    }
+
+
    // Hands the scheme UUID and the opened MediaDrm session down to native/mediaserver
    // to create the crypto object; called from prepareDrm_openSessionStep().
    private native void _prepareDrm(@NonNull byte[] uuid, @NonNull byte[] drmSessionId);
+
+ // Modular DRM helpers
+
    /**
     * First half of prepareDrm(): creates the client-side MediaDrm object only,
     * so the app can configure DRM properties (via onDrmConfig) before a session
     * is opened. Stores the result in mDrmObj; caller holds mDrmLock.
     *
     * @throws UnsupportedSchemeException if the device has no plugin for the scheme
     */
    private void prepareDrm_createDrmStep(@NonNull UUID uuid)
            throws UnsupportedSchemeException {
        Log.v(TAG, "prepareDrm_createDrmStep: UUID: " + uuid);

        try {
            mDrmObj = new MediaDrm(uuid);
            Log.v(TAG, "prepareDrm_createDrmStep: Created mDrmObj=" + mDrmObj);
        } catch (Exception e) { // UnsupportedSchemeException
            Log.e(TAG, "prepareDrm_createDrmStep: MediaDrm failed with " + e);
            throw e;
        }
    }
+
    /**
     * Second half of prepareDrm(): opens a MediaDrm session and pushes it down to
     * native/mediaserver to create the crypto object. Sets mDrmSessionId on success;
     * caller holds mDrmLock.
     *
     * @throws NotProvisionedException if the device must be provisioned first
     *         (handled by HandleProvisioninig in the caller)
     * @throws ResourceBusyException if required DRM resources are in use
     */
    private void prepareDrm_openSessionStep(@NonNull UUID uuid)
            throws NotProvisionedException, ResourceBusyException {
        Log.v(TAG, "prepareDrm_openSessionStep: uuid: " + uuid);

        // TODO: don't need an open session for a future specialKeyReleaseDrm mode but we should do
        // it anyway so it raises provisioning error if needed. We'd rather handle provisioning
        // at prepareDrm/openSession rather than getKeyRequest/provideKeyResponse
        try {
            mDrmSessionId = mDrmObj.openSession();
            Log.v(TAG, "prepareDrm_openSessionStep: mDrmSessionId=" + mDrmSessionId);

            // Sending it down to native/mediaserver to create the crypto object
            // This call could simply fail due to bad player state, e.g., after start().
            _prepareDrm(getByteArrayFromUUID(uuid), mDrmSessionId);
            Log.v(TAG, "prepareDrm_openSessionStep: _prepareDrm/Crypto succeeded");

        } catch (Exception e) { //ResourceBusyException, NotProvisionedException
            Log.e(TAG, "prepareDrm_openSessionStep: open/crypto failed with " + e);
            throw e;
        }

    }
+
    /**
     * Performs the device-provisioning network round-trip (POST to the provisioning
     * server, then provideProvisionResponse) off the calling thread, then resumes
     * prepareDrm. In blocking mode the caller join()s this thread while holding
     * mDrmLock; in non-blocking mode (a listener delegate is set) this thread
     * re-acquires the lock itself and notifies the listener when done.
     */
    private class ProvisioningThread extends Thread
    {
        // HTTP connect/read timeout for the provisioning request.
        public static final int TIMEOUT_MS = 60000;

        private UUID uuid;
        private String urlStr;
        private Object drmLock;
        private OnDrmPreparedHandlerDelegate onDrmPreparedHandlerDelegate;
        private MediaPlayer mediaPlayer;
        private int status;     // one of the PREPARE_DRM_STATUS_* codes
        private boolean finished;   // set at end of run(); NOTE(review): not read in this class
        public int status() {
            return status;
        }

        public ProvisioningThread initialize(MediaDrm.ProvisionRequest request,
                UUID uuid, MediaPlayer mediaPlayer) {
            // lock is held by the caller
            drmLock = mediaPlayer.mDrmLock;
            onDrmPreparedHandlerDelegate = mediaPlayer.mOnDrmPreparedHandlerDelegate;
            this.mediaPlayer = mediaPlayer;

            // NOTE(review): new String(bytes) uses the platform default charset — confirm
            // this matches what the provisioning server expects (UTF-8 on Android).
            urlStr = request.getDefaultUrl() + "&signedRequest=" + new String(request.getData());
            this.uuid = uuid;

            // Pessimistic default; overwritten on success below.
            status = PREPARE_DRM_STATUS_PREPARATION_ERROR;

            Log.v(TAG, "HandleProvisioninig: Thread is initialised url: " + urlStr);
            return this;
        }

        public void run() {

            byte[] response = null;
            boolean provisioningSucceeded = false;
            // Step 1: POST the signed request to the provisioning server.
            try {
                URL url = new URL(urlStr);
                final HttpURLConnection connection = (HttpURLConnection) url.openConnection();
                try {
                    connection.setRequestMethod("POST");
                    connection.setDoOutput(false);
                    connection.setDoInput(true);
                    connection.setConnectTimeout(TIMEOUT_MS);
                    connection.setReadTimeout(TIMEOUT_MS);

                    connection.connect();
                    response = Streams.readFully(connection.getInputStream());

                    Log.v(TAG, "HandleProvisioninig: Thread run: response " +
                            response.length + " " + response);
                } catch (Exception e) {
                    status = PREPARE_DRM_STATUS_PROVISIONING_NETWORK_ERROR;
                    Log.w(TAG, "HandleProvisioninig: Thread run: connect " + e + " url: " + url);
                } finally {
                    connection.disconnect();
                }
            } catch (Exception e) {
                status = PREPARE_DRM_STATUS_PROVISIONING_NETWORK_ERROR;
                Log.w(TAG, "HandleProvisioninig: Thread run: openConnection " + e);
            }

            // Step 2: feed the server's certificate response into MediaDrm.
            if (response != null) {
                try {
                    mDrmObj.provideProvisionResponse(response);
                    Log.v(TAG, "HandleProvisioninig: Thread run: " +
                            "provideProvisionResponse SUCCEEDED!");

                    provisioningSucceeded = true;
                } catch (Exception e) {
                    status = PREPARE_DRM_STATUS_PROVISIONING_SERVER_ERROR;
                    Log.w(TAG, "HandleProvisioninig: Thread run: " +
                            "provideProvisionResponse " + e);
                }
            }

            boolean succeeded = false;

            // Step 3: resume prepareDrm and report the outcome. The two branches run the
            // same logic; only the locking differs (see class comment).
            // non-blocking mode needs the lock
            if (onDrmPreparedHandlerDelegate != null) {

                synchronized (drmLock) {
                    // continuing with prepareDrm
                    if (provisioningSucceeded) {
                        succeeded = mediaPlayer.resumePrepareDrm(uuid);
                        status = (succeeded) ?
                                PREPARE_DRM_STATUS_SUCCESS :
                                PREPARE_DRM_STATUS_PREPARATION_ERROR;
                    }
                    mediaPlayer.mDrmProvisioningInProgress = false;
                    mediaPlayer.mPrepareDrmInProgress = false;
                    if (!succeeded) {
                        cleanDrmObj();  // cleaning up if it hasn't gone through while in the lock
                    }
                } // synchronized

                // calling the callback outside the lock
                onDrmPreparedHandlerDelegate.notifyClient(status);
            } else {   // blocking mode already has the lock

                // continuing with prepareDrm
                if (provisioningSucceeded) {
                    succeeded = mediaPlayer.resumePrepareDrm(uuid);
                    status = (succeeded) ?
                            PREPARE_DRM_STATUS_SUCCESS :
                            PREPARE_DRM_STATUS_PREPARATION_ERROR;
                }
                mediaPlayer.mDrmProvisioningInProgress = false;
                mediaPlayer.mPrepareDrmInProgress = false;
                if (!succeeded) {
                    cleanDrmObj();  // cleaning up if it hasn't gone through
                }
            }

            finished = true;
        }   // run()

    }   // ProvisioningThread
+
    /**
     * Launches device provisioning after openSession failed with
     * NotProvisionedException. In non-blocking mode (a prepared-listener delegate is
     * registered) it starts the background thread and returns SUCCESS immediately —
     * the real outcome arrives via the listener. In blocking mode it joins the
     * thread and returns its final status.
     * <p>
     * NOTE(review): the method name keeps its historical misspelling; it is called
     * from prepareDrm() and renaming would have to change both sites together.
     * Caller holds mDrmLock.
     *
     * @return a PREPARE_DRM_STATUS_* code (provisional SUCCESS in non-blocking mode)
     */
    private int HandleProvisioninig(UUID uuid)
    {
        // the lock is already held by the caller

        if (mDrmProvisioningInProgress) {
            Log.e(TAG, "HandleProvisioninig: Unexpected mDrmProvisioningInProgress");
            return PREPARE_DRM_STATUS_PREPARATION_ERROR;
        }

        MediaDrm.ProvisionRequest provReq = mDrmObj.getProvisionRequest();
        if (provReq == null) {
            Log.e(TAG, "HandleProvisioninig: getProvisionRequest returned null.");
            return PREPARE_DRM_STATUS_PREPARATION_ERROR;
        }

        Log.v(TAG, "HandleProvisioninig provReq " +
                " data: " + provReq.getData() + " url: " + provReq.getDefaultUrl());

        // networking in a background thread
        mDrmProvisioningInProgress = true;

        mDrmProvisioningThread = new ProvisioningThread().initialize(provReq, uuid, this);
        mDrmProvisioningThread.start();

        int result;

        // non-blocking: this is not the final result
        if (mOnDrmPreparedHandlerDelegate != null) {
            result = PREPARE_DRM_STATUS_SUCCESS;
        } else {
            // if blocking mode, wait till provisioning is done
            try {
                mDrmProvisioningThread.join();
            } catch (Exception e) {
                Log.w(TAG, "HandleProvisioninig: Thread.join Exception " + e);
            }
            result = mDrmProvisioningThread.status();
            // no longer need the thread
            mDrmProvisioningThread = null;
        }

        return result;
    }
+
+ private boolean resumePrepareDrm(UUID uuid)
+ {
+ Log.v(TAG, "resumePrepareDrm: uuid: " + uuid);
+
+ // mDrmLock is guaranteed to be held
+ boolean success = false;
+ try {
+ // resuming
+ prepareDrm_openSessionStep(uuid);
+
+ mDrmUUID = uuid;
+ mActiveDrmScheme = true;
+
+ success = true;
+ } catch (Exception e) {
+ Log.w(TAG, "HandleProvisioninig: Thread run _prepareDrm resume failed with " + e);
+ // mDrmObj clean up is done by the caller
+ }
+
+ return success;
+ }
+
    /**
     * Drops all DRM state: cached info, provisioning thread, in-progress flags,
     * and the client-side MediaDrm object/session.
     * <p>
     * NOTE(review): join() is called while holding mDrmLock, and the non-blocking
     * path of ProvisioningThread.run() also synchronizes on that lock — confirm
     * this cannot deadlock when a provisioning thread is still waiting for the lock.
     */
    private void resetDrmState()
    {
        synchronized (mDrmLock) {
            Log.v(TAG, "resetDrmState: " +
                    " mDrmInfo=" + mDrmInfo +
                    " mDrmProvisioningThread=" + mDrmProvisioningThread +
                    " mPrepareDrmInProgress=" + mPrepareDrmInProgress +
                    " mActiveDrmScheme=" + mActiveDrmScheme);

            mDrmInfoResolved = false;
            mDrmInfo = null;

            if (mDrmProvisioningThread != null) {
                // timeout; relying on HttpUrlConnection
                try {
                    mDrmProvisioningThread.join();
                }
                catch (InterruptedException e) {
                    Log.w(TAG, "resetDrmState: ProvThread.join Exception " + e);
                }
                mDrmProvisioningThread = null;
            }

            mPrepareDrmInProgress = false;
            mActiveDrmScheme = false;

            cleanDrmObj();
        } // synchronized
    }
+
+ private void cleanDrmObj()
+ {
+ // the caller holds mDrmLock
+ Log.v(TAG, "cleanDrmObj: mDrmObj=" + mDrmObj + " mDrmSessionId=" + mDrmSessionId);
+
+ if (mDrmSessionId != null) {
+ mDrmObj.closeSession(mDrmSessionId);
+ mDrmSessionId = null;
+ }
+ if (mDrmObj != null) {
+ mDrmObj.release();
+ mDrmObj = null;
+ }
+ }
+
+    /**
+     * Serializes a UUID into its canonical 16-byte big-endian form:
+     * the most-significant long first, then the least-significant long.
+     */
+    private static final byte[] getByteArrayFromUUID(@NonNull UUID uuid) {
+        final long msb = uuid.getMostSignificantBits();
+        final long lsb = uuid.getLeastSignificantBits();
+
+        final byte[] out = new byte[16];
+        int index = 0;
+        for (int shift = 56; shift >= 0; shift -= 8, ++index) {
+            out[index] = (byte) (msb >>> shift);
+            out[index + 8] = (byte) (lsb >>> shift);
+        }
+
+        return out;
+    }
+
+ // Modular DRM end
+
+    /*
+     * Test whether a given video scaling mode is supported.
+     * Only the two scale-to-fit variants are accepted.
+     */
+    private boolean isVideoScalingModeSupported(int mode) {
+        switch (mode) {
+            case VIDEO_SCALING_MODE_SCALE_TO_FIT:
+            case VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING:
+                return true;
+            default:
+                return false;
+        }
+    }
+
+ /** @hide */
+    /**
+     * Media-time provider for MediaPlayer: estimates the current playback
+     * position between polls of the player and delivers timed callbacks to
+     * registered {@link MediaTimeProvider.OnMediaTimeListener}s on a Handler.
+     * @hide
+     */
+    static class TimeProvider implements MediaPlayer.OnSeekCompleteListener,
+            MediaTimeProvider {
+        private static final String TAG = "MTP";
+        // re-query the real player position at least every 5 seconds
+        private static final long MAX_NS_WITHOUT_POSITION_CHECK = 5000000000L;
+        // listeners may be notified up to 1ms before their requested time
+        private static final long MAX_EARLY_CALLBACK_US = 1000;
+        private static final long TIME_ADJUSTMENT_RATE = 2; /* meaning 1/2 */
+        private long mLastTimeUs = 0;
+        private MediaPlayer mPlayer;
+        private boolean mPaused = true;
+        private boolean mStopped = true;
+        private boolean mBuffering;
+        private long mLastReportedTime;
+        private long mTimeAdjustment;
+        // since we are expecting only a handful listeners per stream, there is
+        // no need for log(N) search performance
+        private MediaTimeProvider.OnMediaTimeListener mListeners[];
+        private long mTimes[];
+        private long mLastNanoTime;
+        private Handler mEventHandler;
+        private boolean mRefresh = false;
+        private boolean mPausing = false;
+        private boolean mSeeking = false;
+        private static final int NOTIFY = 1;
+        private static final int NOTIFY_TIME = 0;
+        private static final int REFRESH_AND_NOTIFY_TIME = 1;
+        private static final int NOTIFY_STOP = 2;
+        private static final int NOTIFY_SEEK = 3;
+        private static final int NOTIFY_TRACK_DATA = 4;
+        private HandlerThread mHandlerThread;
+
+        /** @hide */
+        public boolean DEBUG = false;
+
+        public TimeProvider(MediaPlayer mp) {
+            mPlayer = mp;
+            try {
+                getCurrentTimeUs(true, false);
+            } catch (IllegalStateException e) {
+                // we assume starting position
+                mRefresh = true;
+            }
+
+            Looper looper;
+            if ((looper = Looper.myLooper()) == null &&
+                (looper = Looper.getMainLooper()) == null) {
+                // Create our own looper here in case MP was created without one
+                mHandlerThread = new HandlerThread("MediaPlayerMTPEventThread",
+                      Process.THREAD_PRIORITY_FOREGROUND);
+                mHandlerThread.start();
+                looper = mHandlerThread.getLooper();
+            }
+            mEventHandler = new EventHandler(looper);
+
+            mListeners = new MediaTimeProvider.OnMediaTimeListener[0];
+            mTimes = new long[0];
+            mLastTimeUs = 0;
+            mTimeAdjustment = 0;
+        }
+
+        private void scheduleNotification(int type, long delayUs) {
+            // ignore time notifications until seek is handled
+            if (mSeeking &&
+                    (type == NOTIFY_TIME || type == REFRESH_AND_NOTIFY_TIME)) {
+                return;
+            }
+
+            if (DEBUG) Log.v(TAG, "scheduleNotification " + type + " in " + delayUs);
+            mEventHandler.removeMessages(NOTIFY);
+            Message msg = mEventHandler.obtainMessage(NOTIFY, type, 0);
+            // sendMessageDelayed takes a long delayMillis; do NOT narrow to int —
+            // the previous (int) cast overflowed (went negative, i.e. fired
+            // immediately) for delays of ~24.8 days or more.
+            mEventHandler.sendMessageDelayed(msg, delayUs / 1000);
+        }
+
+        /** @hide */
+        public void close() {
+            mEventHandler.removeMessages(NOTIFY);
+            if (mHandlerThread != null) {
+                mHandlerThread.quitSafely();
+                mHandlerThread = null;
+            }
+        }
+
+        /** @hide */
+        protected void finalize() {
+            // safety net in case close() was never called
+            if (mHandlerThread != null) {
+                mHandlerThread.quitSafely();
+            }
+        }
+
+        /** @hide */
+        public void onPaused(boolean paused) {
+            synchronized(this) {
+                if (DEBUG) Log.d(TAG, "onPaused: " + paused);
+                if (mStopped) { // handle as seek if we were stopped
+                    mStopped = false;
+                    mSeeking = true;
+                    scheduleNotification(NOTIFY_SEEK, 0 /* delay */);
+                } else {
+                    mPausing = paused;  // special handling if player disappeared
+                    mSeeking = false;
+                    scheduleNotification(REFRESH_AND_NOTIFY_TIME, 0 /* delay */);
+                }
+            }
+        }
+
+        /** @hide */
+        public void onBuffering(boolean buffering) {
+            synchronized (this) {
+                if (DEBUG) Log.d(TAG, "onBuffering: " + buffering);
+                mBuffering = buffering;
+                scheduleNotification(REFRESH_AND_NOTIFY_TIME, 0 /* delay */);
+            }
+        }
+
+        /** @hide */
+        public void onStopped() {
+            synchronized(this) {
+                if (DEBUG) Log.d(TAG, "onStopped");
+                mPaused = true;
+                mStopped = true;
+                mSeeking = false;
+                mBuffering = false;
+                scheduleNotification(NOTIFY_STOP, 0 /* delay */);
+            }
+        }
+
+        /** @hide */
+        @Override
+        public void onSeekComplete(MediaPlayer mp) {
+            synchronized(this) {
+                mStopped = false;
+                mSeeking = true;
+                scheduleNotification(NOTIFY_SEEK, 0 /* delay */);
+            }
+        }
+
+        /** @hide */
+        public void onNewPlayer() {
+            if (mRefresh) {
+                synchronized(this) {
+                    mStopped = false;
+                    mSeeking = true;
+                    mBuffering = false;
+                    scheduleNotification(NOTIFY_SEEK, 0 /* delay */);
+                }
+            }
+        }
+
+        private synchronized void notifySeek() {
+            mSeeking = false;
+            try {
+                long timeUs = getCurrentTimeUs(true, false);
+                if (DEBUG) Log.d(TAG, "onSeekComplete at " + timeUs);
+
+                for (MediaTimeProvider.OnMediaTimeListener listener: mListeners) {
+                    if (listener == null) {
+                        break;
+                    }
+                    listener.onSeek(timeUs);
+                }
+            } catch (IllegalStateException e) {
+                // we should not be there, but at least signal pause
+                if (DEBUG) Log.d(TAG, "onSeekComplete but no player");
+                mPausing = true;  // special handling if player disappeared
+                notifyTimedEvent(false /* refreshTime */);
+            }
+        }
+
+        private synchronized void notifyTrackData(Pair<SubtitleTrack, byte[]> trackData) {
+            SubtitleTrack track = trackData.first;
+            byte[] data = trackData.second;
+            track.onData(data, true /* eos */, ~0 /* runID: keep forever */);
+        }
+
+        private synchronized void notifyStop() {
+            for (MediaTimeProvider.OnMediaTimeListener listener: mListeners) {
+                if (listener == null) {
+                    break;
+                }
+                listener.onStop();
+            }
+        }
+
+        /**
+         * Finds or creates a slot for the listener; returns its index.
+         * The arrays grow by one when the listener is new.
+         */
+        private int registerListener(MediaTimeProvider.OnMediaTimeListener listener) {
+            int i = 0;
+            for (; i < mListeners.length; i++) {
+                if (mListeners[i] == listener || mListeners[i] == null) {
+                    break;
+                }
+            }
+
+            // new listener
+            if (i >= mListeners.length) {
+                MediaTimeProvider.OnMediaTimeListener[] newListeners =
+                    new MediaTimeProvider.OnMediaTimeListener[i + 1];
+                long[] newTimes = new long[i + 1];
+                System.arraycopy(mListeners, 0, newListeners, 0, mListeners.length);
+                System.arraycopy(mTimes, 0, newTimes, 0, mTimes.length);
+                mListeners = newListeners;
+                mTimes = newTimes;
+            }
+
+            if (mListeners[i] == null) {
+                mListeners[i] = listener;
+                mTimes[i] = MediaTimeProvider.NO_TIME;
+            }
+            return i;
+        }
+
+        public void notifyAt(
+                long timeUs, MediaTimeProvider.OnMediaTimeListener listener) {
+            synchronized(this) {
+                if (DEBUG) Log.d(TAG, "notifyAt " + timeUs);
+                mTimes[registerListener(listener)] = timeUs;
+                scheduleNotification(NOTIFY_TIME, 0 /* delay */);
+            }
+        }
+
+        public void scheduleUpdate(MediaTimeProvider.OnMediaTimeListener listener) {
+            synchronized(this) {
+                if (DEBUG) Log.d(TAG, "scheduleUpdate");
+                int i = registerListener(listener);
+
+                if (!mStopped) {
+                    mTimes[i] = 0;
+                    scheduleNotification(NOTIFY_TIME, 0 /* delay */);
+                }
+            }
+        }
+
+        public void cancelNotifications(
+                MediaTimeProvider.OnMediaTimeListener listener) {
+            synchronized(this) {
+                int i = 0;
+                for (; i < mListeners.length; i++) {
+                    if (mListeners[i] == listener) {
+                        // compact the arrays over the removed slot
+                        System.arraycopy(mListeners, i + 1,
+                                mListeners, i, mListeners.length - i - 1);
+                        System.arraycopy(mTimes, i + 1,
+                                mTimes, i, mTimes.length - i - 1);
+                        mListeners[mListeners.length - 1] = null;
+                        mTimes[mTimes.length - 1] = NO_TIME;
+                        break;
+                    } else if (mListeners[i] == null) {
+                        break;
+                    }
+                }
+
+                scheduleNotification(NOTIFY_TIME, 0 /* delay */);
+            }
+        }
+
+        private synchronized void notifyTimedEvent(boolean refreshTime) {
+            // figure out next callback
+            long nowUs;
+            try {
+                nowUs = getCurrentTimeUs(refreshTime, true);
+            } catch (IllegalStateException e) {
+                // assume we paused until new player arrives
+                mRefresh = true;
+                mPausing = true;  // this ensures that call succeeds
+                nowUs = getCurrentTimeUs(refreshTime, true);
+            }
+            long nextTimeUs = nowUs;
+
+            if (mSeeking) {
+                // skip timed-event notifications until seek is complete
+                return;
+            }
+
+            if (DEBUG) {
+                StringBuilder sb = new StringBuilder();
+                sb.append("notifyTimedEvent(").append(mLastTimeUs).append(" -> ")
+                        .append(nowUs).append(") from {");
+                boolean first = true;
+                for (long time: mTimes) {
+                    if (time == NO_TIME) {
+                        continue;
+                    }
+                    if (!first) sb.append(", ");
+                    sb.append(time);
+                    first = false;
+                }
+                sb.append("}");
+                Log.d(TAG, sb.toString());
+            }
+
+            Vector<MediaTimeProvider.OnMediaTimeListener> activatedListeners =
+                new Vector<MediaTimeProvider.OnMediaTimeListener>();
+            for (int ix = 0; ix < mTimes.length; ix++) {
+                if (mListeners[ix] == null) {
+                    break;
+                }
+                if (mTimes[ix] <= NO_TIME) {
+                    // ignore, unless we were stopped
+                } else if (mTimes[ix] <= nowUs + MAX_EARLY_CALLBACK_US) {
+                    activatedListeners.add(mListeners[ix]);
+                    if (DEBUG) Log.d(TAG, "removed");
+                    mTimes[ix] = NO_TIME;
+                } else if (nextTimeUs == nowUs || mTimes[ix] < nextTimeUs) {
+                    nextTimeUs = mTimes[ix];
+                }
+            }
+
+            if (nextTimeUs > nowUs && !mPaused) {
+                // schedule callback at nextTimeUs
+                if (DEBUG) Log.d(TAG, "scheduling for " + nextTimeUs + " and " + nowUs);
+                scheduleNotification(NOTIFY_TIME, nextTimeUs - nowUs);
+            } else {
+                mEventHandler.removeMessages(NOTIFY);
+                // no more callbacks
+            }
+
+            for (MediaTimeProvider.OnMediaTimeListener listener: activatedListeners) {
+                listener.onTimedEvent(nowUs);
+            }
+        }
+
+        /**
+         * Extrapolates the current media time from the last polled position
+         * using the wall clock, gradually unwinding any positive adjustment
+         * that keeps reported time monotonic.
+         */
+        private long getEstimatedTime(long nanoTime, boolean monotonic) {
+            if (mPaused) {
+                mLastReportedTime = mLastTimeUs + mTimeAdjustment;
+            } else {
+                long timeSinceRead = (nanoTime - mLastNanoTime) / 1000;
+                mLastReportedTime = mLastTimeUs + timeSinceRead;
+                if (mTimeAdjustment > 0) {
+                    long adjustment =
+                        mTimeAdjustment - timeSinceRead / TIME_ADJUSTMENT_RATE;
+                    if (adjustment <= 0) {
+                        mTimeAdjustment = 0;
+                    } else {
+                        mLastReportedTime += adjustment;
+                    }
+                }
+            }
+            return mLastReportedTime;
+        }
+
+        public long getCurrentTimeUs(boolean refreshTime, boolean monotonic)
+                throws IllegalStateException {
+            synchronized (this) {
+                // we always refresh the time when the paused-state changes, because
+                // we expect to have received the pause-change event delayed.
+                if (mPaused && !refreshTime) {
+                    return mLastReportedTime;
+                }
+
+                long nanoTime = System.nanoTime();
+                if (refreshTime ||
+                        nanoTime >= mLastNanoTime + MAX_NS_WITHOUT_POSITION_CHECK) {
+                    try {
+                        mLastTimeUs = mPlayer.getCurrentPosition() * 1000L;
+                        mPaused = !mPlayer.isPlaying() || mBuffering;
+                        if (DEBUG) Log.v(TAG, (mPaused ? "paused" : "playing") + " at " + mLastTimeUs);
+                    } catch (IllegalStateException e) {
+                        if (mPausing) {
+                            // if we were pausing, get last estimated timestamp
+                            mPausing = false;
+                            getEstimatedTime(nanoTime, monotonic);
+                            mPaused = true;
+                            if (DEBUG) Log.d(TAG, "illegal state, but pausing: estimating at " + mLastReportedTime);
+                            return mLastReportedTime;
+                        }
+                        // TODO get time when prepared
+                        throw e;
+                    }
+                    mLastNanoTime = nanoTime;
+                    if (monotonic && mLastTimeUs < mLastReportedTime) {
+                        /* have to adjust time */
+                        mTimeAdjustment = mLastReportedTime - mLastTimeUs;
+                        if (mTimeAdjustment > 1000000) {
+                            // schedule seeked event if time jumped significantly
+                            // TODO: do this properly by introducing an exception
+                            mStopped = false;
+                            mSeeking = true;
+                            scheduleNotification(NOTIFY_SEEK, 0 /* delay */);
+                        }
+                    } else {
+                        mTimeAdjustment = 0;
+                    }
+                }
+
+                return getEstimatedTime(nanoTime, monotonic);
+            }
+        }
+
+        private class EventHandler extends Handler {
+            public EventHandler(Looper looper) {
+                super(looper);
+            }
+
+            @Override
+            @SuppressWarnings("unchecked") // msg.obj is always a Pair<SubtitleTrack, byte[]>
+            public void handleMessage(Message msg) {
+                if (msg.what == NOTIFY) {
+                    switch (msg.arg1) {
+                    case NOTIFY_TIME:
+                        notifyTimedEvent(false /* refreshTime */);
+                        break;
+                    case REFRESH_AND_NOTIFY_TIME:
+                        notifyTimedEvent(true /* refreshTime */);
+                        break;
+                    case NOTIFY_STOP:
+                        notifyStop();
+                        break;
+                    case NOTIFY_SEEK:
+                        notifySeek();
+                        break;
+                    case NOTIFY_TRACK_DATA:
+                        notifyTrackData((Pair<SubtitleTrack, byte[]>)msg.obj);
+                        break;
+                    }
+                }
+            }
+        }
+    }
+
+    /**
+     * Namespace for the metrics keys that can be read out of the
+     * {@link MediaPlayer#getMetrics} return value. Not instantiable.
+     */
+    public final static class MetricsConstants
+    {
+        private MetricsConstants() {}
+
+        /** {@link MediaPlayer#getMetrics} key: MIME type of the video track. String value. */
+        public static final String MIME_TYPE_VIDEO = "android.media.mediaplayer.video.mime";
+
+        /** {@link MediaPlayer#getMetrics} key: codec decoding the video track. String value. */
+        public static final String CODEC_VIDEO = "android.media.mediaplayer.video.codec";
+
+        /** {@link MediaPlayer#getMetrics} key: width of the video track, in pixels. Integer value. */
+        public static final String WIDTH = "android.media.mediaplayer.width";
+
+        /** {@link MediaPlayer#getMetrics} key: height of the video track, in pixels. Integer value. */
+        public static final String HEIGHT = "android.media.mediaplayer.height";
+
+        /** {@link MediaPlayer#getMetrics} key: count of video frames played. Integer value. */
+        public static final String FRAMES = "android.media.mediaplayer.frames";
+
+        /** {@link MediaPlayer#getMetrics} key: count of video frames dropped. Integer value. */
+        public static final String FRAMES_DROPPED = "android.media.mediaplayer.dropped";
+
+        /** {@link MediaPlayer#getMetrics} key: MIME type of the audio track. String value. */
+        public static final String MIME_TYPE_AUDIO = "android.media.mediaplayer.audio.mime";
+
+        /** {@link MediaPlayer#getMetrics} key: codec decoding the audio track. String value. */
+        public static final String CODEC_AUDIO = "android.media.mediaplayer.audio.codec";
+
+        /** {@link MediaPlayer#getMetrics} key: duration of the media, in milliseconds. Long value. */
+        public static final String DURATION = "android.media.mediaplayer.durationMs";
+
+        /** {@link MediaPlayer#getMetrics} key: playing time of the media, in milliseconds. Long value. */
+        public static final String PLAYING = "android.media.mediaplayer.playingMs";
+
+        /** {@link MediaPlayer#getMetrics} key: count of errors encountered during playback. Integer value. */
+        public static final String ERRORS = "android.media.mediaplayer.err";
+
+        /** {@link MediaPlayer#getMetrics} key: optional error code detected during playback. Integer value. */
+        public static final String ERROR_CODE = "android.media.mediaplayer.errcode";
+
+    }
+}
diff --git a/android/media/MediaRecorder.java b/android/media/MediaRecorder.java
new file mode 100644
index 00000000..59a124fa
--- /dev/null
+++ b/android/media/MediaRecorder.java
@@ -0,0 +1,1466 @@
+/*
+ * Copyright (C) 2007 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.NonNull;
+import android.annotation.SystemApi;
+import android.app.ActivityThread;
+import android.hardware.Camera;
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Message;
+import android.os.PersistableBundle;
+import android.util.Log;
+import android.view.Surface;
+
+import java.io.File;
+import java.io.FileDescriptor;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.lang.ref.WeakReference;
+
+/**
+ * Used to record audio and video. The recording control is based on a
+ * simple state machine (see below).
+ *
+ * <p><img src="{@docRoot}images/mediarecorder_state_diagram.gif" border="0" />
+ * </p>
+ *
+ * <p>A common case of using MediaRecorder to record audio works as follows:
+ *
+ * <pre>MediaRecorder recorder = new MediaRecorder();
+ * recorder.setAudioSource(MediaRecorder.AudioSource.MIC);
+ * recorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
+ * recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
+ * recorder.setOutputFile(PATH_NAME);
+ * recorder.prepare();
+ * recorder.start(); // Recording is now started
+ * ...
+ * recorder.stop();
+ * recorder.reset(); // You can reuse the object by going back to setAudioSource() step
+ * recorder.release(); // Now the object cannot be reused
+ * </pre>
+ *
+ * <p>Applications may want to register for informational and error
+ * events in order to be informed of some internal update and possible
+ * runtime errors during recording. Registration for such events is
+ * done by setting the appropriate listeners (via calls to
+ * {@link #setOnInfoListener(OnInfoListener)} and/or
+ * {@link #setOnErrorListener(OnErrorListener)}).
+ * In order to receive the respective callback associated with these listeners,
+ * applications are required to create MediaRecorder objects on threads with a
+ * Looper running (the main UI thread by default already has a Looper running).
+ *
+ * <p><strong>Note:</strong> Currently, MediaRecorder does not work on the emulator.
+ *
+ * <div class="special reference">
+ * <h3>Developer Guides</h3>
+ * <p>For more information about how to use MediaRecorder for recording video, read the
+ * <a href="{@docRoot}guide/topics/media/camera.html#capture-video">Camera</a> developer guide.
+ * For more information about how to use MediaRecorder for recording sound, read the
+ * <a href="{@docRoot}guide/topics/media/audio-capture.html">Audio Capture</a> developer guide.</p>
+ * </div>
+ */
+public class MediaRecorder
+{
+    // Load the native library and bind native methods before any instance exists.
+    static {
+        System.loadLibrary("media_jni");
+        native_init();
+    }
+    private final static String TAG = "MediaRecorder";
+
+    // The two fields below are accessed by native methods
+    @SuppressWarnings("unused")
+    private long mNativeContext;
+
+    @SuppressWarnings("unused")
+    private Surface mSurface;
+
+    // Output destination: exactly one of path / file descriptor / File is used.
+    private String mPath;
+    private FileDescriptor mFd;
+    private File mFile;
+    // Dispatches native events to the listeners below; null when no Looper exists.
+    private EventHandler mEventHandler;
+    private OnErrorListener mOnErrorListener;
+    private OnInfoListener mOnInfoListener;
+
+    /**
+     * Default constructor. Events are delivered on the creating thread's
+     * Looper when available, otherwise on the main Looper; with neither,
+     * no event handler is installed.
+     */
+    public MediaRecorder() {
+
+        Looper looper = Looper.myLooper();
+        if (looper == null) {
+            looper = Looper.getMainLooper();
+        }
+        mEventHandler = (looper != null) ? new EventHandler(this, looper) : null;
+
+        String packageName = ActivityThread.currentPackageName();
+        /* Native setup requires a weak reference to our object.
+         * It's easier to create it here than in C++.
+         */
+        native_setup(new WeakReference<MediaRecorder>(this), packageName,
+                ActivityThread.currentOpPackageName());
+    }
+
+ /**
+ * Sets a {@link android.hardware.Camera} to use for recording.
+ *
+ * <p>Use this function to switch quickly between preview and capture mode without a teardown of
+ * the camera object. {@link android.hardware.Camera#unlock()} should be called before
+ * this. Must call before {@link #prepare}.</p>
+ *
+ * @param c the Camera to use for recording
+ * @deprecated Use {@link #getSurface} and the {@link android.hardware.camera2} API instead.
+ */
+ @Deprecated
+ public native void setCamera(Camera c);
+
+ /**
+ * Gets the surface to record from when using SURFACE video source.
+ *
+ * <p> May only be called after {@link #prepare}. Frames rendered to the Surface before
+ * {@link #start} will be discarded.</p>
+ *
+ * @throws IllegalStateException if it is called before {@link #prepare}, after
+ * {@link #stop}, or is called when VideoSource is not set to SURFACE.
+ * @see android.media.MediaRecorder.VideoSource
+ */
+ public native Surface getSurface();
+
+    /**
+     * Configures the recorder to use a persistent surface when using SURFACE video source.
+     * <p> May only be called before {@link #prepare}. If called, {@link #getSurface} should
+     * not be used and will throw IllegalStateException. Frames rendered to the Surface
+     * before {@link #start} will be discarded.</p>
+
+     * @param surface a persistent input surface created by
+     *           {@link MediaCodec#createPersistentInputSurface}
+     * @throws IllegalStateException if it is called after {@link #prepare} and before
+     * {@link #stop}.
+     * @throws IllegalArgumentException if the surface was not created by
+     *           {@link MediaCodec#createPersistentInputSurface}.
+     * @see MediaCodec#createPersistentInputSurface
+     * @see MediaRecorder.VideoSource
+     */
+    public void setInputSurface(@NonNull Surface surface) {
+        // only surfaces minted by MediaCodec.createPersistentInputSurface qualify
+        final boolean isPersistent = surface instanceof MediaCodec.PersistentSurface;
+        if (!isPersistent) {
+            throw new IllegalArgumentException("not a PersistentSurface");
+        }
+        native_setInputSurface(surface);
+    }
+
+ private native final void native_setInputSurface(@NonNull Surface surface);
+
+    /**
+     * Sets a Surface to show a preview of recorded media (video). Calls this
+     * before prepare() to make sure that the desirable preview display is
+     * set. If {@link #setCamera(Camera)} is used and the surface has been
+     * already set to the camera, application do not need to call this. If
+     * this is called with non-null surface, the preview surface of the camera
+     * will be replaced by the new surface. If this method is called with null
+     * surface or not called at all, media recorder will not change the preview
+     * surface of the camera.
+     *
+     * @param sv the Surface to use for the preview
+     * @see android.hardware.Camera#setPreviewDisplay(android.view.SurfaceHolder)
+     */
+    public void setPreviewDisplay(Surface sv) {
+        // mSurface is read by the native layer (see the field's comment above).
+        mSurface = sv;
+    }
+
+    /**
+     * Defines the audio source.
+     * An audio source defines both a default physical source of audio signal, and a recording
+     * configuration. These constants are for instance used
+     * in {@link MediaRecorder#setAudioSource(int)} or
+     * {@link AudioRecord.Builder#setAudioSource(int)}.
+     */
+    public final class AudioSource {
+
+        private AudioSource() {}
+
+        /** @hide */
+        public final static int AUDIO_SOURCE_INVALID = -1;
+
+        /* Do not change these values without updating their counterparts
+         * in system/media/audio/include/system/audio.h!
+         */
+
+        /** Default audio source **/
+        public static final int DEFAULT = 0;
+
+        /** Microphone audio source */
+        public static final int MIC = 1;
+
+        /** Voice call uplink (Tx) audio source.
+         * <p>
+         * Capturing from <code>VOICE_UPLINK</code> source requires the
+         * {@link android.Manifest.permission#CAPTURE_AUDIO_OUTPUT} permission.
+         * This permission is reserved for use by system components and is not available to
+         * third-party applications.
+         * </p>
+         */
+        public static final int VOICE_UPLINK = 2;
+
+        /** Voice call downlink (Rx) audio source.
+         * <p>
+         * Capturing from <code>VOICE_DOWNLINK</code> source requires the
+         * {@link android.Manifest.permission#CAPTURE_AUDIO_OUTPUT} permission.
+         * This permission is reserved for use by system components and is not available to
+         * third-party applications.
+         * </p>
+         */
+        public static final int VOICE_DOWNLINK = 3;
+
+        /** Voice call uplink + downlink audio source
+         * <p>
+         * Capturing from <code>VOICE_CALL</code> source requires the
+         * {@link android.Manifest.permission#CAPTURE_AUDIO_OUTPUT} permission.
+         * This permission is reserved for use by system components and is not available to
+         * third-party applications.
+         * </p>
+         */
+        public static final int VOICE_CALL = 4;
+
+        /** Microphone audio source tuned for video recording, with the same orientation
+         *  as the camera if available. */
+        public static final int CAMCORDER = 5;
+
+        /** Microphone audio source tuned for voice recognition. */
+        public static final int VOICE_RECOGNITION = 6;
+
+        /** Microphone audio source tuned for voice communications such as VoIP. It
+         *  will for instance take advantage of echo cancellation or automatic gain control
+         *  if available.
+         */
+        public static final int VOICE_COMMUNICATION = 7;
+
+        /**
+         * Audio source for a submix of audio streams to be presented remotely.
+         * <p>
+         * An application can use this audio source to capture a mix of audio streams
+         * that should be transmitted to a remote receiver such as a Wifi display.
+         * While recording is active, these audio streams are redirected to the remote
+         * submix instead of being played on the device speaker or headset.
+         * </p><p>
+         * Certain streams are excluded from the remote submix, including
+         * {@link AudioManager#STREAM_RING}, {@link AudioManager#STREAM_ALARM},
+         * and {@link AudioManager#STREAM_NOTIFICATION}.  These streams will continue
+         * to be presented locally as usual.
+         * </p><p>
+         * Capturing the remote submix audio requires the
+         * {@link android.Manifest.permission#CAPTURE_AUDIO_OUTPUT} permission.
+         * This permission is reserved for use by system components and is not available to
+         * third-party applications.
+         * </p>
+         */
+        public static final int REMOTE_SUBMIX = 8;
+
+        /** Microphone audio source tuned for unprocessed (raw) sound if available, behaves like
+         *  {@link #DEFAULT} otherwise. */
+        public static final int UNPROCESSED = 9;
+
+        // The two sources below deliberately sit far above the contiguous public
+        // range [0..UNPROCESSED] so that getAudioSourceMax() excludes them.
+
+        /**
+         * Audio source for capturing broadcast radio tuner output.
+         * @hide
+         */
+        @SystemApi
+        public static final int RADIO_TUNER = 1998;
+
+        /**
+         * Audio source for preemptible, low-priority software hotword detection
+         * It presents the same gain and pre processing tuning as {@link #VOICE_RECOGNITION}.
+         * <p>
+         * An application should use this audio source when it wishes to do
+         * always-on software hotword detection, while gracefully giving in to any other application
+         * that might want to read from the microphone.
+         * </p>
+         * This is a hidden audio source.
+         * @hide
+         */
+        @SystemApi
+        public static final int HOTWORD = 1999;
+    }
+
+ // TODO make AudioSource static (API change) and move this method inside the AudioSource class
+    /**
+     * @hide
+     * @param source An audio source to test
+     * @return true if the source is only visible to system components
+     */
+    public static boolean isSystemOnlyAudioSource(int source) {
+        // Sources available to third-party applications; anything not listed
+        // here is considered system-only.
+        // REMOTE_SUBMIX is deliberately absent: it is considered "system" as it
+        // requires system permissions.
+        final int[] publicSources = {
+                AudioSource.DEFAULT,
+                AudioSource.MIC,
+                AudioSource.VOICE_UPLINK,
+                AudioSource.VOICE_DOWNLINK,
+                AudioSource.VOICE_CALL,
+                AudioSource.CAMCORDER,
+                AudioSource.VOICE_RECOGNITION,
+                AudioSource.VOICE_COMMUNICATION,
+                AudioSource.UNPROCESSED,
+        };
+        for (int publicSource : publicSources) {
+            if (source == publicSource) {
+                return false;
+            }
+        }
+        return true;
+    }
+
+    /** @hide */
+    public static final String toLogFriendlyAudioSource(int source) {
+        // Maps each known source constant to its name for logging.
+        switch (source) {
+            case AudioSource.DEFAULT:              return "DEFAULT";
+            case AudioSource.MIC:                  return "MIC";
+            case AudioSource.VOICE_UPLINK:         return "VOICE_UPLINK";
+            case AudioSource.VOICE_DOWNLINK:       return "VOICE_DOWNLINK";
+            case AudioSource.VOICE_CALL:           return "VOICE_CALL";
+            case AudioSource.CAMCORDER:            return "CAMCORDER";
+            case AudioSource.VOICE_RECOGNITION:    return "VOICE_RECOGNITION";
+            case AudioSource.VOICE_COMMUNICATION:  return "VOICE_COMMUNICATION";
+            case AudioSource.REMOTE_SUBMIX:        return "REMOTE_SUBMIX";
+            case AudioSource.UNPROCESSED:          return "UNPROCESSED";
+            case AudioSource.RADIO_TUNER:          return "RADIO_TUNER";
+            case AudioSource.HOTWORD:              return "HOTWORD";
+            case AudioSource.AUDIO_SOURCE_INVALID: return "AUDIO_SOURCE_INVALID";
+        }
+        return "unknown source " + source;
+    }
+
+    /**
+     * Defines the video source. These constants are used with
+     * {@link MediaRecorder#setVideoSource(int)}.
+     */
+    public final class VideoSource {
+        /* Do not change these values without updating their counterparts
+         * in include/media/mediarecorder.h!
+         */
+        private VideoSource() {}
+        /** Default video source */
+        public static final int DEFAULT = 0;
+        /** Camera video source
+         * <p>
+         * Using the {@link android.hardware.Camera} API as video source.
+         * </p>
+         */
+        public static final int CAMERA = 1;
+        /** Surface video source
+         * <p>
+         * Using a Surface as video source.
+         * </p><p>
+         * This flag must be used when recording from an
+         * {@link android.hardware.camera2} API source.
+         * </p><p>
+         * When using this video source type, use {@link MediaRecorder#getSurface()}
+         * to retrieve the surface created by MediaRecorder.
+         * </p>
+         */
+        public static final int SURFACE = 2;
+    }
+
+    /**
+     * Defines the output format. These constants are used with
+     * {@link MediaRecorder#setOutputFormat(int)}.
+     */
+    public final class OutputFormat {
+        /* Do not change these values without updating their counterparts
+         * in include/media/mediarecorder.h!
+         */
+        private OutputFormat() {}
+        /** Default output format */
+        public static final int DEFAULT = 0;
+        /** 3GPP media file format*/
+        public static final int THREE_GPP = 1;
+        /** MPEG4 media file format*/
+        public static final int MPEG_4 = 2;
+
+        /** The following formats are audio only .aac or .amr formats */
+
+        /**
+         * AMR NB file format
+         * @deprecated  Deprecated in favor of MediaRecorder.OutputFormat.AMR_NB
+         */
+        @Deprecated  // annotation added to match the javadoc @deprecated tag
+        public static final int RAW_AMR = 3;
+
+        /** AMR NB file format */
+        public static final int AMR_NB = 3;
+
+        /** AMR WB file format */
+        public static final int AMR_WB = 4;
+
+        /** @hide AAC ADIF file format */
+        public static final int AAC_ADIF = 5;
+
+        /** AAC ADTS file format */
+        public static final int AAC_ADTS = 6;
+
+        /** @hide Stream over a socket, limited to a single stream */
+        public static final int OUTPUT_FORMAT_RTP_AVP = 7;
+
+        /** H.264/AAC data encapsulated in MPEG2/TS */
+        public static final int MPEG_2_TS = 8;
+
+        /** VP8/VORBIS data in a WEBM container */
+        public static final int WEBM = 9;
+    }
+
+    /**
+     * Defines the audio encoding. These constants are used with
+     * {@link MediaRecorder#setAudioEncoder(int)}.
+     */
+    public final class AudioEncoder {
+        /* Do not change these values without updating their counterparts
+         * in include/media/mediarecorder.h!
+         */
+        private AudioEncoder() {}
+        /** Default audio encoder */
+        public static final int DEFAULT = 0;
+        /** AMR (Narrowband) audio codec */
+        public static final int AMR_NB = 1;
+        /** AMR (Wideband) audio codec */
+        public static final int AMR_WB = 2;
+        /** AAC Low Complexity (AAC-LC) audio codec */
+        public static final int AAC = 3;
+        /** High Efficiency AAC (HE-AAC) audio codec */
+        public static final int HE_AAC = 4;
+        /** Enhanced Low Delay AAC (AAC-ELD) audio codec */
+        public static final int AAC_ELD = 5;
+        /** Ogg Vorbis audio codec */
+        public static final int VORBIS = 6;
+    }
+
    /**
     * Defines the video encoding. These constants are used with
     * {@link MediaRecorder#setVideoEncoder(int)}.
     */
    public final class VideoEncoder {
        /* Do not change these values without updating their counterparts
         * in include/media/mediarecorder.h!
         */
        private VideoEncoder() {}
        /** Default video codec, resolved by the platform. */
        public static final int DEFAULT = 0;
        /** H.263 video codec */
        public static final int H263 = 1;
        /** H.264/AVC video codec */
        public static final int H264 = 2;
        /** MPEG-4 Simple Profile video codec */
        public static final int MPEG_4_SP = 3;
        /** VP8 video codec */
        public static final int VP8 = 4;
        /** HEVC (H.265) video codec */
        public static final int HEVC = 5;
    }
+
    /**
     * Sets the audio source to be used for recording. If this method is not
     * called, the output file will not contain an audio track. The source needs
     * to be specified before setting recording-parameters or encoders. Call
     * this only before setOutputFormat().
     *
     * <p>Implemented in native code.
     *
     * @param audio_source the audio source to use
     * @throws IllegalStateException if it is called after setOutputFormat()
     * @see android.media.MediaRecorder.AudioSource
     */
    public native void setAudioSource(int audio_source)
            throws IllegalStateException;
+
    /**
     * Gets the maximum value for audio sources.
     *
     * <p>Currently this is {@code AudioSource.UNPROCESSED}, the highest-valued
     * public audio source; update this if a new source is added.
     *
     * @see android.media.MediaRecorder.AudioSource
     */
    public static final int getAudioSourceMax() {
        return AudioSource.UNPROCESSED;
    }
+
    /**
     * Sets the video source to be used for recording. If this method is not
     * called, the output file will not contain a video track. The source needs
     * to be specified before setting recording-parameters or encoders. Call
     * this only before setOutputFormat().
     *
     * <p>Implemented in native code.
     *
     * @param video_source the video source to use
     * @throws IllegalStateException if it is called after setOutputFormat()
     * @see android.media.MediaRecorder.VideoSource
     */
    public native void setVideoSource(int video_source)
            throws IllegalStateException;
+
+ /**
+ * Uses the settings from a CamcorderProfile object for recording. This method should
+ * be called after the video AND audio sources are set, and before setOutputFile().
+ * If a time lapse CamcorderProfile is used, audio related source or recording
+ * parameters are ignored.
+ *
+ * @param profile the CamcorderProfile to use
+ * @see android.media.CamcorderProfile
+ */
+ public void setProfile(CamcorderProfile profile) {
+ setOutputFormat(profile.fileFormat);
+ setVideoFrameRate(profile.videoFrameRate);
+ setVideoSize(profile.videoFrameWidth, profile.videoFrameHeight);
+ setVideoEncodingBitRate(profile.videoBitRate);
+ setVideoEncoder(profile.videoCodec);
+ if (profile.quality >= CamcorderProfile.QUALITY_TIME_LAPSE_LOW &&
+ profile.quality <= CamcorderProfile.QUALITY_TIME_LAPSE_QVGA) {
+ // Nothing needs to be done. Call to setCaptureRate() enables
+ // time lapse video recording.
+ } else {
+ setAudioEncodingBitRate(profile.audioBitRate);
+ setAudioChannels(profile.audioChannels);
+ setAudioSamplingRate(profile.audioSampleRate);
+ setAudioEncoder(profile.audioCodec);
+ }
+ }
+
+ /**
+ * Set video frame capture rate. This can be used to set a different video frame capture
+ * rate than the recorded video's playback rate. This method also sets the recording mode
+ * to time lapse. In time lapse video recording, only video is recorded. Audio related
+ * parameters are ignored when a time lapse recording session starts, if an application
+ * sets them.
+ *
+ * @param fps Rate at which frames should be captured in frames per second.
+ * The fps can go as low as desired. However the fastest fps will be limited by the hardware.
+ * For resolutions that can be captured by the video camera, the fastest fps can be computed using
+ * {@link android.hardware.Camera.Parameters#getPreviewFpsRange(int[])}. For higher
+ * resolutions the fastest fps may be more restrictive.
+ * Note that the recorder cannot guarantee that frames will be captured at the
+ * given rate due to camera/encoder limitations. However it tries to be as close as
+ * possible.
+ */
+ public void setCaptureRate(double fps) {
+ // Make sure that time lapse is enabled when this method is called.
+ setParameter("time-lapse-enable=1");
+ setParameter("time-lapse-fps=" + fps);
+ }
+
+ /**
+ * Sets the orientation hint for output video playback.
+ * This method should be called before prepare(). This method will not
+ * trigger the source video frame to rotate during video recording, but to
+ * add a composition matrix containing the rotation angle in the output
+ * video if the output format is OutputFormat.THREE_GPP or
+ * OutputFormat.MPEG_4 so that a video player can choose the proper
+ * orientation for playback. Note that some video players may choose
+ * to ignore the compostion matrix in a video during playback.
+ *
+ * @param degrees the angle to be rotated clockwise in degrees.
+ * The supported angles are 0, 90, 180, and 270 degrees.
+ * @throws IllegalArgumentException if the angle is not supported.
+ *
+ */
+ public void setOrientationHint(int degrees) {
+ if (degrees != 0 &&
+ degrees != 90 &&
+ degrees != 180 &&
+ degrees != 270) {
+ throw new IllegalArgumentException("Unsupported angle: " + degrees);
+ }
+ setParameter("video-param-rotation-angle-degrees=" + degrees);
+ }
+
+ /**
+ * Set and store the geodata (latitude and longitude) in the output file.
+ * This method should be called before prepare(). The geodata is
+ * stored in udta box if the output format is OutputFormat.THREE_GPP
+ * or OutputFormat.MPEG_4, and is ignored for other output formats.
+ * The geodata is stored according to ISO-6709 standard.
+ *
+ * @param latitude latitude in degrees. Its value must be in the
+ * range [-90, 90].
+ * @param longitude longitude in degrees. Its value must be in the
+ * range [-180, 180].
+ *
+ * @throws IllegalArgumentException if the given latitude or
+ * longitude is out of range.
+ *
+ */
+ public void setLocation(float latitude, float longitude) {
+ int latitudex10000 = (int) (latitude * 10000 + 0.5);
+ int longitudex10000 = (int) (longitude * 10000 + 0.5);
+
+ if (latitudex10000 > 900000 || latitudex10000 < -900000) {
+ String msg = "Latitude: " + latitude + " out of range.";
+ throw new IllegalArgumentException(msg);
+ }
+ if (longitudex10000 > 1800000 || longitudex10000 < -1800000) {
+ String msg = "Longitude: " + longitude + " out of range";
+ throw new IllegalArgumentException(msg);
+ }
+
+ setParameter("param-geotag-latitude=" + latitudex10000);
+ setParameter("param-geotag-longitude=" + longitudex10000);
+ }
+
    /**
     * Sets the format of the output file produced during recording. Call this
     * after setAudioSource()/setVideoSource() but before prepare().
     *
     * <p>It is recommended to always use 3GP format when using the H.263
     * video encoder and AMR audio encoder. Using an MPEG-4 container format
     * may confuse some desktop players.</p>
     *
     * <p>Implemented in native code.
     *
     * @param output_format the output format to use. The output format
     * needs to be specified before setting recording-parameters or encoders.
     * @throws IllegalStateException if it is called after prepare() or before
     * setAudioSource()/setVideoSource().
     * @see android.media.MediaRecorder.OutputFormat
     */
    public native void setOutputFormat(int output_format)
            throws IllegalStateException;
+
    /**
     * Sets the width and height of the video to be captured. Must be called
     * after setVideoSource(). Call this after setOutputFormat() but before
     * prepare().
     *
     * <p>Implemented in native code.
     *
     * @param width the width of the video to be captured
     * @param height the height of the video to be captured
     * @throws IllegalStateException if it is called after
     * prepare() or before setOutputFormat()
     */
    public native void setVideoSize(int width, int height)
            throws IllegalStateException;
+
    /**
     * Sets the frame rate of the video to be captured. Must be called
     * after setVideoSource(). Call this after setOutputFormat() but before
     * prepare().
     *
     * <p>Implemented in native code.
     *
     * @param rate the number of frames per second of video to capture
     * @throws IllegalStateException if it is called after
     * prepare() or before setOutputFormat().
     *
     * NOTE: On some devices that have auto-frame rate, this sets the
     * maximum frame rate, not a constant frame rate. Actual frame rate
     * will vary according to lighting conditions.
     */
    public native void setVideoFrameRate(int rate) throws IllegalStateException;
+
    /**
     * Sets the maximum duration (in ms) of the recording session.
     * Call this after setOutputFormat() but before prepare().
     * After recording reaches the specified duration, a notification
     * will be sent to the {@link android.media.MediaRecorder.OnInfoListener}
     * with a "what" code of {@link #MEDIA_RECORDER_INFO_MAX_DURATION_REACHED}
     * and recording will be stopped. Stopping happens asynchronously, there
     * is no guarantee that the recorder will have stopped by the time the
     * listener is notified.
     *
     * <p>Implemented in native code.
     *
     * @param max_duration_ms the maximum duration in ms (if zero or negative, disables the duration limit)
     *
     */
    public native void setMaxDuration(int max_duration_ms) throws IllegalArgumentException;
+
    /**
     * Sets the maximum filesize (in bytes) of the recording session.
     * Call this after setOutputFormat() but before prepare().
     * After recording reaches the specified filesize, a notification
     * will be sent to the {@link android.media.MediaRecorder.OnInfoListener}
     * with a "what" code of {@link #MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED}
     * and recording will be stopped. Stopping happens asynchronously, there
     * is no guarantee that the recorder will have stopped by the time the
     * listener is notified.
     *
     * <p>Implemented in native code.
     *
     * @param max_filesize_bytes the maximum filesize in bytes (if zero or negative, disables the limit)
     *
     */
    public native void setMaxFileSize(long max_filesize_bytes) throws IllegalArgumentException;
+
    /**
     * Sets the audio encoder to be used for recording. If this method is not
     * called, the output file will not contain an audio track. Call this after
     * setOutputFormat() but before prepare().
     *
     * <p>Implemented in native code.
     *
     * @param audio_encoder the audio encoder to use.
     * @throws IllegalStateException if it is called before
     * setOutputFormat() or after prepare().
     * @see android.media.MediaRecorder.AudioEncoder
     */
    public native void setAudioEncoder(int audio_encoder)
            throws IllegalStateException;
+
    /**
     * Sets the video encoder to be used for recording. If this method is not
     * called, the output file will not contain a video track. Call this after
     * setOutputFormat() and before prepare().
     *
     * <p>Implemented in native code.
     *
     * @param video_encoder the video encoder to use.
     * @throws IllegalStateException if it is called before
     * setOutputFormat() or after prepare()
     * @see android.media.MediaRecorder.VideoEncoder
     */
    public native void setVideoEncoder(int video_encoder)
            throws IllegalStateException;
+
+ /**
+ * Sets the audio sampling rate for recording. Call this method before prepare().
+ * Prepare() may perform additional checks on the parameter to make sure whether
+ * the specified audio sampling rate is applicable. The sampling rate really depends
+ * on the format for the audio recording, as well as the capabilities of the platform.
+ * For instance, the sampling rate supported by AAC audio coding standard ranges
+ * from 8 to 96 kHz, the sampling rate supported by AMRNB is 8kHz, and the sampling
+ * rate supported by AMRWB is 16kHz. Please consult with the related audio coding
+ * standard for the supported audio sampling rate.
+ *
+ * @param samplingRate the sampling rate for audio in samples per second.
+ */
+ public void setAudioSamplingRate(int samplingRate) {
+ if (samplingRate <= 0) {
+ throw new IllegalArgumentException("Audio sampling rate is not positive");
+ }
+ setParameter("audio-param-sampling-rate=" + samplingRate);
+ }
+
+ /**
+ * Sets the number of audio channels for recording. Call this method before prepare().
+ * Prepare() may perform additional checks on the parameter to make sure whether the
+ * specified number of audio channels are applicable.
+ *
+ * @param numChannels the number of audio channels. Usually it is either 1 (mono) or 2
+ * (stereo).
+ */
+ public void setAudioChannels(int numChannels) {
+ if (numChannels <= 0) {
+ throw new IllegalArgumentException("Number of channels is not positive");
+ }
+ setParameter("audio-param-number-of-channels=" + numChannels);
+ }
+
+ /**
+ * Sets the audio encoding bit rate for recording. Call this method before prepare().
+ * Prepare() may perform additional checks on the parameter to make sure whether the
+ * specified bit rate is applicable, and sometimes the passed bitRate will be clipped
+ * internally to ensure the audio recording can proceed smoothly based on the
+ * capabilities of the platform.
+ *
+ * @param bitRate the audio encoding bit rate in bits per second.
+ */
+ public void setAudioEncodingBitRate(int bitRate) {
+ if (bitRate <= 0) {
+ throw new IllegalArgumentException("Audio encoding bit rate is not positive");
+ }
+ setParameter("audio-param-encoding-bitrate=" + bitRate);
+ }
+
+ /**
+ * Sets the video encoding bit rate for recording. Call this method before prepare().
+ * Prepare() may perform additional checks on the parameter to make sure whether the
+ * specified bit rate is applicable, and sometimes the passed bitRate will be
+ * clipped internally to ensure the video recording can proceed smoothly based on
+ * the capabilities of the platform.
+ *
+ * @param bitRate the video encoding bit rate in bits per second.
+ */
+ public void setVideoEncodingBitRate(int bitRate) {
+ if (bitRate <= 0) {
+ throw new IllegalArgumentException("Video encoding bit rate is not positive");
+ }
+ setParameter("video-param-encoding-bitrate=" + bitRate);
+ }
+
+ /**
+ * Sets the desired video encoding profile and level for recording. The profile and level
+ * must be valid for the video encoder set by {@link #setVideoEncoder}. This method can
+ * called before or after {@link #setVideoEncoder} but it must be called before {@link #prepare}.
+ * {@code prepare()} may perform additional checks on the parameter to make sure that the specified
+ * profile and level are applicable, and sometimes the passed profile or level will be
+ * discarded due to codec capablity or to ensure the video recording can proceed smoothly
+ * based on the capabilities of the platform. <br>Application can also use the
+ * {@link MediaCodecInfo.CodecCapabilities#profileLevels} to query applicable combination of profile
+ * and level for the corresponding format. Note that the requested profile/level may not be supported by
+ * the codec that is actually being used by this MediaRecorder instance.
+ * @param profile declared in {@link MediaCodecInfo.CodecProfileLevel}.
+ * @param level declared in {@link MediaCodecInfo.CodecProfileLevel}.
+ * @throws IllegalArgumentException when an invalid profile or level value is used.
+ */
+ public void setVideoEncodingProfileLevel(int profile, int level) {
+ if (profile <= 0) {
+ throw new IllegalArgumentException("Video encoding profile is not positive");
+ }
+ if (level <= 0) {
+ throw new IllegalArgumentException("Video encoding level is not positive");
+ }
+ setParameter("video-param-encoder-profile=" + profile);
+ setParameter("video-param-encoder-level=" + level);
+ }
+
+ /**
+ * Currently not implemented. It does nothing.
+ * @deprecated Time lapse mode video recording using camera still image capture
+ * is not desirable, and will not be supported.
+ * @hide
+ */
+ public void setAuxiliaryOutputFile(FileDescriptor fd)
+ {
+ Log.w(TAG, "setAuxiliaryOutputFile(FileDescriptor) is no longer supported.");
+ }
+
+ /**
+ * Currently not implemented. It does nothing.
+ * @deprecated Time lapse mode video recording using camera still image capture
+ * is not desirable, and will not be supported.
+ * @hide
+ */
+ public void setAuxiliaryOutputFile(String path)
+ {
+ Log.w(TAG, "setAuxiliaryOutputFile(String) is no longer supported.");
+ }
+
+ /**
+ * Pass in the file descriptor of the file to be written. Call this after
+ * setOutputFormat() but before prepare().
+ *
+ * @param fd an open file descriptor to be written into.
+ * @throws IllegalStateException if it is called before
+ * setOutputFormat() or after prepare()
+ */
+ public void setOutputFile(FileDescriptor fd) throws IllegalStateException
+ {
+ mPath = null;
+ mFile = null;
+ mFd = fd;
+ }
+
+ /**
+ * Pass in the file object to be written. Call this after setOutputFormat() but before prepare().
+ * File should be seekable. After setting the next output file, application should not use the
+ * file until {@link #stop}. Application is responsible for cleaning up unused files after
+ * {@link #stop} is called.
+ *
+ * @param file the file object to be written into.
+ */
+ public void setOutputFile(File file)
+ {
+ mPath = null;
+ mFd = null;
+ mFile = file;
+ }
+
    /**
     * Sets the next output file descriptor to be used when the maximum filesize is reached
     * on the prior output {@link #setOutputFile} or {@link #setNextOutputFile}). File descriptor
     * must be seekable and writable. After setting the next output file, application should not
     * use the file referenced by this file descriptor until {@link #stop}. It is the application's
     * responsibility to close the file descriptor. It is safe to do so as soon as this call returns.
     * Application must call this after receiving on the
     * {@link android.media.MediaRecorder.OnInfoListener} a "what" code of
     * {@link #MEDIA_RECORDER_INFO_MAX_FILESIZE_APPROACHING} and before receiving a "what" code of
     * {@link #MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED}. The file is not used until switching to
     * that output. Application will receive {@link #MEDIA_RECORDER_INFO_NEXT_OUTPUT_FILE_STARTED}
     * when the next output file is used. Application will not be able to set a new output file if
     * the previous one has not been used. Application is responsible for cleaning up unused files
     * after {@link #stop} is called.
     *
     * @param fd an open file descriptor to be written into.
     * @throws IllegalStateException if it is called before prepare().
     * @throws IOException if setNextOutputFile fails otherwise.
     */
    public void setNextOutputFile(FileDescriptor fd) throws IOException
    {
        // Thin wrapper: the descriptor is handed straight to the native layer.
        _setNextOutputFile(fd);
    }
+
+ /**
+ * Sets the path of the output file to be produced. Call this after
+ * setOutputFormat() but before prepare().
+ *
+ * @param path The pathname to use.
+ * @throws IllegalStateException if it is called before
+ * setOutputFormat() or after prepare()
+ */
+ public void setOutputFile(String path) throws IllegalStateException
+ {
+ mFd = null;
+ mFile = null;
+ mPath = path;
+ }
+
+ /**
+ * Sets the next output file to be used when the maximum filesize is reached on the prior
+ * output {@link #setOutputFile} or {@link #setNextOutputFile}). File should be seekable.
+ * After setting the next output file, application should not use the file until {@link #stop}.
+ * Application must call this after receiving on the
+ * {@link android.media.MediaRecorder.OnInfoListener} a "what" code of
+ * {@link #MEDIA_RECORDER_INFO_MAX_FILESIZE_APPROACHING} and before receiving a "what" code of
+ * {@link #MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED}. The file is not used until switching to
+ * that output. Application will receive {@link #MEDIA_RECORDER_INFO_NEXT_OUTPUT_FILE_STARTED}
+ * when the next output file is used. Application will not be able to set a new output file if
+ * the previous one has not been used. Application is responsible for cleaning up unused files
+ * after {@link #stop} is called.
+ *
+ * @param file The file to use.
+ * @throws IllegalStateException if it is called before prepare().
+ * @throws IOException if setNextOutputFile fails otherwise.
+ */
+ public void setNextOutputFile(File file) throws IOException
+ {
+ RandomAccessFile f = new RandomAccessFile(file, "rws");
+ try {
+ _setNextOutputFile(f.getFD());
+ } finally {
+ f.close();
+ }
+ }
+
    // Native implementations backing the public output-file and prepare() APIs.
    private native void _setOutputFile(FileDescriptor fd) throws IllegalStateException, IOException;
    private native void _setNextOutputFile(FileDescriptor fd) throws IllegalStateException, IOException;
    private native void _prepare() throws IllegalStateException, IOException;
+
+ /**
+ * Prepares the recorder to begin capturing and encoding data. This method
+ * must be called after setting up the desired audio and video sources,
+ * encoders, file format, etc., but before start().
+ *
+ * @throws IllegalStateException if it is called after
+ * start() or before setOutputFormat().
+ * @throws IOException if prepare fails otherwise.
+ */
+ public void prepare() throws IllegalStateException, IOException
+ {
+ if (mPath != null) {
+ RandomAccessFile file = new RandomAccessFile(mPath, "rws");
+ try {
+ _setOutputFile(file.getFD());
+ } finally {
+ file.close();
+ }
+ } else if (mFd != null) {
+ _setOutputFile(mFd);
+ } else if (mFile != null) {
+ RandomAccessFile file = new RandomAccessFile(mFile, "rws");
+ try {
+ _setOutputFile(file.getFD());
+ } finally {
+ file.close();
+ }
+ } else {
+ throw new IOException("No valid output file");
+ }
+
+ _prepare();
+ }
+
    /**
     * Begins capturing and encoding data to the file specified with
     * setOutputFile(). Call this after prepare().
     *
     * <p>Since API level 13, if applications set a camera via
     * {@link #setCamera(Camera)}, the apps can use the camera after this method
     * call. The apps do not need to lock the camera again. However, if this
     * method fails, the apps should still lock the camera back. The apps should
     * not start another recording session during recording.
     *
     * <p>Implemented in native code.
     *
     * @throws IllegalStateException if it is called before
     * prepare() or when the camera is already in use by another app.
     */
    public native void start() throws IllegalStateException;
+
    /**
     * Stops recording. Call this after start(). Once recording is stopped,
     * you will have to configure it again as if it has just been constructed.
     * Note that a RuntimeException is intentionally thrown to the
     * application, if no valid audio/video data has been received when stop()
     * is called. This happens if stop() is called immediately after
     * start(). The failure lets the application take action accordingly to
     * clean up the output file (delete the output file, for instance), since
     * the output file is not properly constructed when this happens.
     *
     * <p>Implemented in native code.
     *
     * @throws IllegalStateException if it is called before start()
     */
    public native void stop() throws IllegalStateException;
+
    /**
     * Pauses recording. Call this after start(). You may resume recording
     * with resume() without reconfiguration, as opposed to stop(). It does
     * nothing if the recording is already paused.
     *
     * When the recording is paused and resumed, the resulting output would
     * be as if nothing happened during paused period, immediately switching
     * to the resumed scene.
     *
     * <p>Implemented in native code.
     *
     * @throws IllegalStateException if it is called before start() or after
     * stop()
     */
    public native void pause() throws IllegalStateException;
+
    /**
     * Resumes recording. Call this after start(). It does nothing if the
     * recording is not paused.
     *
     * <p>Implemented in native code.
     *
     * @throws IllegalStateException if it is called before start() or after
     * stop()
     * @see android.media.MediaRecorder#pause
     */
    public native void resume() throws IllegalStateException;
+
    /**
     * Restarts the MediaRecorder to its idle state. After calling
     * this method, you will have to configure it again as if it had just been
     * constructed.
     */
    public void reset() {
        // Reset native state first, then drain the event handler so that no
        // stale callbacks from the old session reach the listeners.
        native_reset();

        // make sure none of the listeners get called anymore
        mEventHandler.removeCallbacksAndMessages(null);
    }

    // Native counterpart of reset(); tears down the native recorder state.
    private native void native_reset();
+
    /**
     * Returns the maximum absolute amplitude that was sampled since the last
     * call to this method. Call this only after the setAudioSource().
     *
     * <p>Implemented in native code.
     *
     * @return the maximum absolute amplitude measured since the last call, or
     * 0 when called for the first time
     * @throws IllegalStateException if it is called before
     * the audio source has been set.
     */
    public native int getMaxAmplitude() throws IllegalStateException;
+
    /* Do not change this value without updating its counterpart
     * in include/media/mediarecorder.h or mediaplayer.h!
     */
    /** Unspecified media recorder error.
     * @see android.media.MediaRecorder.OnErrorListener
     */
    public static final int MEDIA_RECORDER_ERROR_UNKNOWN = 1;
    /** Media server died. In this case, the application must release the
     * MediaRecorder object and instantiate a new one.
     * @see android.media.MediaRecorder.OnErrorListener
     */
    public static final int MEDIA_ERROR_SERVER_DIED = 100;
+
    /**
     * Interface definition for a callback to be invoked when an error
     * occurs while recording.
     *
     * @see MediaRecorder#setOnErrorListener
     */
    public interface OnErrorListener
    {
        /**
         * Called when an error occurs while recording.
         *
         * @param mr the MediaRecorder that encountered the error
         * @param what the type of error that has occurred:
         * <ul>
         * <li>{@link #MEDIA_RECORDER_ERROR_UNKNOWN}
         * <li>{@link #MEDIA_ERROR_SERVER_DIED}
         * </ul>
         * @param extra an extra code, specific to the error type
         */
        void onError(MediaRecorder mr, int what, int extra);
    }
+
    /**
     * Register a callback to be invoked when an error occurs while
     * recording. Passing {@code null} clears any previously set listener.
     *
     * @param l the callback that will be run
     */
    public void setOnErrorListener(OnErrorListener l)
    {
        mOnErrorListener = l;
    }
+
    /* Do not change these values without updating their counterparts
     * in include/media/mediarecorder.h!
     */
    /** Unspecified media recorder info.
     * @see android.media.MediaRecorder.OnInfoListener
     */
    public static final int MEDIA_RECORDER_INFO_UNKNOWN = 1;
    /** A maximum duration had been setup and has now been reached.
     * @see android.media.MediaRecorder.OnInfoListener
     */
    public static final int MEDIA_RECORDER_INFO_MAX_DURATION_REACHED = 800;
    /** A maximum filesize had been setup and has now been reached.
     * Note: This event will not be sent if application already set
     * next output file through {@link #setNextOutputFile}.
     * @see android.media.MediaRecorder.OnInfoListener
     */
    public static final int MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED = 801;
    /** A maximum filesize had been setup and current recorded file size
     * has reached 90% of the limit. This is sent once per file upon
     * reaching/passing the 90% limit. To continue the recording, application
     * should use {@link #setNextOutputFile} to set the next output file.
     * Otherwise, recording will stop when reaching maximum file size.
     * @see android.media.MediaRecorder.OnInfoListener
     */
    public static final int MEDIA_RECORDER_INFO_MAX_FILESIZE_APPROACHING = 802;
    /** A maximum filesize had been reached and MediaRecorder has switched
     * output to a new file set by application {@link #setNextOutputFile}.
     * For best practice, application should use this event to keep track
     * of whether the file previously set has been used or not.
     * @see android.media.MediaRecorder.OnInfoListener
     */
    public static final int MEDIA_RECORDER_INFO_NEXT_OUTPUT_FILE_STARTED = 803;

    /** informational events for individual tracks, for testing purpose.
     * The track informational event usually contains two parts in the ext1
     * arg of the onInfo() callback: bit 31-28 contains the track id; and
     * the rest of the 28 bits contains the informational event defined here.
     * For example, ext1 = (1 << 28 | MEDIA_RECORDER_TRACK_INFO_TYPE) if the
     * track id is 1 for informational event MEDIA_RECORDER_TRACK_INFO_TYPE;
     * while ext1 = (0 << 28 | MEDIA_RECORDER_TRACK_INFO_TYPE) if the track
     * id is 0 for informational event MEDIA_RECORDER_TRACK_INFO_TYPE. The
     * application should extract the track id and the type of informational
     * event from ext1, accordingly.
     *
     * FIXME:
     * Please update the comment for onInfo also when these
     * events are unhidden so that application knows how to extract the track
     * id and the informational event type from onInfo callback.
     *
     * {@hide}
     */
    public static final int MEDIA_RECORDER_TRACK_INFO_LIST_START = 1000;
    /** Signal the completion of the track for the recording session.
     * {@hide}
     */
    public static final int MEDIA_RECORDER_TRACK_INFO_COMPLETION_STATUS = 1000;
    /** Indicate the recording progress in time (ms) during recording.
     * {@hide}
     */
    public static final int MEDIA_RECORDER_TRACK_INFO_PROGRESS_IN_TIME = 1001;
    /** Indicate the track type: 0 for Audio and 1 for Video.
     * {@hide}
     */
    public static final int MEDIA_RECORDER_TRACK_INFO_TYPE = 1002;
    /** Provide the track duration information.
     * {@hide}
     */
    public static final int MEDIA_RECORDER_TRACK_INFO_DURATION_MS = 1003;
    /** Provide the max chunk duration in time (ms) for the given track.
     * {@hide}
     */
    public static final int MEDIA_RECORDER_TRACK_INFO_MAX_CHUNK_DUR_MS = 1004;
    /** Provide the total number of recorded frames.
     * {@hide}
     */
    public static final int MEDIA_RECORDER_TRACK_INFO_ENCODED_FRAMES = 1005;
    /** Provide the max spacing between neighboring chunks for the given track.
     * {@hide}
     */
    public static final int MEDIA_RECORDER_TRACK_INTER_CHUNK_TIME_MS = 1006;
    /** Provide the elapsed time measuring from the start of the recording
     * till the first output frame of the given track is received, excluding
     * any intentional start time offset of a recording session for the
     * purpose of eliminating the recording sound in the recorded file.
     * {@hide}
     */
    public static final int MEDIA_RECORDER_TRACK_INFO_INITIAL_DELAY_MS = 1007;
    /** Provide the start time difference (delay) between this track and
     * the start of the movie.
     * {@hide}
     */
    public static final int MEDIA_RECORDER_TRACK_INFO_START_OFFSET_MS = 1008;
    /** Provide the total number of data (in kilo-bytes) encoded.
     * {@hide}
     */
    public static final int MEDIA_RECORDER_TRACK_INFO_DATA_KBYTES = 1009;
    /** Marks the end of the track-specific informational event range.
     * {@hide}
     */
    public static final int MEDIA_RECORDER_TRACK_INFO_LIST_END = 2000;
+
+
    /**
     * Interface definition of a callback to be invoked to communicate some
     * info and/or warning about the recording.
     *
     * @see MediaRecorder#setOnInfoListener
     */
    public interface OnInfoListener
    {
        /**
         * Called to indicate an info or a warning during recording.
         *
         * @param mr the MediaRecorder the info pertains to
         * @param what the type of info or warning that has occurred
         * <ul>
         * <li>{@link #MEDIA_RECORDER_INFO_UNKNOWN}
         * <li>{@link #MEDIA_RECORDER_INFO_MAX_DURATION_REACHED}
         * <li>{@link #MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED}
         * </ul>
         * @param extra an extra code, specific to the info type
         */
        void onInfo(MediaRecorder mr, int what, int extra);
    }
+
    /**
     * Register a callback to be invoked when an informational event occurs while
     * recording. Passing {@code null} clears any previously set listener.
     *
     * @param listener the callback that will be run
     */
    public void setOnInfoListener(OnInfoListener listener)
    {
        mOnInfoListener = listener;
    }
+
+    /**
+     * Handler that receives events posted from native code (via
+     * postEventFromNative) and forwards them to the registered listeners.
+     */
+    private class EventHandler extends Handler {
+        private final MediaRecorder mRecorder;
+
+        public EventHandler(MediaRecorder mr, Looper looper) {
+            super(looper);
+            mRecorder = mr;
+        }
+
+        /* Do not change these values without updating their counterparts
+         * in include/media/mediarecorder.h!
+         */
+        private static final int MEDIA_RECORDER_EVENT_LIST_START = 1;
+        private static final int MEDIA_RECORDER_EVENT_ERROR = 1;
+        private static final int MEDIA_RECORDER_EVENT_INFO = 2;
+        private static final int MEDIA_RECORDER_EVENT_LIST_END = 99;
+
+        /* Events related to individual tracks */
+        private static final int MEDIA_RECORDER_TRACK_EVENT_LIST_START = 100;
+        private static final int MEDIA_RECORDER_TRACK_EVENT_ERROR = 100;
+        private static final int MEDIA_RECORDER_TRACK_EVENT_INFO = 101;
+        private static final int MEDIA_RECORDER_TRACK_EVENT_LIST_END = 1000;
+
+        @Override
+        public void handleMessage(Message msg) {
+            // Events that arrive after native teardown are dropped.
+            if (mRecorder.mNativeContext == 0) {
+                Log.w(TAG, "mediarecorder went away with unhandled events");
+                return;
+            }
+            final int what = msg.what;
+            if (what == MEDIA_RECORDER_EVENT_ERROR
+                    || what == MEDIA_RECORDER_TRACK_EVENT_ERROR) {
+                if (mOnErrorListener != null) {
+                    mOnErrorListener.onError(mRecorder, msg.arg1, msg.arg2);
+                }
+            } else if (what == MEDIA_RECORDER_EVENT_INFO
+                    || what == MEDIA_RECORDER_TRACK_EVENT_INFO) {
+                if (mOnInfoListener != null) {
+                    mOnInfoListener.onInfo(mRecorder, msg.arg1, msg.arg2);
+                }
+            } else {
+                Log.e(TAG, "Unknown message type " + what);
+            }
+        }
+    }
+
+ /**
+ * Called from native code when an interesting event happens. This method
+ * just uses the EventHandler system to post the event back to the main app thread.
+ * We use a weak reference to the original MediaRecorder object so that the native
+ * code is safe from the object disappearing from underneath it. (This is
+ * the cookie passed to native_setup().)
+ */
+ private static void postEventFromNative(Object mediarecorder_ref,
+ int what, int arg1, int arg2, Object obj)
+ {
+ MediaRecorder mr = (MediaRecorder)((WeakReference)mediarecorder_ref).get();
+ if (mr == null) {
+ return;
+ }
+
+ if (mr.mEventHandler != null) {
+ Message m = mr.mEventHandler.obtainMessage(what, arg1, arg2, obj);
+ mr.mEventHandler.sendMessage(m);
+ }
+ }
+
+ /**
+ * Releases resources associated with this MediaRecorder object.
+ * It is good practice to call this method when you're done
+ * using the MediaRecorder. In particular, whenever an Activity
+ * of an application is paused (its onPause() method is called),
+ * or stopped (its onStop() method is called), this method should be
+ * invoked to release the MediaRecorder object, unless the application
+ * has a special need to keep the object around. In addition to
+ * unnecessary resources (such as memory and instances of codecs)
+ * being held, failure to call this method immediately if a
+ * MediaRecorder object is no longer needed may also lead to
+ * continuous battery consumption for mobile devices, and recording
+ * failure for other applications if no multiple instances of the
+ * same codec are supported on a device. Even if multiple instances
+ * of the same codec are supported, some performance degradation
+ * may be expected when unnecessary multiple instances are used
+ * at the same time.
+ */
+ public native void release();
+
+ // Native plumbing below. native_setup() receives a WeakReference to this
+ // instance (see postEventFromNative) so native code never keeps the Java
+ // object strongly reachable.
+ private static native final void native_init();
+
+ private native final void native_setup(Object mediarecorder_this,
+ String clientName, String opPackageName) throws IllegalStateException;
+
+ private native final void native_finalize();
+
+ // Passes a single "name=value" configuration pair down to the native recorder.
+ private native void setParameter(String nameValuePair);
+
+ /**
+ * Return Metrics data about the current Mediarecorder instance.
+ *
+ * @return a {@link PersistableBundle} containing the set of attributes and values
+ * available for the media being generated by this instance of
+ * MediaRecorder.
+ * The attributes are described in {@link MetricsConstants}.
+ *
+ * Additional vendor-specific fields may also be present in
+ * the return value.
+ */
+ public PersistableBundle getMetrics() {
+ PersistableBundle bundle = native_getMetrics();
+ return bundle;
+ }
+
+ private native PersistableBundle native_getMetrics();
+
+    @Override
+    protected void finalize() {
+        // Delegate to native cleanup when this object is garbage collected.
+        native_finalize();
+    }
+
+ /**
+ * Set of attribute keys for the {@link PersistableBundle} returned by
+ * {@link MediaRecorder#getMetrics}.
+ */
+ public final static class MetricsConstants
+ {
+ // Non-instantiable holder of key constants.
+ private MetricsConstants() {}
+
+ /**
+ * Key to extract the audio bitrate
+ * from the {@link MediaRecorder#getMetrics} return.
+ * The value is an integer.
+ */
+ public static final String AUDIO_BITRATE = "android.media.mediarecorder.audio-bitrate";
+
+ /**
+ * Key to extract the number of audio channels
+ * from the {@link MediaRecorder#getMetrics} return.
+ * The value is an integer.
+ */
+ public static final String AUDIO_CHANNELS = "android.media.mediarecorder.audio-channels";
+
+ /**
+ * Key to extract the audio samplerate
+ * from the {@link MediaRecorder#getMetrics} return.
+ * The value is an integer.
+ */
+ public static final String AUDIO_SAMPLERATE = "android.media.mediarecorder.audio-samplerate";
+
+ /**
+ * Key to extract the audio timescale
+ * from the {@link MediaRecorder#getMetrics} return.
+ * The value is an integer.
+ */
+ public static final String AUDIO_TIMESCALE = "android.media.mediarecorder.audio-timescale";
+
+ /**
+ * Key to extract the video capture frame rate
+ * from the {@link MediaRecorder#getMetrics} return.
+ * The value is a double.
+ */
+ public static final String CAPTURE_FPS = "android.media.mediarecorder.capture-fps";
+
+ /**
+ * Key to extract the video capture framerate enable value
+ * from the {@link MediaRecorder#getMetrics} return.
+ * The value is an integer.
+ */
+ public static final String CAPTURE_FPS_ENABLE = "android.media.mediarecorder.capture-fpsenable";
+
+ /**
+ * Key to extract the intended playback frame rate
+ * from the {@link MediaRecorder#getMetrics} return.
+ * The value is an integer.
+ */
+ public static final String FRAMERATE = "android.media.mediarecorder.frame-rate";
+
+ /**
+ * Key to extract the height (in pixels) of the captured video
+ * from the {@link MediaRecorder#getMetrics} return.
+ * The value is an integer.
+ */
+ public static final String HEIGHT = "android.media.mediarecorder.height";
+
+ /**
+ * Key to extract the recorded movies time units
+ * from the {@link MediaRecorder#getMetrics} return.
+ * The value is an integer.
+ * A value of 1000 indicates that the movie's timing is in milliseconds.
+ */
+ public static final String MOVIE_TIMESCALE = "android.media.mediarecorder.movie-timescale";
+
+ /**
+ * Key to extract the rotation (in degrees) to properly orient the video
+ * from the {@link MediaRecorder#getMetrics} return.
+ * The value is an integer.
+ */
+ public static final String ROTATION = "android.media.mediarecorder.rotation";
+
+ /**
+ * Key to extract the video bitrate from being used
+ * from the {@link MediaRecorder#getMetrics} return.
+ * The value is an integer.
+ */
+ public static final String VIDEO_BITRATE = "android.media.mediarecorder.video-bitrate";
+
+ /**
+ * Key to extract the value for how often video iframes are generated
+ * from the {@link MediaRecorder#getMetrics} return.
+ * The value is an integer.
+ */
+ public static final String VIDEO_IFRAME_INTERVAL = "android.media.mediarecorder.video-iframe-interval";
+
+ /**
+ * Key to extract the video encoding level
+ * from the {@link MediaRecorder#getMetrics} return.
+ * The value is an integer.
+ */
+ public static final String VIDEO_LEVEL = "android.media.mediarecorder.video-encoder-level";
+
+ /**
+ * Key to extract the video encoding profile
+ * from the {@link MediaRecorder#getMetrics} return.
+ * The value is an integer.
+ */
+ public static final String VIDEO_PROFILE = "android.media.mediarecorder.video-encoder-profile";
+
+ /**
+ * Key to extract the recorded video time units
+ * from the {@link MediaRecorder#getMetrics} return.
+ * The value is an integer.
+ * A value of 1000 indicates that the video's timing is in milliseconds.
+ */
+ public static final String VIDEO_TIMESCALE = "android.media.mediarecorder.video-timescale";
+
+ /**
+ * Key to extract the width (in pixels) of the captured video
+ * from the {@link MediaRecorder#getMetrics} return.
+ * The value is an integer.
+ */
+ public static final String WIDTH = "android.media.mediarecorder.width";
+
+ }
+}
+
diff --git a/android/media/MediaRouter.java b/android/media/MediaRouter.java
new file mode 100644
index 00000000..2894e895
--- /dev/null
+++ b/android/media/MediaRouter.java
@@ -0,0 +1,3016 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.Manifest;
+import android.annotation.DrawableRes;
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.SystemService;
+import android.app.ActivityThread;
+import android.content.BroadcastReceiver;
+import android.content.Context;
+import android.content.Intent;
+import android.content.IntentFilter;
+import android.content.pm.PackageManager;
+import android.content.res.Resources;
+import android.graphics.drawable.Drawable;
+import android.hardware.display.DisplayManager;
+import android.hardware.display.WifiDisplay;
+import android.hardware.display.WifiDisplayStatus;
+import android.media.session.MediaSession;
+import android.os.Handler;
+import android.os.IBinder;
+import android.os.Process;
+import android.os.RemoteException;
+import android.os.ServiceManager;
+import android.os.UserHandle;
+import android.text.TextUtils;
+import android.util.Log;
+import android.view.Display;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Objects;
+import java.util.concurrent.CopyOnWriteArrayList;
+
+/**
+ * MediaRouter allows applications to control the routing of media channels
+ * and streams from the current device to external speakers and destination devices.
+ *
+ * <p>A MediaRouter is retrieved through {@link Context#getSystemService(String)
+ * Context.getSystemService()} of a {@link Context#MEDIA_ROUTER_SERVICE
+ * Context.MEDIA_ROUTER_SERVICE}.
+ *
+ * <p>The media router API is not thread-safe; all interactions with it must be
+ * done from the main thread of the process.</p>
+ */
+@SystemService(Context.MEDIA_ROUTER_SERVICE)
+public class MediaRouter {
+ private static final String TAG = "MediaRouter";
+ // True when verbose logging is enabled for the "MediaRouter" log tag.
+ private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);
+
+ /**
+ * Process-global router state shared by every MediaRouter instance;
+ * held in {@link MediaRouter#sStatic} and created lazily by the first
+ * MediaRouter constructor.
+ */
+ static class Static implements DisplayManager.DisplayListener {
+ final String mPackageName;
+ final Resources mResources;
+ final IAudioService mAudioService;
+ final DisplayManager mDisplayService;
+ final IMediaRouterService mMediaRouterService;
+ final Handler mHandler;
+ final CopyOnWriteArrayList<CallbackInfo> mCallbacks =
+ new CopyOnWriteArrayList<CallbackInfo>();
+
+ final ArrayList<RouteInfo> mRoutes = new ArrayList<RouteInfo>();
+ final ArrayList<RouteCategory> mCategories = new ArrayList<RouteCategory>();
+
+ final RouteCategory mSystemCategory;
+
+ final AudioRoutesInfo mCurAudioRoutesInfo = new AudioRoutesInfo();
+
+ RouteInfo mDefaultAudioVideo;
+ RouteInfo mBluetoothA2dpRoute;
+
+ RouteInfo mSelectedRoute;
+
+ final boolean mCanConfigureWifiDisplays;
+ boolean mActivelyScanningWifiDisplays;
+ String mPreviousActiveWifiDisplayAddress;
+
+ int mDiscoveryRequestRouteTypes;
+ boolean mDiscoverRequestActiveScan;
+
+ int mCurrentUserId = -1;
+ IMediaRouterClient mClient;
+ MediaRouterClientState mClientState;
+
+ // Forwards audio-route change callbacks from the audio service onto the
+ // main-thread handler before applying them via updateAudioRoutes().
+ final IAudioRoutesObserver.Stub mAudioRoutesObserver = new IAudioRoutesObserver.Stub() {
+ @Override
+ public void dispatchAudioRoutesChanged(final AudioRoutesInfo newRoutes) {
+ mHandler.post(new Runnable() {
+ @Override public void run() {
+ updateAudioRoutes(newRoutes);
+ }
+ });
+ }
+ };
+
+ Static(Context appContext) {
+ mPackageName = appContext.getPackageName();
+ mResources = appContext.getResources();
+ mHandler = new Handler(appContext.getMainLooper());
+
+ IBinder b = ServiceManager.getService(Context.AUDIO_SERVICE);
+ mAudioService = IAudioService.Stub.asInterface(b);
+
+ mDisplayService = (DisplayManager) appContext.getSystemService(Context.DISPLAY_SERVICE);
+
+ mMediaRouterService = IMediaRouterService.Stub.asInterface(
+ ServiceManager.getService(Context.MEDIA_ROUTER_SERVICE));
+
+ mSystemCategory = new RouteCategory(
+ com.android.internal.R.string.default_audio_route_category_name,
+ ROUTE_TYPE_LIVE_AUDIO | ROUTE_TYPE_LIVE_VIDEO, false);
+ mSystemCategory.mIsSystem = true;
+
+ // Only the system can configure wifi displays. The display manager
+ // enforces this with a permission check. Set a flag here so that we
+ // know whether this process is actually allowed to scan and connect.
+ mCanConfigureWifiDisplays = appContext.checkPermission(
+ Manifest.permission.CONFIGURE_WIFI_DISPLAY,
+ Process.myPid(), Process.myUid()) == PackageManager.PERMISSION_GRANTED;
+ }
+
+ // Called after sStatic is initialized
+ void startMonitoringRoutes(Context appContext) {
+ mDefaultAudioVideo = new RouteInfo(mSystemCategory);
+ mDefaultAudioVideo.mNameResId = com.android.internal.R.string.default_audio_route_name;
+ mDefaultAudioVideo.mSupportedTypes = ROUTE_TYPE_LIVE_AUDIO | ROUTE_TYPE_LIVE_VIDEO;
+ mDefaultAudioVideo.updatePresentationDisplay();
+ if (((AudioManager) appContext.getSystemService(Context.AUDIO_SERVICE))
+ .isVolumeFixed()) {
+ mDefaultAudioVideo.mVolumeHandling = RouteInfo.PLAYBACK_VOLUME_FIXED;
+ }
+
+ addRouteStatic(mDefaultAudioVideo);
+
+ // This will select the active wifi display route if there is one.
+ updateWifiDisplayStatus(mDisplayService.getWifiDisplayStatus());
+
+ appContext.registerReceiver(new WifiDisplayStatusChangedReceiver(),
+ new IntentFilter(DisplayManager.ACTION_WIFI_DISPLAY_STATUS_CHANGED));
+ appContext.registerReceiver(new VolumeChangeReceiver(),
+ new IntentFilter(AudioManager.VOLUME_CHANGED_ACTION));
+
+ mDisplayService.registerDisplayListener(this, mHandler);
+
+ AudioRoutesInfo newAudioRoutes = null;
+ try {
+ newAudioRoutes = mAudioService.startWatchingRoutes(mAudioRoutesObserver);
+ } catch (RemoteException e) {
+ }
+ if (newAudioRoutes != null) {
+ // This will select the active BT route if there is one and the current
+ // selected route is the default system route, or if there is no selected
+ // route yet.
+ updateAudioRoutes(newAudioRoutes);
+ }
+
+ // Bind to the media router service.
+ rebindAsUser(UserHandle.myUserId());
+
+ // Select the default route if the above didn't sync us up
+ // appropriately with relevant system state.
+ if (mSelectedRoute == null) {
+ selectDefaultRouteStatic();
+ }
+ }
+
+ // Reconciles mCurAudioRoutesInfo / the default and Bluetooth routes with a
+ // fresh snapshot from the audio service, dispatching change events as needed.
+ void updateAudioRoutes(AudioRoutesInfo newRoutes) {
+ boolean audioRoutesChanged = false;
+ if (newRoutes.mainType != mCurAudioRoutesInfo.mainType) {
+ mCurAudioRoutesInfo.mainType = newRoutes.mainType;
+ int name;
+ if ((newRoutes.mainType&AudioRoutesInfo.MAIN_HEADPHONES) != 0
+ || (newRoutes.mainType&AudioRoutesInfo.MAIN_HEADSET) != 0) {
+ name = com.android.internal.R.string.default_audio_route_name_headphones;
+ } else if ((newRoutes.mainType&AudioRoutesInfo.MAIN_DOCK_SPEAKERS) != 0) {
+ name = com.android.internal.R.string.default_audio_route_name_dock_speakers;
+ } else if ((newRoutes.mainType&AudioRoutesInfo.MAIN_HDMI) != 0) {
+ name = com.android.internal.R.string.default_media_route_name_hdmi;
+ } else {
+ name = com.android.internal.R.string.default_audio_route_name;
+ }
+ mDefaultAudioVideo.mNameResId = name;
+ dispatchRouteChanged(mDefaultAudioVideo);
+ audioRoutesChanged = true;
+ }
+
+ // NOTE(review): mainType is never read below in this method; kept as-is.
+ final int mainType = mCurAudioRoutesInfo.mainType;
+ if (!TextUtils.equals(newRoutes.bluetoothName, mCurAudioRoutesInfo.bluetoothName)) {
+ mCurAudioRoutesInfo.bluetoothName = newRoutes.bluetoothName;
+ if (mCurAudioRoutesInfo.bluetoothName != null) {
+ if (mBluetoothA2dpRoute == null) {
+ // BT connected
+ final RouteInfo info = new RouteInfo(mSystemCategory);
+ info.mName = mCurAudioRoutesInfo.bluetoothName;
+ info.mDescription = mResources.getText(
+ com.android.internal.R.string.bluetooth_a2dp_audio_route_name);
+ info.mSupportedTypes = ROUTE_TYPE_LIVE_AUDIO;
+ info.mDeviceType = RouteInfo.DEVICE_TYPE_BLUETOOTH;
+ mBluetoothA2dpRoute = info;
+ addRouteStatic(mBluetoothA2dpRoute);
+ } else {
+ mBluetoothA2dpRoute.mName = mCurAudioRoutesInfo.bluetoothName;
+ dispatchRouteChanged(mBluetoothA2dpRoute);
+ }
+ } else if (mBluetoothA2dpRoute != null) {
+ // BT disconnected
+ removeRouteStatic(mBluetoothA2dpRoute);
+ mBluetoothA2dpRoute = null;
+ }
+ audioRoutesChanged = true;
+ }
+
+ if (audioRoutesChanged) {
+ selectRouteStatic(ROUTE_TYPE_LIVE_AUDIO, getDefaultSystemAudioRoute(), false);
+ Log.v(TAG, "Audio routes updated: " + newRoutes + ", a2dp=" + isBluetoothA2dpOn());
+ }
+ }
+
+ RouteInfo getDefaultSystemAudioRoute() {
+ boolean globalBluetoothA2doOn = false;
+ try {
+ globalBluetoothA2doOn = mMediaRouterService.isGlobalBluetoothA2doOn();
+ } catch (RemoteException ex) {
+ // NOTE(review): message says "isSystemBluetoothA2doOn" but the call
+ // above is isGlobalBluetoothA2doOn — retained as-is (runtime string).
+ Log.e(TAG, "Unable to call isSystemBluetoothA2doOn.", ex);
+ }
+ return (globalBluetoothA2doOn && mBluetoothA2dpRoute != null)
+ ? mBluetoothA2dpRoute : mDefaultAudioVideo;
+ }
+
+ RouteInfo getCurrentSystemAudioRoute() {
+ return (isBluetoothA2dpOn() && mBluetoothA2dpRoute != null)
+ ? mBluetoothA2dpRoute : mDefaultAudioVideo;
+ }
+
+ boolean isBluetoothA2dpOn() {
+ try {
+ return mAudioService.isBluetoothA2dpOn();
+ } catch (RemoteException e) {
+ Log.e(TAG, "Error querying Bluetooth A2DP state", e);
+ return false;
+ }
+ }
+
+ // Recomputes the union of route types requested by registered callbacks and
+ // pushes the resulting discovery request (and wifi display scan state).
+ void updateDiscoveryRequest() {
+ // What are we looking for today?
+ int routeTypes = 0;
+ int passiveRouteTypes = 0;
+ boolean activeScan = false;
+ boolean activeScanWifiDisplay = false;
+ final int count = mCallbacks.size();
+ for (int i = 0; i < count; i++) {
+ CallbackInfo cbi = mCallbacks.get(i);
+ if ((cbi.flags & (CALLBACK_FLAG_PERFORM_ACTIVE_SCAN
+ | CALLBACK_FLAG_REQUEST_DISCOVERY)) != 0) {
+ // Discovery explicitly requested.
+ routeTypes |= cbi.type;
+ } else if ((cbi.flags & CALLBACK_FLAG_PASSIVE_DISCOVERY) != 0) {
+ // Discovery only passively requested.
+ passiveRouteTypes |= cbi.type;
+ } else {
+ // Legacy case since applications don't specify the discovery flag.
+ // Unfortunately we just have to assume they always need discovery
+ // whenever they have a callback registered.
+ routeTypes |= cbi.type;
+ }
+ if ((cbi.flags & CALLBACK_FLAG_PERFORM_ACTIVE_SCAN) != 0) {
+ activeScan = true;
+ if ((cbi.type & ROUTE_TYPE_REMOTE_DISPLAY) != 0) {
+ activeScanWifiDisplay = true;
+ }
+ }
+ }
+ if (routeTypes != 0 || activeScan) {
+ // If someone else requests discovery then enable the passive listeners.
+ // This is used by the MediaRouteButton and MediaRouteActionProvider since
+ // they don't receive lifecycle callbacks from the Activity.
+ routeTypes |= passiveRouteTypes;
+ }
+
+ // Update wifi display scanning.
+ // TODO: All of this should be managed by the media router service.
+ if (mCanConfigureWifiDisplays) {
+ if (mSelectedRoute != null
+ && mSelectedRoute.matchesTypes(ROUTE_TYPE_REMOTE_DISPLAY)) {
+ // Don't scan while already connected to a remote display since
+ // it may interfere with the ongoing transmission.
+ activeScanWifiDisplay = false;
+ }
+ if (activeScanWifiDisplay) {
+ if (!mActivelyScanningWifiDisplays) {
+ mActivelyScanningWifiDisplays = true;
+ mDisplayService.startWifiDisplayScan();
+ }
+ } else {
+ if (mActivelyScanningWifiDisplays) {
+ mActivelyScanningWifiDisplays = false;
+ mDisplayService.stopWifiDisplayScan();
+ }
+ }
+ }
+
+ // Tell the media router service all about it.
+ if (routeTypes != mDiscoveryRequestRouteTypes
+ || activeScan != mDiscoverRequestActiveScan) {
+ mDiscoveryRequestRouteTypes = routeTypes;
+ mDiscoverRequestActiveScan = activeScan;
+ publishClientDiscoveryRequest();
+ }
+ }
+
+ @Override
+ public void onDisplayAdded(int displayId) {
+ updatePresentationDisplays(displayId);
+ }
+
+ @Override
+ public void onDisplayChanged(int displayId) {
+ updatePresentationDisplays(displayId);
+ }
+
+ @Override
+ public void onDisplayRemoved(int displayId) {
+ updatePresentationDisplays(displayId);
+ }
+
+ public Display[] getAllPresentationDisplays() {
+ return mDisplayService.getDisplays(DisplayManager.DISPLAY_CATEGORY_PRESENTATION);
+ }
+
+ private void updatePresentationDisplays(int changedDisplayId) {
+ final int count = mRoutes.size();
+ for (int i = 0; i < count; i++) {
+ final RouteInfo route = mRoutes.get(i);
+ if (route.updatePresentationDisplay() || (route.mPresentationDisplay != null
+ && route.mPresentationDisplay.getDisplayId() == changedDisplayId)) {
+ dispatchRoutePresentationDisplayChanged(route);
+ }
+ }
+ }
+
+ void setSelectedRoute(RouteInfo info, boolean explicit) {
+ // Must be non-reentrant.
+ mSelectedRoute = info;
+ publishClientSelectedRoute(explicit);
+ }
+
+ // Re-registers the service client when the active user changes (or when no
+ // client exists yet), then republishes local state to the new client.
+ void rebindAsUser(int userId) {
+ if (mCurrentUserId != userId || userId < 0 || mClient == null) {
+ if (mClient != null) {
+ try {
+ mMediaRouterService.unregisterClient(mClient);
+ } catch (RemoteException ex) {
+ Log.e(TAG, "Unable to unregister media router client.", ex);
+ }
+ mClient = null;
+ }
+
+ mCurrentUserId = userId;
+
+ try {
+ Client client = new Client();
+ mMediaRouterService.registerClientAsUser(client, mPackageName, userId);
+ mClient = client;
+ } catch (RemoteException ex) {
+ Log.e(TAG, "Unable to register media router client.", ex);
+ }
+
+ publishClientDiscoveryRequest();
+ publishClientSelectedRoute(false);
+ updateClientState();
+ }
+ }
+
+ void publishClientDiscoveryRequest() {
+ if (mClient != null) {
+ try {
+ mMediaRouterService.setDiscoveryRequest(mClient,
+ mDiscoveryRequestRouteTypes, mDiscoverRequestActiveScan);
+ } catch (RemoteException ex) {
+ Log.e(TAG, "Unable to publish media router client discovery request.", ex);
+ }
+ }
+ }
+
+ void publishClientSelectedRoute(boolean explicit) {
+ if (mClient != null) {
+ try {
+ mMediaRouterService.setSelectedRoute(mClient,
+ mSelectedRoute != null ? mSelectedRoute.mGlobalRouteId : null,
+ explicit);
+ } catch (RemoteException ex) {
+ Log.e(TAG, "Unable to publish media router client selected route.", ex);
+ }
+ }
+ }
+
+ // Pulls the latest route list from the service and syncs mRoutes with it:
+ // adds new global routes, updates existing ones, removes defunct ones.
+ void updateClientState() {
+ // Update the client state.
+ mClientState = null;
+ if (mClient != null) {
+ try {
+ mClientState = mMediaRouterService.getState(mClient);
+ } catch (RemoteException ex) {
+ Log.e(TAG, "Unable to retrieve media router client state.", ex);
+ }
+ }
+ final ArrayList<MediaRouterClientState.RouteInfo> globalRoutes =
+ mClientState != null ? mClientState.routes : null;
+
+ // Add or update routes.
+ final int globalRouteCount = globalRoutes != null ? globalRoutes.size() : 0;
+ for (int i = 0; i < globalRouteCount; i++) {
+ final MediaRouterClientState.RouteInfo globalRoute = globalRoutes.get(i);
+ RouteInfo route = findGlobalRoute(globalRoute.id);
+ if (route == null) {
+ route = makeGlobalRoute(globalRoute);
+ addRouteStatic(route);
+ } else {
+ updateGlobalRoute(route, globalRoute);
+ }
+ }
+
+ // Remove defunct routes.
+ outer: for (int i = mRoutes.size(); i-- > 0; ) {
+ final RouteInfo route = mRoutes.get(i);
+ final String globalRouteId = route.mGlobalRouteId;
+ if (globalRouteId != null) {
+ for (int j = 0; j < globalRouteCount; j++) {
+ MediaRouterClientState.RouteInfo globalRoute = globalRoutes.get(j);
+ if (globalRouteId.equals(globalRoute.id)) {
+ continue outer; // found
+ }
+ }
+ // not found
+ removeRouteStatic(route);
+ }
+ }
+ }
+
+ void requestSetVolume(RouteInfo route, int volume) {
+ if (route.mGlobalRouteId != null && mClient != null) {
+ try {
+ mMediaRouterService.requestSetVolume(mClient,
+ route.mGlobalRouteId, volume);
+ } catch (RemoteException ex) {
+ Log.w(TAG, "Unable to request volume change.", ex);
+ }
+ }
+ }
+
+ void requestUpdateVolume(RouteInfo route, int direction) {
+ if (route.mGlobalRouteId != null && mClient != null) {
+ try {
+ mMediaRouterService.requestUpdateVolume(mClient,
+ route.mGlobalRouteId, direction);
+ } catch (RemoteException ex) {
+ Log.w(TAG, "Unable to request volume change.", ex);
+ }
+ }
+ }
+
+ // Builds a local RouteInfo mirroring a route described by the service.
+ RouteInfo makeGlobalRoute(MediaRouterClientState.RouteInfo globalRoute) {
+ RouteInfo route = new RouteInfo(mSystemCategory);
+ route.mGlobalRouteId = globalRoute.id;
+ route.mName = globalRoute.name;
+ route.mDescription = globalRoute.description;
+ route.mSupportedTypes = globalRoute.supportedTypes;
+ route.mDeviceType = globalRoute.deviceType;
+ route.mEnabled = globalRoute.enabled;
+ route.setRealStatusCode(globalRoute.statusCode);
+ route.mPlaybackType = globalRoute.playbackType;
+ route.mPlaybackStream = globalRoute.playbackStream;
+ route.mVolume = globalRoute.volume;
+ route.mVolumeMax = globalRoute.volumeMax;
+ route.mVolumeHandling = globalRoute.volumeHandling;
+ route.mPresentationDisplayId = globalRoute.presentationDisplayId;
+ route.updatePresentationDisplay();
+ return route;
+ }
+
+ // Copies changed fields from the service snapshot into the local route and
+ // dispatches only the change events that actually apply.
+ void updateGlobalRoute(RouteInfo route, MediaRouterClientState.RouteInfo globalRoute) {
+ boolean changed = false;
+ boolean volumeChanged = false;
+ boolean presentationDisplayChanged = false;
+
+ if (!Objects.equals(route.mName, globalRoute.name)) {
+ route.mName = globalRoute.name;
+ changed = true;
+ }
+ if (!Objects.equals(route.mDescription, globalRoute.description)) {
+ route.mDescription = globalRoute.description;
+ changed = true;
+ }
+ final int oldSupportedTypes = route.mSupportedTypes;
+ if (oldSupportedTypes != globalRoute.supportedTypes) {
+ route.mSupportedTypes = globalRoute.supportedTypes;
+ changed = true;
+ }
+ if (route.mEnabled != globalRoute.enabled) {
+ route.mEnabled = globalRoute.enabled;
+ changed = true;
+ }
+ if (route.mRealStatusCode != globalRoute.statusCode) {
+ route.setRealStatusCode(globalRoute.statusCode);
+ changed = true;
+ }
+ if (route.mPlaybackType != globalRoute.playbackType) {
+ route.mPlaybackType = globalRoute.playbackType;
+ changed = true;
+ }
+ if (route.mPlaybackStream != globalRoute.playbackStream) {
+ route.mPlaybackStream = globalRoute.playbackStream;
+ changed = true;
+ }
+ if (route.mVolume != globalRoute.volume) {
+ route.mVolume = globalRoute.volume;
+ changed = true;
+ volumeChanged = true;
+ }
+ if (route.mVolumeMax != globalRoute.volumeMax) {
+ route.mVolumeMax = globalRoute.volumeMax;
+ changed = true;
+ volumeChanged = true;
+ }
+ if (route.mVolumeHandling != globalRoute.volumeHandling) {
+ route.mVolumeHandling = globalRoute.volumeHandling;
+ changed = true;
+ volumeChanged = true;
+ }
+ if (route.mPresentationDisplayId != globalRoute.presentationDisplayId) {
+ route.mPresentationDisplayId = globalRoute.presentationDisplayId;
+ route.updatePresentationDisplay();
+ changed = true;
+ presentationDisplayChanged = true;
+ }
+
+ if (changed) {
+ dispatchRouteChanged(route, oldSupportedTypes);
+ }
+ if (volumeChanged) {
+ dispatchRouteVolumeChanged(route);
+ }
+ if (presentationDisplayChanged) {
+ dispatchRoutePresentationDisplayChanged(route);
+ }
+ }
+
+ RouteInfo findGlobalRoute(String globalRouteId) {
+ final int count = mRoutes.size();
+ for (int i = 0; i < count; i++) {
+ final RouteInfo route = mRoutes.get(i);
+ if (globalRouteId.equals(route.mGlobalRouteId)) {
+ return route;
+ }
+ }
+ return null;
+ }
+
+ boolean isPlaybackActive() {
+ if (mClient != null) {
+ try {
+ return mMediaRouterService.isPlaybackActive(mClient);
+ } catch (RemoteException ex) {
+ Log.e(TAG, "Unable to retrieve playback active state.", ex);
+ }
+ }
+ return false;
+ }
+
+ // Binder callbacks from the media router service; state changes are
+ // re-posted to the main-thread handler before being applied.
+ final class Client extends IMediaRouterClient.Stub {
+ @Override
+ public void onStateChanged() {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ if (Client.this == mClient) {
+ updateClientState();
+ }
+ }
+ });
+ }
+
+ @Override
+ public void onRestoreRoute() {
+ // Skip restoring route if the selected route is not a system audio route, or
+ // MediaRouter is initializing.
+ if ((mSelectedRoute != mDefaultAudioVideo && mSelectedRoute != mBluetoothA2dpRoute)
+ || mSelectedRoute == null) {
+ return;
+ }
+ mSelectedRoute.select();
+ }
+ }
+ }
+
+ // Lazily initialized by the first MediaRouter constructor; see MediaRouter(Context).
+ static Static sStatic;
+
+ /**
+ * Route type flag for live audio.
+ *
+ * <p>A device that supports live audio routing will allow the media audio stream
+ * to be routed to supported destinations. This can include internal speakers or
+ * audio jacks on the device itself, A2DP devices, and more.</p>
+ *
+ * <p>Once initiated this routing is transparent to the application. All audio
+ * played on the media stream will be routed to the selected destination.</p>
+ */
+ public static final int ROUTE_TYPE_LIVE_AUDIO = 1 << 0;
+
+ /**
+ * Route type flag for live video.
+ *
+ * <p>A device that supports live video routing will allow a mirrored version
+ * of the device's primary display or a customized
+ * {@link android.app.Presentation Presentation} to be routed to supported destinations.</p>
+ *
+ * <p>Once initiated, display mirroring is transparent to the application.
+ * While remote routing is active the application may use a
+ * {@link android.app.Presentation Presentation} to replace the mirrored view
+ * on the external display with different content.</p>
+ *
+ * @see RouteInfo#getPresentationDisplay()
+ * @see android.app.Presentation
+ */
+ public static final int ROUTE_TYPE_LIVE_VIDEO = 1 << 1;
+
+ /**
+ * Temporary interop constant to identify remote displays.
+ * @hide To be removed when media router API is updated.
+ */
+ public static final int ROUTE_TYPE_REMOTE_DISPLAY = 1 << 2;
+
+ /**
+ * Route type flag for application-specific usage.
+ *
+ * <p>Unlike other media route types, user routes are managed by the application.
+ * The MediaRouter will manage and dispatch events for user routes, but the application
+ * is expected to interpret the meaning of these events and perform the requested
+ * routing tasks.</p>
+ */
+ public static final int ROUTE_TYPE_USER = 1 << 23;
+
+ // Union of every route type bit above; used as the "match anything" mask.
+ static final int ROUTE_TYPE_ANY = ROUTE_TYPE_LIVE_AUDIO | ROUTE_TYPE_LIVE_VIDEO
+ | ROUTE_TYPE_REMOTE_DISPLAY | ROUTE_TYPE_USER;
+
+ /**
+ * Flag for {@link #addCallback}: Actively scan for routes while this callback
+ * is registered.
+ * <p>
+ * When this flag is specified, the media router will actively scan for new
+ * routes. Certain routes, such as wifi display routes, may not be discoverable
+ * except when actively scanning. This flag is typically used when the route picker
+ * dialog has been opened by the user to ensure that the route information is
+ * up to date.
+ * </p><p>
+ * Active scanning may consume a significant amount of power and may have intrusive
+ * effects on wireless connectivity. Therefore it is important that active scanning
+ * only be requested when it is actually needed to satisfy a user request to
+ * discover and select a new route.
+ * </p>
+ */
+ public static final int CALLBACK_FLAG_PERFORM_ACTIVE_SCAN = 1 << 0;
+
+ /**
+ * Flag for {@link #addCallback}: Do not filter route events.
+ * <p>
+ * When this flag is specified, the callback will be invoked for event that affect any
+ * route even if they do not match the callback's filter.
+ * </p>
+ */
+ public static final int CALLBACK_FLAG_UNFILTERED_EVENTS = 1 << 1;
+
+ /**
+ * Explicitly requests discovery.
+ *
+ * @hide Future API ported from support library. Revisit this later.
+ */
+ public static final int CALLBACK_FLAG_REQUEST_DISCOVERY = 1 << 2;
+
+ /**
+ * Requests that discovery be performed but only if there is some other active
+ * callback already registered.
+ *
+ * @hide Compatibility workaround for the fact that applications do not currently
+ * request discovery explicitly (except when using the support library API).
+ */
+ public static final int CALLBACK_FLAG_PASSIVE_DISCOVERY = 1 << 3;
+
+ /**
+ * Flag for {@link #isRouteAvailable}: Ignore the default route.
+ * <p>
+ * This flag is used to determine whether a matching non-default route is available.
+ * This constraint may be used to decide whether to offer the route chooser dialog
+ * to the user. There is no point offering the chooser if there are no
+ * non-default choices.
+ * </p>
+ *
+ * @hide Future API ported from support library. Revisit this later.
+ */
+ public static final int AVAILABILITY_FLAG_IGNORE_DEFAULT_ROUTE = 1 << 0;
+
+ // Maps application contexts
+ static final HashMap<Context, MediaRouter> sRouters = new HashMap<Context, MediaRouter>();
+
+    /** Builds a space-separated debug string naming each route type bit set in {@code types}. */
+    static String typesToString(int types) {
+        final StringBuilder result = new StringBuilder();
+        appendTypeIfSet(result, types, ROUTE_TYPE_LIVE_AUDIO, "ROUTE_TYPE_LIVE_AUDIO ");
+        appendTypeIfSet(result, types, ROUTE_TYPE_LIVE_VIDEO, "ROUTE_TYPE_LIVE_VIDEO ");
+        appendTypeIfSet(result, types, ROUTE_TYPE_REMOTE_DISPLAY, "ROUTE_TYPE_REMOTE_DISPLAY ");
+        appendTypeIfSet(result, types, ROUTE_TYPE_USER, "ROUTE_TYPE_USER ");
+        return result.toString();
+    }
+
+    /** Appends {@code label} to {@code out} when {@code bit} is set in {@code types}. */
+    private static void appendTypeIfSet(StringBuilder out, int types, int bit, String label) {
+        if ((types & bit) != 0) {
+            out.append(label);
+        }
+    }
+
    /**
     * Constructor. All MediaRouter instances share a single process-wide
     * {@code Static}; the first construction creates it, keyed off the
     * application context so the singleton does not retain an Activity.
     *
     * @hide
     */
    public MediaRouter(Context context) {
        synchronized (Static.class) {
            if (sStatic == null) {
                final Context appContext = context.getApplicationContext();
                sStatic = new Static(appContext);
                // Begin listening for route changes immediately so state is
                // populated before the first client query.
                sStatic.startMonitoringRoutes(appContext);
            }
        }
    }
+
    /**
     * Gets the default route for playing media content on the system.
     * <p>
     * The system always provides a default route.
     * </p>
     *
     * @return The default route, which is guaranteed to never be null.
     */
    public RouteInfo getDefaultRoute() {
        return sStatic.mDefaultAudioVideo;
    }

    /**
     * Returns a Bluetooth route if available, otherwise the default route.
     * @hide
     */
    public RouteInfo getFallbackRoute() {
        return (sStatic.mBluetoothA2dpRoute != null)
                ? sStatic.mBluetoothA2dpRoute : sStatic.mDefaultAudioVideo;
    }

    /**
     * Returns the shared category that system routes belong to.
     *
     * @hide for use by framework routing UI
     */
    public RouteCategory getSystemCategory() {
        return sStatic.mSystemCategory;
    }

    /**
     * Returns the currently selected route for any route type; equivalent to
     * {@code getSelectedRoute(ROUTE_TYPE_ANY)}.
     *
     * @hide
     */
    public RouteInfo getSelectedRoute() {
        return getSelectedRoute(ROUTE_TYPE_ANY);
    }
+
+ /**
+ * Return the currently selected route for any of the given types
+ *
+ * @param type route types
+ * @return the selected route
+ */
+ public RouteInfo getSelectedRoute(int type) {
+ if (sStatic.mSelectedRoute != null &&
+ (sStatic.mSelectedRoute.mSupportedTypes & type) != 0) {
+ // If the selected route supports any of the types supplied, it's still considered
+ // 'selected' for that type.
+ return sStatic.mSelectedRoute;
+ } else if (type == ROUTE_TYPE_USER) {
+ // The caller specifically asked for a user route and the currently selected route
+ // doesn't qualify.
+ return null;
+ }
+ // If the above didn't match and we're not specifically asking for a user route,
+ // consider the default selected.
+ return sStatic.mDefaultAudioVideo;
+ }
+
+ /**
+ * Returns true if there is a route that matches the specified types.
+ * <p>
+ * This method returns true if there are any available routes that match the types
+ * regardless of whether they are enabled or disabled. If the
+ * {@link #AVAILABILITY_FLAG_IGNORE_DEFAULT_ROUTE} flag is specified, then
+ * the method will only consider non-default routes.
+ * </p>
+ *
+ * @param types The types to match.
+ * @param flags Flags to control the determination of whether a route may be available.
+ * May be zero or {@link #AVAILABILITY_FLAG_IGNORE_DEFAULT_ROUTE}.
+ * @return True if a matching route may be available.
+ *
+ * @hide Future API ported from support library. Revisit this later.
+ */
+ public boolean isRouteAvailable(int types, int flags) {
+ final int count = sStatic.mRoutes.size();
+ for (int i = 0; i < count; i++) {
+ RouteInfo route = sStatic.mRoutes.get(i);
+ if (route.matchesTypes(types)) {
+ if ((flags & AVAILABILITY_FLAG_IGNORE_DEFAULT_ROUTE) == 0
+ || route != sStatic.mDefaultAudioVideo) {
+ return true;
+ }
+ }
+ }
+
+ // It doesn't look like we can find a matching route right now.
+ return false;
+ }
+
    /**
     * Add a callback to listen to events about specific kinds of media routes.
     * If the specified callback is already registered, its registration will be updated for any
     * additional route types specified.
     * <p>
     * This is a convenience method that has the same effect as calling
     * {@link #addCallback(int, Callback, int)} without flags.
     * </p>
     *
     * @param types Types of routes this callback is interested in
     * @param cb Callback to add
     */
    public void addCallback(int types, Callback cb) {
        addCallback(types, cb, 0);
    }
+
+ /**
+ * Add a callback to listen to events about specific kinds of media routes.
+ * If the specified callback is already registered, its registration will be updated for any
+ * additional route types specified.
+ * <p>
+ * By default, the callback will only be invoked for events that affect routes
+ * that match the specified selector. The filtering may be disabled by specifying
+ * the {@link #CALLBACK_FLAG_UNFILTERED_EVENTS} flag.
+ * </p>
+ *
+ * @param types Types of routes this callback is interested in
+ * @param cb Callback to add
+ * @param flags Flags to control the behavior of the callback.
+ * May be zero or a combination of {@link #CALLBACK_FLAG_PERFORM_ACTIVE_SCAN} and
+ * {@link #CALLBACK_FLAG_UNFILTERED_EVENTS}.
+ */
+ public void addCallback(int types, Callback cb, int flags) {
+ CallbackInfo info;
+ int index = findCallbackInfo(cb);
+ if (index >= 0) {
+ info = sStatic.mCallbacks.get(index);
+ info.type |= types;
+ info.flags |= flags;
+ } else {
+ info = new CallbackInfo(cb, types, flags, this);
+ sStatic.mCallbacks.add(info);
+ }
+ sStatic.updateDiscoveryRequest();
+ }
+
+ /**
+ * Remove the specified callback. It will no longer receive events about media routing.
+ *
+ * @param cb Callback to remove
+ */
+ public void removeCallback(Callback cb) {
+ int index = findCallbackInfo(cb);
+ if (index >= 0) {
+ sStatic.mCallbacks.remove(index);
+ sStatic.updateDiscoveryRequest();
+ } else {
+ Log.w(TAG, "removeCallback(" + cb + "): callback not registered");
+ }
+ }
+
+ private int findCallbackInfo(Callback cb) {
+ final int count = sStatic.mCallbacks.size();
+ for (int i = 0; i < count; i++) {
+ final CallbackInfo info = sStatic.mCallbacks.get(i);
+ if (info.cb == cb) {
+ return i;
+ }
+ }
+ return -1;
+ }
+
    /**
     * Select the specified route to use for output of the given media types.
     * <p class="note">
     * As API version 18, this function may be used to select any route.
     * In prior versions, this function could only be used to select user
     * routes and would ignore any attempt to select a system route.
     * </p>
     *
     * @param types type flags indicating which types this route should be used for.
     *              The route must support at least a subset.
     * @param route Route to select
     * @throws IllegalArgumentException if the given route is {@code null}
     */
    public void selectRoute(int types, @NonNull RouteInfo route) {
        // Explicit check: @NonNull alone does not enforce at runtime.
        if (route == null) {
            throw new IllegalArgumentException("Route cannot be null.");
        }
        // explicit=true: this is a direct app request, not a system fallback.
        selectRouteStatic(types, route, true);
    }

    /**
     * Selects a route on behalf of internal framework callers; unlike
     * {@link #selectRoute} it performs no null check and lets the caller
     * decide whether the selection counts as explicit.
     *
     * @hide internal use
     */
    public void selectRouteInt(int types, RouteInfo route, boolean explicit) {
        selectRouteStatic(types, route, explicit);
    }
+
+ static void selectRouteStatic(int types, @NonNull RouteInfo route, boolean explicit) {
+ Log.v(TAG, "Selecting route: " + route);
+ assert(route != null);
+ final RouteInfo oldRoute = sStatic.mSelectedRoute;
+ boolean wasDefaultOrBluetoothRoute = (oldRoute == sStatic.mDefaultAudioVideo
+ || oldRoute == sStatic.mBluetoothA2dpRoute);
+ if (oldRoute == route
+ && (!wasDefaultOrBluetoothRoute || route == sStatic.getCurrentSystemAudioRoute())) {
+ return;
+ }
+ if (!route.matchesTypes(types)) {
+ Log.w(TAG, "selectRoute ignored; cannot select route with supported types " +
+ typesToString(route.getSupportedTypes()) + " into route types " +
+ typesToString(types));
+ return;
+ }
+
+ final RouteInfo btRoute = sStatic.mBluetoothA2dpRoute;
+ if (sStatic.isPlaybackActive() && btRoute != null && (types & ROUTE_TYPE_LIVE_AUDIO) != 0
+ && (route == btRoute || route == sStatic.mDefaultAudioVideo)) {
+ try {
+ sStatic.mAudioService.setBluetoothA2dpOn(route == btRoute);
+ // TODO: Remove the following logging when no longer needed.
+ if (route != btRoute) {
+ StackTraceElement[] callStack = Thread.currentThread().getStackTrace();
+ StringBuffer sb = new StringBuffer();
+ // callStack[3] is the caller of this method.
+ for (int i = 3; i < callStack.length; i++) {
+ StackTraceElement caller = callStack[i];
+ sb.append(caller.getClassName() + "." + caller.getMethodName()
+ + ":" + caller.getLineNumber()).append(" ");
+ }
+ Log.w(TAG, "Default route is selected while a BT route is available: pkgName="
+ + sStatic.mPackageName + ", callers=" + sb.toString());
+ }
+ } catch (RemoteException e) {
+ Log.e(TAG, "Error changing Bluetooth A2DP state", e);
+ }
+ }
+
+ final WifiDisplay activeDisplay =
+ sStatic.mDisplayService.getWifiDisplayStatus().getActiveDisplay();
+ final boolean oldRouteHasAddress = oldRoute != null && oldRoute.mDeviceAddress != null;
+ final boolean newRouteHasAddress = route.mDeviceAddress != null;
+ if (activeDisplay != null || oldRouteHasAddress || newRouteHasAddress) {
+ if (newRouteHasAddress && !matchesDeviceAddress(activeDisplay, route)) {
+ if (sStatic.mCanConfigureWifiDisplays) {
+ sStatic.mDisplayService.connectWifiDisplay(route.mDeviceAddress);
+ } else {
+ Log.e(TAG, "Cannot connect to wifi displays because this process "
+ + "is not allowed to do so.");
+ }
+ } else if (activeDisplay != null && !newRouteHasAddress) {
+ sStatic.mDisplayService.disconnectWifiDisplay();
+ }
+ }
+
+ sStatic.setSelectedRoute(route, explicit);
+
+ if (oldRoute != null) {
+ dispatchRouteUnselected(types & oldRoute.getSupportedTypes(), oldRoute);
+ if (oldRoute.resolveStatusCode()) {
+ dispatchRouteChanged(oldRoute);
+ }
+ }
+ if (route != null) {
+ if (route.resolveStatusCode()) {
+ dispatchRouteChanged(route);
+ }
+ dispatchRouteSelected(types & route.getSupportedTypes(), route);
+ }
+
+ // The behavior of active scans may depend on the currently selected route.
+ sStatic.updateDiscoveryRequest();
+ }
+
    /**
     * Falls back to a sensible selection: the Bluetooth A2DP route when one
     * exists, is not already selected, and A2DP is on; otherwise the default
     * audio/video route. Used when the current selection disappears.
     */
    static void selectDefaultRouteStatic() {
        // TODO: Be smarter about the route types here; this selects for all valid.
        if (sStatic.mSelectedRoute != sStatic.mBluetoothA2dpRoute
                && sStatic.mBluetoothA2dpRoute != null && sStatic.isBluetoothA2dpOn()) {
            selectRouteStatic(ROUTE_TYPE_ANY, sStatic.mBluetoothA2dpRoute, false);
        } else {
            selectRouteStatic(ROUTE_TYPE_ANY, sStatic.mDefaultAudioVideo, false);
        }
    }
+
+ /**
+ * Compare the device address of a display and a route.
+ * Nulls/no device address will match another null/no address.
+ */
+ static boolean matchesDeviceAddress(WifiDisplay display, RouteInfo info) {
+ final boolean routeHasAddress = info != null && info.mDeviceAddress != null;
+ if (display == null && !routeHasAddress) {
+ return true;
+ }
+
+ if (display != null && routeHasAddress) {
+ return display.getDeviceAddress().equals(info.mDeviceAddress);
+ }
+ return false;
+ }
+
    /**
     * Add an app-specified route for media to the MediaRouter.
     * App-specified route definitions are created using {@link #createUserRoute(RouteCategory)}
     *
     * @param info Definition of the route to add
     * @see #createUserRoute(RouteCategory)
     * @see #removeUserRoute(UserRouteInfo)
     */
    public void addUserRoute(UserRouteInfo info) {
        addRouteStatic(info);
    }

    /**
     * Adds an arbitrary (not necessarily user-created) route; same underlying
     * registration path as {@link #addUserRoute}.
     *
     * @hide Framework use only
     */
    public void addRouteInt(RouteInfo info) {
        addRouteStatic(info);
    }
+
    /**
     * Registers a route in the shared route list, creating its category entry
     * on first use. Routes added to a groupable category are wrapped in a new
     * single-member {@link RouteGroup}; the group (not the raw route) is what
     * gets registered and announced.
     */
    static void addRouteStatic(RouteInfo info) {
        Log.v(TAG, "Adding route: " + info);
        final RouteCategory cat = info.getCategory();
        if (!sStatic.mCategories.contains(cat)) {
            sStatic.mCategories.add(cat);
        }
        if (cat.isGroupable() && !(info instanceof RouteGroup)) {
            // Enforce that any added route in a groupable category must be in a group.
            // Order matters: announce the group before populating it.
            final RouteGroup group = new RouteGroup(info.getCategory());
            group.mSupportedTypes = info.mSupportedTypes;
            sStatic.mRoutes.add(group);
            dispatchRouteAdded(group);
            group.addRoute(info);

            info = group;
        } else {
            sStatic.mRoutes.add(info);
            dispatchRouteAdded(info);
        }
    }
+
    /**
     * Remove an app-specified route for media from the MediaRouter.
     *
     * @param info Definition of the route to remove
     * @see #addUserRoute(UserRouteInfo)
     */
    public void removeUserRoute(UserRouteInfo info) {
        removeRouteStatic(info);
    }

    /**
     * Remove all app-specified routes from the MediaRouter.
     *
     * @see #removeUserRoute(UserRouteInfo)
     */
    public void clearUserRoutes() {
        for (int i = 0; i < sStatic.mRoutes.size(); i++) {
            final RouteInfo info = sStatic.mRoutes.get(i);
            // TODO Right now, RouteGroups only ever contain user routes.
            // The code below will need to change if this assumption does.
            if (info instanceof UserRouteInfo || info instanceof RouteGroup) {
                removeRouteStatic(info);
                // Removal shifts later entries down; re-check this index.
                i--;
            }
        }
    }

    /**
     * Removes any route, not just user routes; same underlying path as
     * {@link #removeUserRoute}.
     *
     * @hide internal use only
     */
    public void removeRouteInt(RouteInfo info) {
        removeRouteStatic(info);
    }
+
    /**
     * Unregisters a route. If it was the selected route, the default is
     * re-selected first; if it was the last route in its category, the
     * category is dropped too. Dispatches a removed event on success; a route
     * not in the list is silently ignored.
     */
    static void removeRouteStatic(RouteInfo info) {
        Log.v(TAG, "Removing route: " + info);
        if (sStatic.mRoutes.remove(info)) {
            final RouteCategory removingCat = info.getCategory();
            final int count = sStatic.mRoutes.size();
            // Does any remaining route still use this category?
            boolean found = false;
            for (int i = 0; i < count; i++) {
                final RouteCategory cat = sStatic.mRoutes.get(i).getCategory();
                if (removingCat == cat) {
                    found = true;
                    break;
                }
            }
            if (info.isSelected()) {
                // Removing the currently selected route? Select the default before we remove it.
                selectDefaultRouteStatic();
            }
            if (!found) {
                sStatic.mCategories.remove(removingCat);
            }
            dispatchRouteRemoved(info);
        }
    }
+
    /**
     * Return the number of {@link MediaRouter.RouteCategory categories} currently
     * represented by routes known to this MediaRouter.
     *
     * @return the number of unique categories represented by this MediaRouter's known routes
     */
    public int getCategoryCount() {
        return sStatic.mCategories.size();
    }

    /**
     * Return the {@link MediaRouter.RouteCategory category} at the given index.
     * Valid indices are in the range [0-getCategoryCount).
     *
     * @param index which category to return
     * @return the category at index
     */
    public RouteCategory getCategoryAt(int index) {
        return sStatic.mCategories.get(index);
    }

    /**
     * Return the number of {@link MediaRouter.RouteInfo routes} currently known
     * to this MediaRouter.
     *
     * @return the number of routes tracked by this router
     */
    public int getRouteCount() {
        return sStatic.mRoutes.size();
    }

    /**
     * Return the route at the specified index.
     *
     * @param index index of the route to return
     * @return the route at index
     */
    public RouteInfo getRouteAt(int index) {
        return sStatic.mRoutes.get(index);
    }

    // Static variants of the two accessors above, for callers without an instance.
    static int getRouteCountStatic() {
        return sStatic.mRoutes.size();
    }

    static RouteInfo getRouteAtStatic(int index) {
        return sStatic.mRoutes.get(index);
    }
+
    /**
     * Create a new user route that may be modified and registered for use by the application.
     * The returned route is not active until passed to {@link #addUserRoute(UserRouteInfo)}.
     *
     * @param category The category the new route will belong to
     * @return A new UserRouteInfo for use by the application
     *
     * @see #addUserRoute(UserRouteInfo)
     * @see #removeUserRoute(UserRouteInfo)
     * @see #createRouteCategory(CharSequence, boolean)
     */
    public UserRouteInfo createUserRoute(RouteCategory category) {
        return new UserRouteInfo(category);
    }

    /**
     * Create a new route category. Each route must belong to a category.
     *
     * @param name Name of the new category
     * @param isGroupable true if routes in this category may be grouped with one another
     * @return the new RouteCategory
     */
    public RouteCategory createRouteCategory(CharSequence name, boolean isGroupable) {
        return new RouteCategory(name, ROUTE_TYPE_USER, isGroupable);
    }

    /**
     * Create a new route category. Each route must belong to a category.
     *
     * @param nameResId Resource ID of the name of the new category
     * @param isGroupable true if routes in this category may be grouped with one another
     * @return the new RouteCategory
     */
    public RouteCategory createRouteCategory(int nameResId, boolean isGroupable) {
        return new RouteCategory(nameResId, ROUTE_TYPE_USER, isGroupable);
    }
+
    /**
     * Rebinds the media router to handle routes that belong to the specified user.
     * Requires the interact across users permission to access the routes of another user.
     * <p>
     * This method is a complete hack to work around the singleton nature of the
     * media router when running inside of singleton processes like QuickSettings.
     * This mechanism should be burned to the ground when MediaRouter is redesigned.
     * Ideally the current user would be pulled from the Context but we need to break
     * down MediaRouter.Static before we can get there.
     * </p>
     *
     * @hide
     */
    public void rebindAsUser(int userId) {
        sStatic.rebindAsUser(userId);
    }
+
    /** Announces that {@code info}'s properties changed; alias for dispatchRouteChanged. */
    static void updateRoute(final RouteInfo info) {
        dispatchRouteChanged(info);
    }
+
+ static void dispatchRouteSelected(int type, RouteInfo info) {
+ for (CallbackInfo cbi : sStatic.mCallbacks) {
+ if (cbi.filterRouteEvent(info)) {
+ cbi.cb.onRouteSelected(cbi.router, type, info);
+ }
+ }
+ }
+
+ static void dispatchRouteUnselected(int type, RouteInfo info) {
+ for (CallbackInfo cbi : sStatic.mCallbacks) {
+ if (cbi.filterRouteEvent(info)) {
+ cbi.cb.onRouteUnselected(cbi.router, type, info);
+ }
+ }
+ }
+
    /** Dispatches a change event assuming the route's supported types did not change. */
    static void dispatchRouteChanged(RouteInfo info) {
        dispatchRouteChanged(info, info.mSupportedTypes);
    }

    /**
     * Dispatches a change event for {@code info}, synthesizing added/removed
     * (and selected/unselected) events for callbacks whose type filter means
     * the route just became visible or invisible to them because its supported
     * types changed from {@code oldSupportedTypes}.
     */
    static void dispatchRouteChanged(RouteInfo info, int oldSupportedTypes) {
        if (DEBUG) {
            Log.d(TAG, "Dispatching route change: " + info);
        }
        final int newSupportedTypes = info.mSupportedTypes;
        for (CallbackInfo cbi : sStatic.mCallbacks) {
            // Reconstruct some of the history for callbacks that may not have observed
            // all of the events needed to correctly interpret the current state.
            // FIXME: This is a strong signal that we should deprecate route type filtering
            // completely in the future because it can lead to inconsistencies in
            // applications.
            final boolean oldVisibility = cbi.filterRouteEvent(oldSupportedTypes);
            final boolean newVisibility = cbi.filterRouteEvent(newSupportedTypes);
            if (!oldVisibility && newVisibility) {
                // Route just entered this callback's filter: replay add (+select).
                cbi.cb.onRouteAdded(cbi.router, info);
                if (info.isSelected()) {
                    cbi.cb.onRouteSelected(cbi.router, newSupportedTypes, info);
                }
            }
            if (oldVisibility || newVisibility) {
                cbi.cb.onRouteChanged(cbi.router, info);
            }
            if (oldVisibility && !newVisibility) {
                // Route just left this callback's filter: replay (unselect+) remove.
                if (info.isSelected()) {
                    cbi.cb.onRouteUnselected(cbi.router, oldSupportedTypes, info);
                }
                cbi.cb.onRouteRemoved(cbi.router, info);
            }
        }
    }
+
    /** Notifies filtered callbacks that a route was added. */
    static void dispatchRouteAdded(RouteInfo info) {
        for (CallbackInfo cbi : sStatic.mCallbacks) {
            if (cbi.filterRouteEvent(info)) {
                cbi.cb.onRouteAdded(cbi.router, info);
            }
        }
    }

    /** Notifies filtered callbacks that a route was removed. */
    static void dispatchRouteRemoved(RouteInfo info) {
        for (CallbackInfo cbi : sStatic.mCallbacks) {
            if (cbi.filterRouteEvent(info)) {
                cbi.cb.onRouteRemoved(cbi.router, info);
            }
        }
    }

    /** Notifies filtered callbacks that a route joined a group; filter is on the group. */
    static void dispatchRouteGrouped(RouteInfo info, RouteGroup group, int index) {
        for (CallbackInfo cbi : sStatic.mCallbacks) {
            if (cbi.filterRouteEvent(group)) {
                cbi.cb.onRouteGrouped(cbi.router, info, group, index);
            }
        }
    }

    /** Notifies filtered callbacks that a route left a group; filter is on the group. */
    static void dispatchRouteUngrouped(RouteInfo info, RouteGroup group) {
        for (CallbackInfo cbi : sStatic.mCallbacks) {
            if (cbi.filterRouteEvent(group)) {
                cbi.cb.onRouteUngrouped(cbi.router, info, group);
            }
        }
    }

    /** Notifies filtered callbacks that a route's volume changed. */
    static void dispatchRouteVolumeChanged(RouteInfo info) {
        for (CallbackInfo cbi : sStatic.mCallbacks) {
            if (cbi.filterRouteEvent(info)) {
                cbi.cb.onRouteVolumeChanged(cbi.router, info);
            }
        }
    }

    /** Notifies filtered callbacks that a route's presentation display changed. */
    static void dispatchRoutePresentationDisplayChanged(RouteInfo info) {
        for (CallbackInfo cbi : sStatic.mCallbacks) {
            if (cbi.filterRouteEvent(info)) {
                cbi.cb.onRoutePresentationDisplayChanged(cbi.router, info);
            }
        }
    }
+
    /**
     * Reacts to a system volume change by dispatching a volume-changed event
     * against whichever route that volume applies to: the selected route when
     * it is the default or BT route, otherwise the live A2DP-or-default route.
     * No-op when nothing is selected.
     */
    static void systemVolumeChanged(int newValue) {
        final RouteInfo selectedRoute = sStatic.mSelectedRoute;
        if (selectedRoute == null) return;

        if (selectedRoute == sStatic.mBluetoothA2dpRoute ||
                selectedRoute == sStatic.mDefaultAudioVideo) {
            dispatchRouteVolumeChanged(selectedRoute);
        } else if (sStatic.mBluetoothA2dpRoute != null) {
            try {
                // Selected route is remote: attribute the volume change to the
                // actual system audio route (BT if A2DP is on, else default).
                dispatchRouteVolumeChanged(sStatic.mAudioService.isBluetoothA2dpOn() ?
                        sStatic.mBluetoothA2dpRoute : sStatic.mDefaultAudioVideo);
            } catch (RemoteException e) {
                Log.e(TAG, "Error checking Bluetooth A2DP state to report volume change", e);
            }
        } else {
            dispatchRouteVolumeChanged(sStatic.mDefaultAudioVideo);
        }
    }
+
    /**
     * Reconciles the route list with the latest wifi display status: adds or
     * updates a route per visible display, selects the route for the active
     * display, then removes routes whose display vanished. Also records the
     * active display's address so a later status update can be recognized as a
     * disconnection.
     */
    static void updateWifiDisplayStatus(WifiDisplayStatus status) {
        WifiDisplay[] displays;
        WifiDisplay activeDisplay;
        if (status.getFeatureState() == WifiDisplayStatus.FEATURE_STATE_ON) {
            displays = status.getDisplays();
            activeDisplay = status.getActiveDisplay();

            // Only the system is able to connect to wifi display routes.
            // The display manager will enforce this with a permission check but it
            // still publishes information about all available displays.
            // Filter the list down to just the active display.
            if (!sStatic.mCanConfigureWifiDisplays) {
                if (activeDisplay != null) {
                    displays = new WifiDisplay[] { activeDisplay };
                } else {
                    displays = WifiDisplay.EMPTY_ARRAY;
                }
            }
        } else {
            // Feature off: treat as no displays at all.
            displays = WifiDisplay.EMPTY_ARRAY;
            activeDisplay = null;
        }
        String activeDisplayAddress = activeDisplay != null ?
                activeDisplay.getDeviceAddress() : null;

        // Add or update routes.
        for (int i = 0; i < displays.length; i++) {
            final WifiDisplay d = displays[i];
            if (shouldShowWifiDisplay(d, activeDisplay)) {
                RouteInfo route = findWifiDisplayRoute(d);
                if (route == null) {
                    route = makeWifiDisplayRoute(d, status);
                    addRouteStatic(route);
                } else {
                    // A display that was active last update but is not now was
                    // disconnected (rather than merely changed).
                    String address = d.getDeviceAddress();
                    boolean disconnected = !address.equals(activeDisplayAddress)
                            && address.equals(sStatic.mPreviousActiveWifiDisplayAddress);
                    updateWifiDisplayRoute(route, d, status, disconnected);
                }
                if (d.equals(activeDisplay)) {
                    selectRouteStatic(route.getSupportedTypes(), route, false);
                }
            }
        }

        // Remove stale routes. Iterate backwards so removal doesn't skip entries.
        for (int i = sStatic.mRoutes.size(); i-- > 0; ) {
            RouteInfo route = sStatic.mRoutes.get(i);
            if (route.mDeviceAddress != null) {
                WifiDisplay d = findWifiDisplay(displays, route.mDeviceAddress);
                if (d == null || !shouldShowWifiDisplay(d, activeDisplay)) {
                    removeRouteStatic(route);
                }
            }
        }

        // Remember the current active wifi display address so that we can infer disconnections.
        // TODO: This hack will go away once all of this is moved into the media router service.
        sStatic.mPreviousActiveWifiDisplayAddress = activeDisplayAddress;
    }
+
+ private static boolean shouldShowWifiDisplay(WifiDisplay d, WifiDisplay activeDisplay) {
+ return d.isRemembered() || d.equals(activeDisplay);
+ }
+
    /**
     * Maps a wifi display's state to a RouteInfo.STATUS_* code. Base status
     * comes from scan/availability; if the display is the active one, its
     * connection state takes precedence.
     */
    static int getWifiDisplayStatusCode(WifiDisplay d, WifiDisplayStatus wfdStatus) {
        int newStatus;
        if (wfdStatus.getScanState() == WifiDisplayStatus.SCAN_STATE_SCANNING) {
            newStatus = RouteInfo.STATUS_SCANNING;
        } else if (d.isAvailable()) {
            newStatus = d.canConnect() ?
                    RouteInfo.STATUS_AVAILABLE: RouteInfo.STATUS_IN_USE;
        } else {
            newStatus = RouteInfo.STATUS_NOT_AVAILABLE;
        }

        if (d.equals(wfdStatus.getActiveDisplay())) {
            final int activeState = wfdStatus.getActiveDisplayState();
            switch (activeState) {
                case WifiDisplayStatus.DISPLAY_STATE_CONNECTED:
                    newStatus = RouteInfo.STATUS_CONNECTED;
                    break;
                case WifiDisplayStatus.DISPLAY_STATE_CONNECTING:
                    newStatus = RouteInfo.STATUS_CONNECTING;
                    break;
                case WifiDisplayStatus.DISPLAY_STATE_NOT_CONNECTED:
                    // Inconsistent state from the display service; keep base status.
                    Log.e(TAG, "Active display is not connected!");
                    break;
            }
        }

        return newStatus;
    }
+
    /** A display route is enabled if available and either connectable or already active. */
    static boolean isWifiDisplayEnabled(WifiDisplay d, WifiDisplayStatus wfdStatus) {
        return d.isAvailable() && (d.canConnect() || d.equals(wfdStatus.getActiveDisplay()));
    }
+
    /**
     * Builds a new system-category route describing a wifi display: fixed
     * remote-playback volume, live audio/video + remote display types, TV
     * device type, and name/status derived from the display itself.
     */
    static RouteInfo makeWifiDisplayRoute(WifiDisplay display, WifiDisplayStatus wfdStatus) {
        final RouteInfo newRoute = new RouteInfo(sStatic.mSystemCategory);
        newRoute.mDeviceAddress = display.getDeviceAddress();
        newRoute.mSupportedTypes = ROUTE_TYPE_LIVE_AUDIO | ROUTE_TYPE_LIVE_VIDEO
                | ROUTE_TYPE_REMOTE_DISPLAY;
        // Volume is controlled at the sink, not locally.
        newRoute.mVolumeHandling = RouteInfo.PLAYBACK_VOLUME_FIXED;
        newRoute.mPlaybackType = RouteInfo.PLAYBACK_TYPE_REMOTE;

        newRoute.setRealStatusCode(getWifiDisplayStatusCode(display, wfdStatus));
        newRoute.mEnabled = isWifiDisplayEnabled(display, wfdStatus);
        newRoute.mName = display.getFriendlyDisplayName();
        newRoute.mDescription = sStatic.mResources.getText(
                com.android.internal.R.string.wireless_display_route_description);
        newRoute.updatePresentationDisplay();
        newRoute.mDeviceType = RouteInfo.DEVICE_TYPE_TV;
        return newRoute;
    }
+
    /**
     * Refreshes an existing wifi display route's name, enabled state, and
     * status from the latest display info, dispatching a change event if any
     * of them moved. A route that became unusable (disabled or disconnected)
     * while selected triggers re-selection of the default route.
     */
    private static void updateWifiDisplayRoute(
            RouteInfo route, WifiDisplay display, WifiDisplayStatus wfdStatus,
            boolean disconnected) {
        boolean changed = false;
        final String newName = display.getFriendlyDisplayName();
        if (!route.getName().equals(newName)) {
            route.mName = newName;
            changed = true;
        }

        boolean enabled = isWifiDisplayEnabled(display, wfdStatus);
        changed |= route.mEnabled != enabled;
        route.mEnabled = enabled;

        changed |= route.setRealStatusCode(getWifiDisplayStatusCode(display, wfdStatus));

        if (changed) {
            dispatchRouteChanged(route);
        }

        if ((!enabled || disconnected) && route.isSelected()) {
            // Oops, no longer available. Reselect the default.
            selectDefaultRouteStatic();
        }
    }
+
+ private static WifiDisplay findWifiDisplay(WifiDisplay[] displays, String deviceAddress) {
+ for (int i = 0; i < displays.length; i++) {
+ final WifiDisplay d = displays[i];
+ if (d.getDeviceAddress().equals(deviceAddress)) {
+ return d;
+ }
+ }
+ return null;
+ }
+
+ private static RouteInfo findWifiDisplayRoute(WifiDisplay d) {
+ final int count = sStatic.mRoutes.size();
+ for (int i = 0; i < count; i++) {
+ final RouteInfo info = sStatic.mRoutes.get(i);
+ if (d.getDeviceAddress().equals(info.mDeviceAddress)) {
+ return info;
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Information about a media route.
+ */
+ public static class RouteInfo {
        // User-visible name; mNameResId, when non-zero, overrides it (see getName(Resources)).
        CharSequence mName;
        int mNameResId;
        // Secondary user-visible description of the destination.
        CharSequence mDescription;
        // Localized status text resolved from mResolvedStatusCode.
        private CharSequence mStatus;
        // Bitmask of ROUTE_TYPE_* values this route supports.
        int mSupportedTypes;
        // One of the DEVICE_TYPE_* constants below.
        int mDeviceType;
        // Group containing this route, if any.
        RouteGroup mGroup;
        final RouteCategory mCategory;
        Drawable mIcon;
        // playback information
        int mPlaybackType = PLAYBACK_TYPE_LOCAL;
        int mVolumeMax = RemoteControlClient.DEFAULT_PLAYBACK_VOLUME;
        int mVolume = RemoteControlClient.DEFAULT_PLAYBACK_VOLUME;
        int mVolumeHandling = RemoteControlClient.DEFAULT_PLAYBACK_VOLUME_HANDLING;
        int mPlaybackStream = AudioManager.STREAM_MUSIC;
        VolumeCallbackInfo mVcb;
        Display mPresentationDisplay;
        int mPresentationDisplayId = -1;

        // Wifi display device address; null for routes that have none.
        String mDeviceAddress;
        boolean mEnabled = true;

        // An id by which the route is known to the media router service.
        // Null if this route only exists as an artifact within this process.
        String mGlobalRouteId;

        // A predetermined connection status that can override mStatus
        private int mRealStatusCode;
        // mRealStatusCode adjusted for selection state; see resolveStatusCode().
        private int mResolvedStatusCode;
+
+ /** @hide */ public static final int STATUS_NONE = 0;
+ /** @hide */ public static final int STATUS_SCANNING = 1;
+ /** @hide */ public static final int STATUS_CONNECTING = 2;
+ /** @hide */ public static final int STATUS_AVAILABLE = 3;
+ /** @hide */ public static final int STATUS_NOT_AVAILABLE = 4;
+ /** @hide */ public static final int STATUS_IN_USE = 5;
+ /** @hide */ public static final int STATUS_CONNECTED = 6;
+
+ /** @hide */
+ @IntDef({DEVICE_TYPE_UNKNOWN, DEVICE_TYPE_TV, DEVICE_TYPE_SPEAKER, DEVICE_TYPE_BLUETOOTH})
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface DeviceType {}
+
+ /**
+ * The default receiver device type of the route indicating the type is unknown.
+ *
+ * @see #getDeviceType
+ */
+ public static final int DEVICE_TYPE_UNKNOWN = 0;
+
+ /**
+ * A receiver device type of the route indicating the presentation of the media is happening
+ * on a TV.
+ *
+ * @see #getDeviceType
+ */
+ public static final int DEVICE_TYPE_TV = 1;
+
+ /**
+ * A receiver device type of the route indicating the presentation of the media is happening
+ * on a speaker.
+ *
+ * @see #getDeviceType
+ */
+ public static final int DEVICE_TYPE_SPEAKER = 2;
+
+ /**
+ * A receiver device type of the route indicating the presentation of the media is happening
+ * on a bluetooth device such as a bluetooth speaker.
+ *
+ * @see #getDeviceType
+ */
+ public static final int DEVICE_TYPE_BLUETOOTH = 3;
+
+ private Object mTag;
+
+ /** @hide */
+ @IntDef({PLAYBACK_TYPE_LOCAL, PLAYBACK_TYPE_REMOTE})
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface PlaybackType {}
+
+ /**
+ * The default playback type, "local", indicating the presentation of the media is happening
+ * on the same device (e&#46;g&#46; a phone, a tablet) as where it is controlled from.
+ * @see #getPlaybackType()
+ */
+ public final static int PLAYBACK_TYPE_LOCAL = 0;
+
+ /**
+ * A playback type indicating the presentation of the media is happening on
+ * a different device (i&#46;e&#46; the remote device) than where it is controlled from.
+ * @see #getPlaybackType()
+ */
+ public final static int PLAYBACK_TYPE_REMOTE = 1;
+
+ /** @hide */
+ @IntDef({PLAYBACK_VOLUME_FIXED,PLAYBACK_VOLUME_VARIABLE})
+ @Retention(RetentionPolicy.SOURCE)
+ private @interface PlaybackVolume {}
+
+ /**
+ * Playback information indicating the playback volume is fixed, i&#46;e&#46; it cannot be
+ * controlled from this object. An example of fixed playback volume is a remote player,
+ * playing over HDMI where the user prefers to control the volume on the HDMI sink, rather
+ * than attenuate at the source.
+ * @see #getVolumeHandling()
+ */
+ public final static int PLAYBACK_VOLUME_FIXED = 0;
+ /**
+ * Playback information indicating the playback volume is variable and can be controlled
+ * from this object.
+ * @see #getVolumeHandling()
+ */
+ public final static int PLAYBACK_VOLUME_VARIABLE = 1;
+
        /** Creates a route in the given category with an unknown device type. */
        RouteInfo(RouteCategory category) {
            mCategory = category;
            mDeviceType = DEVICE_TYPE_UNKNOWN;
        }
+
        /**
         * Gets the user-visible name of the route.
         * <p>
         * The route name identifies the destination represented by the route.
         * It may be a user-supplied name, an alias, or device serial number.
         * </p>
         *
         * @return The user-visible name of a media route. This is the string presented
         * to users who may select this as the active route.
         */
        public CharSequence getName() {
            // Resolve against the process-wide Resources.
            return getName(sStatic.mResources);
        }
+
        /**
         * Return the properly localized/resource user-visible name of this route.
         * <p>
         * The route name identifies the destination represented by the route.
         * It may be a user-supplied name, an alias, or device serial number.
         * </p>
         *
         * @param context Context used to resolve the correct configuration to load
         * @return The user-visible name of a media route. This is the string presented
         * to users who may select this as the active route.
         */
        public CharSequence getName(Context context) {
            return getName(context.getResources());
        }
+
+ CharSequence getName(Resources res) {
+ if (mNameResId != 0) {
+ return res.getText(mNameResId);
+ }
+ return mName;
+ }
+
        /**
         * Gets the user-visible description of the route.
         * <p>
         * The route description describes the kind of destination represented by the route.
         * It may be a user-supplied string, a model number or brand of device.
         * </p>
         *
         * @return The description of the route, or null if none.
         */
        public CharSequence getDescription() {
            return mDescription;
        }

        /**
         * @return The user-visible status for a media route. This may include a description
         * of the currently playing media, if available.
         */
        public CharSequence getStatus() {
            return mStatus;
        }
+
+ /**
+ * Set this route's status by predetermined status code. If the caller
+ * should dispatch a route changed event this call will return true;
+ */
+ boolean setRealStatusCode(int statusCode) {
+ if (mRealStatusCode != statusCode) {
+ mRealStatusCode = statusCode;
+ return resolveStatusCode();
+ }
+ return false;
+ }
+
        /**
         * Resolves the status code whenever the real status code or selection state
         * changes. Updates {@code mResolvedStatusCode} and the localized
         * {@code mStatus} text; returns true when the resolved status actually
         * changed (i.e. a change event should be dispatched).
         */
        boolean resolveStatusCode() {
            int statusCode = mRealStatusCode;
            if (isSelected()) {
                switch (statusCode) {
                    // If the route is selected and its status appears to be between states
                    // then report it as connecting even though it has not yet had a chance
                    // to officially move into the CONNECTING state. Note that routes in
                    // the NONE state are assumed to not require an explicit connection
                    // lifecycle whereas those that are AVAILABLE are assumed to have
                    // to eventually proceed to CONNECTED.
                    case STATUS_AVAILABLE:
                    case STATUS_SCANNING:
                        statusCode = STATUS_CONNECTING;
                        break;
                }
            }
            if (mResolvedStatusCode == statusCode) {
                return false;
            }

            mResolvedStatusCode = statusCode;
            // Map the resolved code to its localized status string (0 = no text).
            int resId;
            switch (statusCode) {
                case STATUS_SCANNING:
                    resId = com.android.internal.R.string.media_route_status_scanning;
                    break;
                case STATUS_CONNECTING:
                    resId = com.android.internal.R.string.media_route_status_connecting;
                    break;
                case STATUS_AVAILABLE:
                    resId = com.android.internal.R.string.media_route_status_available;
                    break;
                case STATUS_NOT_AVAILABLE:
                    resId = com.android.internal.R.string.media_route_status_not_available;
                    break;
                case STATUS_IN_USE:
                    resId = com.android.internal.R.string.media_route_status_in_use;
                    break;
                case STATUS_CONNECTED:
                case STATUS_NONE:
                default:
                    resId = 0;
                    break;
            }
            mStatus = resId != 0 ? sStatic.mResources.getText(resId) : null;
            return true;
        }
+
+        /**
+         * Returns the resolved (selection-aware) status code, which may differ
+         * from the raw code passed to {@link #setRealStatusCode(int)}.
+         * @hide
+         */
+        public int getStatusCode() {
+            return mResolvedStatusCode;
+        }
+
+        /**
+         * @return A media type flag set describing which types this route supports.
+         */
+        public int getSupportedTypes() {
+            return mSupportedTypes;
+        }
+
+        /**
+         * Gets the type of the receiver device associated with this route.
+         *
+         * @return The type of the receiver device associated with this route:
+         * {@link #DEVICE_TYPE_BLUETOOTH}, {@link #DEVICE_TYPE_TV}, {@link #DEVICE_TYPE_SPEAKER},
+         * or {@link #DEVICE_TYPE_UNKNOWN}.
+         */
+        @DeviceType
+        public int getDeviceType() {
+            return mDeviceType;
+        }
+
+        /** Returns true if any bit in {@code types} is also supported. @hide */
+        public boolean matchesTypes(int types) {
+            return (mSupportedTypes & types) != 0;
+        }
+
+        /**
+         * @return The group that this route belongs to, or null if ungrouped.
+         */
+        public RouteGroup getGroup() {
+            return mGroup;
+        }
+
+        /**
+         * @return the category this route belongs to.
+         */
+        public RouteCategory getCategory() {
+            return mCategory;
+        }
+
+        /**
+         * Get the icon representing this route.
+         * This icon will be used in picker UIs if available.
+         *
+         * @return the icon representing this route or null if no icon is available
+         */
+        public Drawable getIconDrawable() {
+            return mIcon;
+        }
+
+        /**
+         * Set an application-specific tag object for this route.
+         * The application may use this to store arbitrary data associated with the
+         * route for internal tracking.
+         *
+         * <p>Note that the lifespan of a route may be well past the lifespan of
+         * an Activity or other Context; take care that objects you store here
+         * will not keep more data in memory alive than you intend.</p>
+         *
+         * @param tag Arbitrary, app-specific data for this route to hold for later use
+         */
+        public void setTag(Object tag) {
+            mTag = tag;
+            // Setting the tag dispatches a route-changed event to callbacks.
+            routeUpdated();
+        }
+
+        /**
+         * @return The tag object previously set by the application
+         * @see #setTag(Object)
+         */
+        public Object getTag() {
+            return mTag;
+        }
+
+        /**
+         * @return the type of playback associated with this route
+         * @see UserRouteInfo#setPlaybackType(int)
+         */
+        @PlaybackType
+        public int getPlaybackType() {
+            return mPlaybackType;
+        }
+
+        /**
+         * @return the stream over which the playback associated with this route is performed
+         * @see UserRouteInfo#setPlaybackStream(int)
+         */
+        public int getPlaybackStream() {
+            return mPlaybackStream;
+        }
+
+        /**
+         * Return the current volume for this route. Depending on the route, this may only
+         * be valid if the route is currently selected.
+         *
+         * @return the volume at which the playback associated with this route is performed
+         * @see UserRouteInfo#setVolume(int)
+         */
+        public int getVolume() {
+            if (mPlaybackType != PLAYBACK_TYPE_LOCAL) {
+                return mVolume;
+            }
+            // Local playback mirrors the system stream volume; fall back to 0
+            // if the audio service cannot be reached.
+            try {
+                return sStatic.mAudioService.getStreamVolume(mPlaybackStream);
+            } catch (RemoteException e) {
+                Log.e(TAG, "Error getting local stream volume", e);
+                return 0;
+            }
+        }
+
+        /**
+         * Request a volume change for this route.
+         * @param volume value between 0 and getVolumeMax
+         */
+        public void requestSetVolume(int volume) {
+            if (mPlaybackType != PLAYBACK_TYPE_LOCAL) {
+                // Remote playback: forward the request to the route provider.
+                sStatic.requestSetVolume(this, volume);
+                return;
+            }
+            try {
+                sStatic.mAudioService.setStreamVolume(mPlaybackStream, volume, 0,
+                        ActivityThread.currentPackageName());
+            } catch (RemoteException e) {
+                Log.e(TAG, "Error setting local stream volume", e);
+            }
+        }
+
+        /**
+         * Request an incremental volume update for this route.
+         * @param direction Delta to apply to the current volume
+         */
+        public void requestUpdateVolume(int direction) {
+            if (mPlaybackType != PLAYBACK_TYPE_LOCAL) {
+                // Remote playback: forward the delta to the route provider.
+                sStatic.requestUpdateVolume(this, direction);
+                return;
+            }
+            try {
+                // Clamp the adjusted volume to [0, max] before applying it.
+                final int clamped =
+                        Math.max(0, Math.min(getVolume() + direction, getVolumeMax()));
+                sStatic.mAudioService.setStreamVolume(mPlaybackStream, clamped, 0,
+                        ActivityThread.currentPackageName());
+            } catch (RemoteException e) {
+                Log.e(TAG, "Error setting local stream volume", e);
+            }
+        }
+
+        /**
+         * @return the maximum volume at which the playback associated with this route is performed
+         * @see UserRouteInfo#setVolumeMax(int)
+         */
+        public int getVolumeMax() {
+            if (mPlaybackType != PLAYBACK_TYPE_LOCAL) {
+                return mVolumeMax;
+            }
+            // Local playback mirrors the system stream's maximum; fall back to
+            // 0 if the audio service cannot be reached.
+            try {
+                return sStatic.mAudioService.getStreamMaxVolume(mPlaybackStream);
+            } catch (RemoteException e) {
+                Log.e(TAG, "Error getting local stream volume", e);
+                return 0;
+            }
+        }
+
+        /**
+         * @return how volume is handled on the route
+         * @see UserRouteInfo#setVolumeHandling(int)
+         */
+        @PlaybackVolume
+        public int getVolumeHandling() {
+            return mVolumeHandling;
+        }
+
+        /**
+         * Gets the {@link Display} that should be used by the application to show
+         * a {@link android.app.Presentation} on an external display when this route is selected.
+         * Depending on the route, this may only be valid if the route is currently
+         * selected.
+         * <p>
+         * The preferred presentation display may change independently of the route
+         * being selected or unselected. For example, the presentation display
+         * of the default system route may change when an external HDMI display is connected
+         * or disconnected even though the route itself has not changed.
+         * </p><p>
+         * This method may return null if there is no external display associated with
+         * the route or if the display is not ready to show UI yet.
+         * </p><p>
+         * The application should listen for changes to the presentation display
+         * using the {@link Callback#onRoutePresentationDisplayChanged} callback and
+         * show or dismiss its {@link android.app.Presentation} accordingly when the display
+         * becomes available or is removed.
+         * </p><p>
+         * This method only makes sense for {@link #ROUTE_TYPE_LIVE_VIDEO live video} routes.
+         * </p>
+         *
+         * @return The preferred presentation display to use when this route is
+         * selected or null if none.
+         *
+         * @see #ROUTE_TYPE_LIVE_VIDEO
+         * @see android.app.Presentation
+         */
+        public Display getPresentationDisplay() {
+            return mPresentationDisplay;
+        }
+
+        /**
+         * Re-evaluates the presentation display for this route.
+         *
+         * @return true if the cached display changed. Uses identity comparison;
+         * NOTE(review): this assumes DisplayManager hands back stable Display
+         * instances per display id — confirm before relying on it.
+         */
+        boolean updatePresentationDisplay() {
+            Display display = choosePresentationDisplay();
+            if (mPresentationDisplay != display) {
+                mPresentationDisplay = display;
+                return true;
+            }
+            return false;
+        }
+
+        /**
+         * Picks the Display to use for presentations on this route. Only live
+         * video routes qualify. Preference order: an explicitly specified
+         * display id, then a Wifi display matched by device address, then (for
+         * the default route only) the first available presentation display.
+         * Returns null when no suitable display exists.
+         */
+        private Display choosePresentationDisplay() {
+            if ((mSupportedTypes & ROUTE_TYPE_LIVE_VIDEO) != 0) {
+                Display[] displays = sStatic.getAllPresentationDisplays();
+
+                // Ensure that the specified display is valid for presentations.
+                // This check will normally disallow the default display unless it was
+                // configured as a presentation display for some reason.
+                if (mPresentationDisplayId >= 0) {
+                    for (Display display : displays) {
+                        if (display.getDisplayId() == mPresentationDisplayId) {
+                            return display;
+                        }
+                    }
+                    return null;
+                }
+
+                // Find the indicated Wifi display by its address.
+                if (mDeviceAddress != null) {
+                    for (Display display : displays) {
+                        if (display.getType() == Display.TYPE_WIFI
+                                && mDeviceAddress.equals(display.getAddress())) {
+                            return display;
+                        }
+                    }
+                    return null;
+                }
+
+                // For the default route, choose the first presentation display from the list.
+                if (this == sStatic.mDefaultAudioVideo && displays.length > 0) {
+                    return displays[0];
+                }
+            }
+            return null;
+        }
+
+        /** Returns the Wifi display device address for this route, if any. @hide */
+        public String getDeviceAddress() {
+            return mDeviceAddress;
+        }
+
+        /**
+         * Returns true if this route is enabled and may be selected.
+         *
+         * @return True if this route is enabled.
+         */
+        public boolean isEnabled() {
+            return mEnabled;
+        }
+
+        /**
+         * Returns true if the route is in the process of connecting and is not
+         * yet ready for use.
+         *
+         * @return True if this route is in the process of connecting.
+         */
+        public boolean isConnecting() {
+            return mResolvedStatusCode == STATUS_CONNECTING;
+        }
+
+        /** Returns true if this is the router's currently selected route. @hide */
+        public boolean isSelected() {
+            return this == sStatic.mSelectedRoute;
+        }
+
+        /** Returns true if this is the default audio/video route. @hide */
+        public boolean isDefault() {
+            return this == sStatic.mDefaultAudioVideo;
+        }
+
+        /** Returns true if this is the Bluetooth A2DP route. @hide */
+        public boolean isBluetooth() {
+            return this == sStatic.mBluetoothA2dpRoute;
+        }
+
+        /** Makes this route the selected route for all of its supported types. @hide */
+        public void select() {
+            selectRouteStatic(mSupportedTypes, this, true);
+        }
+
+        // Updates the status text, notifying the containing group (if any) and
+        // dispatching a route-changed event when it actually changed.
+        // NOTE(review): throws NPE if status is null — callers appear to always
+        // pass non-null; confirm before relying on a null status here.
+        void setStatusInt(CharSequence status) {
+            if (!status.equals(mStatus)) {
+                mStatus = status;
+                if (mGroup != null) {
+                    mGroup.memberStatusChanged(this, status);
+                }
+                routeUpdated();
+            }
+        }
+
+        // Forwards remote volume requests to the app's VolumeCallback on the
+        // main-thread handler: a non-zero direction is an incremental update,
+        // otherwise the absolute value is applied. Dropped if no callback is set.
+        final IRemoteVolumeObserver.Stub mRemoteVolObserver = new IRemoteVolumeObserver.Stub() {
+            @Override
+            public void dispatchRemoteVolumeUpdate(final int direction, final int value) {
+                sStatic.mHandler.post(new Runnable() {
+                    @Override
+                    public void run() {
+                        if (mVcb != null) {
+                            if (direction != 0) {
+                                mVcb.vcb.onVolumeUpdateRequest(mVcb.route, direction);
+                            } else {
+                                mVcb.vcb.onVolumeSetRequest(mVcb.route, value);
+                            }
+                        }
+                    }
+                });
+            }
+        };
+
+        // Dispatches a route-changed event for this route.
+        void routeUpdated() {
+            updateRoute(this);
+        }
+
+        @Override
+        public String toString() {
+            // Same fields and formatting as before, built with a StringBuilder.
+            final StringBuilder sb = new StringBuilder(getClass().getSimpleName());
+            sb.append("{ name=").append(getName())
+                    .append(", description=").append(getDescription())
+                    .append(", status=").append(getStatus())
+                    .append(", category=").append(getCategory())
+                    .append(", supportedTypes=").append(typesToString(getSupportedTypes()))
+                    .append(", presentationDisplay=").append(mPresentationDisplay)
+                    .append(" }");
+            return sb.toString();
+        }
+ }
+
+    /**
+     * Information about a route that the application may define and modify.
+     * A user route defaults to {@link RouteInfo#PLAYBACK_TYPE_REMOTE} and
+     * {@link RouteInfo#PLAYBACK_VOLUME_FIXED}.
+     *
+     * @see MediaRouter.RouteInfo
+     */
+    public static class UserRouteInfo extends RouteInfo {
+        RemoteControlClient mRcc;
+        SessionVolumeProvider mSvp;
+
+        UserRouteInfo(RouteCategory category) {
+            super(category);
+            mSupportedTypes = ROUTE_TYPE_USER;
+            mPlaybackType = PLAYBACK_TYPE_REMOTE;
+            mVolumeHandling = PLAYBACK_VOLUME_FIXED;
+        }
+
+        /**
+         * Set the user-visible name of this route.
+         * @param name Name to display to the user to describe this route
+         */
+        public void setName(CharSequence name) {
+            mNameResId = 0;
+            mName = name;
+            routeUpdated();
+        }
+
+        /**
+         * Set the user-visible name of this route.
+         * <p>
+         * The route name identifies the destination represented by the route.
+         * It may be a user-supplied name, an alias, or device serial number.
+         * </p>
+         *
+         * @param resId Resource ID of the name to display to the user to describe this route
+         */
+        public void setName(int resId) {
+            mNameResId = resId;
+            mName = null;
+            routeUpdated();
+        }
+
+        /**
+         * Set the user-visible description of this route.
+         * <p>
+         * The route description describes the kind of destination represented by the route.
+         * It may be a user-supplied string, a model number or brand of device.
+         * </p>
+         *
+         * @param description The description of the route, or null if none.
+         */
+        public void setDescription(CharSequence description) {
+            mDescription = description;
+            routeUpdated();
+        }
+
+        /**
+         * Set the current user-visible status for this route.
+         * @param status Status to display to the user to describe what the endpoint
+         * of this route is currently doing
+         */
+        public void setStatus(CharSequence status) {
+            setStatusInt(status);
+        }
+
+        /**
+         * Set the RemoteControlClient responsible for reporting playback info for this
+         * user route.
+         *
+         * <p>If this route manages remote playback, the data exposed by this
+         * RemoteControlClient will be used to reflect and update information
+         * such as route volume info in related UIs.</p>
+         *
+         * <p>The RemoteControlClient must have been previously registered with
+         * {@link AudioManager#registerRemoteControlClient(RemoteControlClient)}.</p>
+         *
+         * @param rcc RemoteControlClient associated with this route
+         */
+        public void setRemoteControlClient(RemoteControlClient rcc) {
+            mRcc = rcc;
+            updatePlaybackInfoOnRcc();
+        }
+
+        /**
+         * Retrieve the RemoteControlClient associated with this route, if one has been set.
+         *
+         * @return the RemoteControlClient associated with this route
+         * @see #setRemoteControlClient(RemoteControlClient)
+         */
+        public RemoteControlClient getRemoteControlClient() {
+            return mRcc;
+        }
+
+        /**
+         * Set an icon that will be used to represent this route.
+         * The system may use this icon in picker UIs or similar.
+         *
+         * @param icon icon drawable to use to represent this route
+         */
+        public void setIconDrawable(Drawable icon) {
+            mIcon = icon;
+        }
+
+        /**
+         * Set an icon that will be used to represent this route.
+         * The system may use this icon in picker UIs or similar.
+         *
+         * @param resId Resource ID of an icon drawable to use to represent this route
+         */
+        public void setIconResource(@DrawableRes int resId) {
+            setIconDrawable(sStatic.mResources.getDrawable(resId));
+        }
+
+        /**
+         * Set a callback to be notified of volume update requests
+         * @param vcb the callback to notify
+         */
+        public void setVolumeCallback(VolumeCallback vcb) {
+            mVcb = new VolumeCallbackInfo(vcb, this);
+        }
+
+        /**
+         * Defines whether playback associated with this route is "local"
+         * ({@link RouteInfo#PLAYBACK_TYPE_LOCAL}) or "remote"
+         * ({@link RouteInfo#PLAYBACK_TYPE_REMOTE}).
+         * @param type the playback type
+         */
+        public void setPlaybackType(@RouteInfo.PlaybackType int type) {
+            if (mPlaybackType != type) {
+                mPlaybackType = type;
+                configureSessionVolume();
+            }
+        }
+
+        /**
+         * Defines whether volume for the playback associated with this route is fixed
+         * ({@link RouteInfo#PLAYBACK_VOLUME_FIXED}) or can be modified
+         * ({@link RouteInfo#PLAYBACK_VOLUME_VARIABLE}).
+         * @param volumeHandling the volume handling mode
+         */
+        public void setVolumeHandling(@RouteInfo.PlaybackVolume int volumeHandling) {
+            if (mVolumeHandling != volumeHandling) {
+                mVolumeHandling = volumeHandling;
+                configureSessionVolume();
+            }
+        }
+
+        /**
+         * Defines at what volume the playback associated with this route is performed (for user
+         * feedback purposes). This information is only used when the playback is not local.
+         * @param volume the current volume, clamped to [0, getVolumeMax()]
+         */
+        public void setVolume(int volume) {
+            volume = Math.max(0, Math.min(volume, getVolumeMax()));
+            if (mVolume != volume) {
+                mVolume = volume;
+                if (mSvp != null) {
+                    mSvp.setCurrentVolume(mVolume);
+                }
+                dispatchRouteVolumeChanged(this);
+                if (mGroup != null) {
+                    mGroup.memberVolumeChanged(this);
+                }
+            }
+        }
+
+        @Override
+        public void requestSetVolume(int volume) {
+            if (mVolumeHandling == PLAYBACK_VOLUME_VARIABLE) {
+                if (mVcb == null) {
+                    Log.e(TAG, "Cannot requestSetVolume on user route - no volume callback set");
+                    return;
+                }
+                mVcb.vcb.onVolumeSetRequest(this, volume);
+            }
+        }
+
+        @Override
+        public void requestUpdateVolume(int direction) {
+            if (mVolumeHandling == PLAYBACK_VOLUME_VARIABLE) {
+                if (mVcb == null) {
+                    // Fixed log message: previously referenced the wrong method
+                    // name ("requestChangeVolume") and misspelled "volume".
+                    Log.e(TAG, "Cannot requestUpdateVolume on user route - no volume callback set");
+                    return;
+                }
+                mVcb.vcb.onVolumeUpdateRequest(this, direction);
+            }
+        }
+
+        /**
+         * Defines the maximum volume at which the playback associated with this route is performed
+         * (for user feedback purposes). This information is only used when the playback is not
+         * local.
+         * @param volumeMax the maximum volume value
+         */
+        public void setVolumeMax(int volumeMax) {
+            if (mVolumeMax != volumeMax) {
+                mVolumeMax = volumeMax;
+                configureSessionVolume();
+            }
+        }
+
+        /**
+         * Defines over what stream type the media is presented.
+         * @param stream an AudioManager stream type
+         */
+        public void setPlaybackStream(int stream) {
+            if (mPlaybackStream != stream) {
+                mPlaybackStream = stream;
+                configureSessionVolume();
+            }
+        }
+
+        private void updatePlaybackInfoOnRcc() {
+            configureSessionVolume();
+        }
+
+        // Keeps the RemoteControlClient's MediaSession volume configuration in
+        // sync with this route's playback type, volume handling, max and value.
+        private void configureSessionVolume() {
+            if (mRcc == null) {
+                if (DEBUG) {
+                    Log.d(TAG, "No Rcc to configure volume for route " + getName());
+                }
+                return;
+            }
+            MediaSession session = mRcc.getMediaSession();
+            if (session == null) {
+                if (DEBUG) {
+                    Log.d(TAG, "Rcc has no session to configure volume");
+                }
+                return;
+            }
+            if (mPlaybackType == RemoteControlClient.PLAYBACK_TYPE_REMOTE) {
+                @VolumeProvider.ControlType int volumeControl =
+                        VolumeProvider.VOLUME_CONTROL_FIXED;
+                switch (mVolumeHandling) {
+                    case RemoteControlClient.PLAYBACK_VOLUME_VARIABLE:
+                        volumeControl = VolumeProvider.VOLUME_CONTROL_ABSOLUTE;
+                        break;
+                    case RemoteControlClient.PLAYBACK_VOLUME_FIXED:
+                    default:
+                        break;
+                }
+                // Only register a new listener if necessary
+                if (mSvp == null || mSvp.getVolumeControl() != volumeControl
+                        || mSvp.getMaxVolume() != mVolumeMax) {
+                    mSvp = new SessionVolumeProvider(volumeControl, mVolumeMax, mVolume);
+                    session.setPlaybackToRemote(mSvp);
+                }
+            } else {
+                // We only know how to handle local and remote, fall back to local if not remote.
+                AudioAttributes.Builder bob = new AudioAttributes.Builder();
+                bob.setLegacyStreamType(mPlaybackStream);
+                session.setPlaybackToLocal(bob.build());
+                mSvp = null;
+            }
+        }
+
+        // Bridges MediaSession volume requests to the route's VolumeCallback,
+        // always dispatching on the main-thread handler.
+        class SessionVolumeProvider extends VolumeProvider {
+
+            public SessionVolumeProvider(@VolumeProvider.ControlType int volumeControl,
+                    int maxVolume, int currentVolume) {
+                super(volumeControl, maxVolume, currentVolume);
+            }
+
+            @Override
+            public void onSetVolumeTo(final int volume) {
+                sStatic.mHandler.post(new Runnable() {
+                    @Override
+                    public void run() {
+                        if (mVcb != null) {
+                            mVcb.vcb.onVolumeSetRequest(mVcb.route, volume);
+                        }
+                    }
+                });
+            }
+
+            @Override
+            public void onAdjustVolume(final int direction) {
+                sStatic.mHandler.post(new Runnable() {
+                    @Override
+                    public void run() {
+                        if (mVcb != null) {
+                            mVcb.vcb.onVolumeUpdateRequest(mVcb.route, direction);
+                        }
+                    }
+                });
+            }
+        }
+    }
+
+    /**
+     * Information about a route that consists of multiple other routes in a group.
+     */
+    public static class RouteGroup extends RouteInfo {
+        final ArrayList<RouteInfo> mRoutes = new ArrayList<RouteInfo>();
+        private boolean mUpdateName;
+
+        RouteGroup(RouteCategory category) {
+            super(category);
+            mGroup = this;
+            mVolumeHandling = PLAYBACK_VOLUME_FIXED;
+        }
+
+        @Override
+        CharSequence getName(Resources res) {
+            if (mUpdateName) updateName();
+            return super.getName(res);
+        }
+
+        /**
+         * Validates that a route may join this group: it must not already
+         * belong to a group and must share this group's category.
+         * (Extracted so both addRoute overloads share identical checks.)
+         */
+        private void checkCanAdd(RouteInfo route) {
+            if (route.getGroup() != null) {
+                throw new IllegalStateException("Route " + route + " is already part of a group.");
+            }
+            if (route.getCategory() != mCategory) {
+                throw new IllegalArgumentException(
+                        "Route cannot be added to a group with a different category. " +
+                        "(Route category=" + route.getCategory() +
+                        " group category=" + mCategory + ")");
+            }
+        }
+
+        /**
+         * Add a route to this group. The route must not currently belong to another group.
+         *
+         * @param route route to add to this group
+         */
+        public void addRoute(RouteInfo route) {
+            checkCanAdd(route);
+            final int at = mRoutes.size();
+            mRoutes.add(route);
+            route.mGroup = this;
+            mUpdateName = true;
+            updateVolume();
+            routeUpdated();
+            dispatchRouteGrouped(route, this, at);
+        }
+
+        /**
+         * Add a route to this group before the specified index.
+         *
+         * @param route route to add
+         * @param insertAt insert the new route before this index
+         */
+        public void addRoute(RouteInfo route, int insertAt) {
+            checkCanAdd(route);
+            mRoutes.add(insertAt, route);
+            route.mGroup = this;
+            mUpdateName = true;
+            updateVolume();
+            routeUpdated();
+            dispatchRouteGrouped(route, this, insertAt);
+        }
+
+        /**
+         * Remove a route from this group.
+         *
+         * @param route route to remove
+         */
+        public void removeRoute(RouteInfo route) {
+            if (route.getGroup() != this) {
+                throw new IllegalArgumentException("Route " + route +
+                        " is not a member of this group.");
+            }
+            mRoutes.remove(route);
+            route.mGroup = null;
+            mUpdateName = true;
+            updateVolume();
+            dispatchRouteUngrouped(route, this);
+            routeUpdated();
+        }
+
+        /**
+         * Remove the route at the specified index from this group.
+         *
+         * @param index index of the route to remove
+         */
+        public void removeRoute(int index) {
+            RouteInfo route = mRoutes.remove(index);
+            route.mGroup = null;
+            mUpdateName = true;
+            updateVolume();
+            dispatchRouteUngrouped(route, this);
+            routeUpdated();
+        }
+
+        /**
+         * @return The number of routes in this group
+         */
+        public int getRouteCount() {
+            return mRoutes.size();
+        }
+
+        /**
+         * Return the route in this group at the specified index
+         *
+         * @param index Index to fetch
+         * @return The route at index
+         */
+        public RouteInfo getRouteAt(int index) {
+            return mRoutes.get(index);
+        }
+
+        /**
+         * Set an icon that will be used to represent this group.
+         * The system may use this icon in picker UIs or similar.
+         *
+         * @param icon icon drawable to use to represent this group
+         */
+        public void setIconDrawable(Drawable icon) {
+            mIcon = icon;
+        }
+
+        /**
+         * Set an icon that will be used to represent this group.
+         * The system may use this icon in picker UIs or similar.
+         *
+         * @param resId Resource ID of an icon drawable to use to represent this group
+         */
+        public void setIconResource(@DrawableRes int resId) {
+            setIconDrawable(sStatic.mResources.getDrawable(resId));
+        }
+
+        @Override
+        public void requestSetVolume(int volume) {
+            final int maxVol = getVolumeMax();
+            if (maxVol == 0) {
+                return;
+            }
+
+            // Scale the requested group volume onto each member's own range.
+            final float scaledVolume = (float) volume / maxVol;
+            final int routeCount = getRouteCount();
+            for (int i = 0; i < routeCount; i++) {
+                final RouteInfo route = getRouteAt(i);
+                final int routeVol = (int) (scaledVolume * route.getVolumeMax());
+                route.requestSetVolume(routeVol);
+            }
+            if (volume != mVolume) {
+                mVolume = volume;
+                dispatchRouteVolumeChanged(this);
+            }
+        }
+
+        @Override
+        public void requestUpdateVolume(int direction) {
+            final int maxVol = getVolumeMax();
+            if (maxVol == 0) {
+                return;
+            }
+
+            // Forward the delta to each member, then report the highest member
+            // volume as the group volume.
+            final int routeCount = getRouteCount();
+            int volume = 0;
+            for (int i = 0; i < routeCount; i++) {
+                final RouteInfo route = getRouteAt(i);
+                route.requestUpdateVolume(direction);
+                final int routeVol = route.getVolume();
+                if (routeVol > volume) {
+                    volume = routeVol;
+                }
+            }
+            if (volume != mVolume) {
+                mVolume = volume;
+                dispatchRouteVolumeChanged(this);
+            }
+        }
+
+        void memberNameChanged(RouteInfo info, CharSequence name) {
+            mUpdateName = true;
+            routeUpdated();
+        }
+
+        void memberStatusChanged(RouteInfo info, CharSequence status) {
+            setStatusInt(status);
+        }
+
+        void memberVolumeChanged(RouteInfo info) {
+            updateVolume();
+        }
+
+        void updateVolume() {
+            // A group always represents the highest component volume value.
+            final int routeCount = getRouteCount();
+            int volume = 0;
+            for (int i = 0; i < routeCount; i++) {
+                final int routeVol = getRouteAt(i).getVolume();
+                if (routeVol > volume) {
+                    volume = routeVol;
+                }
+            }
+            if (volume != mVolume) {
+                mVolume = volume;
+                dispatchRouteVolumeChanged(this);
+            }
+        }
+
+        @Override
+        void routeUpdated() {
+            int types = 0;
+            final int count = mRoutes.size();
+            if (count == 0) {
+                // Don't keep empty groups in the router.
+                MediaRouter.removeRouteStatic(this);
+                return;
+            }
+
+            // Aggregate member properties: union of types, max of volumes,
+            // local/fixed only if every member is local/fixed.
+            int maxVolume = 0;
+            boolean isLocal = true;
+            boolean isFixedVolume = true;
+            for (int i = 0; i < count; i++) {
+                final RouteInfo route = mRoutes.get(i);
+                types |= route.mSupportedTypes;
+                final int routeMaxVolume = route.getVolumeMax();
+                if (routeMaxVolume > maxVolume) {
+                    maxVolume = routeMaxVolume;
+                }
+                isLocal &= route.getPlaybackType() == PLAYBACK_TYPE_LOCAL;
+                isFixedVolume &= route.getVolumeHandling() == PLAYBACK_VOLUME_FIXED;
+            }
+            mPlaybackType = isLocal ? PLAYBACK_TYPE_LOCAL : PLAYBACK_TYPE_REMOTE;
+            mVolumeHandling = isFixedVolume ? PLAYBACK_VOLUME_FIXED : PLAYBACK_VOLUME_VARIABLE;
+            mSupportedTypes = types;
+            mVolumeMax = maxVolume;
+            mIcon = count == 1 ? mRoutes.get(0).getIconDrawable() : null;
+            super.routeUpdated();
+        }
+
+        void updateName() {
+            final StringBuilder sb = new StringBuilder();
+            final int count = mRoutes.size();
+            for (int i = 0; i < count; i++) {
+                final RouteInfo info = mRoutes.get(i);
+                // TODO: There's probably a much more correct way to localize this.
+                if (i > 0) {
+                    sb.append(", ");
+                }
+                sb.append(info.getName());
+            }
+            mName = sb.toString();
+            mUpdateName = false;
+        }
+
+        @Override
+        public String toString() {
+            StringBuilder sb = new StringBuilder(super.toString());
+            sb.append('[');
+            final int count = mRoutes.size();
+            for (int i = 0; i < count; i++) {
+                if (i > 0) sb.append(", ");
+                sb.append(mRoutes.get(i));
+            }
+            sb.append(']');
+            return sb.toString();
+        }
+    }
+
+    /**
+     * Definition of a category of routes. All routes belong to a category.
+     */
+    public static class RouteCategory {
+        CharSequence mName;       // literal name; ignored when mNameResId != 0
+        int mNameResId;           // string resource for the name, or 0
+        int mTypes;               // route type flags supported by this category
+        final boolean mGroupable; // whether member routes may be grouped
+        boolean mIsSystem;        // true for the reserved system category
+
+        RouteCategory(CharSequence name, int types, boolean groupable) {
+            mName = name;
+            mTypes = types;
+            mGroupable = groupable;
+        }
+
+        RouteCategory(int nameResId, int types, boolean groupable) {
+            mNameResId = nameResId;
+            mTypes = types;
+            mGroupable = groupable;
+        }
+
+        /**
+         * @return the name of this route category
+         */
+        public CharSequence getName() {
+            return getName(sStatic.mResources);
+        }
+
+        /**
+         * Return the properly localized/configuration dependent name of this RouteCategory.
+         *
+         * @param context Context to resolve name resources
+         * @return the name of this route category
+         */
+        public CharSequence getName(Context context) {
+            return getName(context.getResources());
+        }
+
+        // A non-zero name resource ID takes precedence over the literal name.
+        CharSequence getName(Resources res) {
+            if (mNameResId != 0) {
+                return res.getText(mNameResId);
+            }
+            return mName;
+        }
+
+        /**
+         * Return the current list of routes in this category that have been added
+         * to the MediaRouter.
+         *
+         * <p>This list will not include routes that are nested within RouteGroups.
+         * A RouteGroup is treated as a single route within its category.</p>
+         *
+         * @param out a List to fill with the routes in this category. If this parameter is
+         *            non-null, it will be cleared, filled with the current routes with this
+         *            category, and returned. If this parameter is null, a new List will be
+         *            allocated to report the category's current routes.
+         * @return A list with the routes in this category that have been added to the MediaRouter.
+         */
+        public List<RouteInfo> getRoutes(List<RouteInfo> out) {
+            if (out == null) {
+                out = new ArrayList<RouteInfo>();
+            } else {
+                out.clear();
+            }
+
+            final int count = getRouteCountStatic();
+            for (int i = 0; i < count; i++) {
+                final RouteInfo route = getRouteAtStatic(i);
+                if (route.mCategory == this) {
+                    out.add(route);
+                }
+            }
+            return out;
+        }
+
+        /**
+         * @return Flag set describing the route types supported by this category
+         */
+        public int getSupportedTypes() {
+            return mTypes;
+        }
+
+        /**
+         * Return whether or not this category supports grouping.
+         *
+         * <p>If this method returns true, all routes obtained from this category
+         * via calls to {@link #getRouteAt(int)} will be {@link MediaRouter.RouteGroup}s.</p>
+         *
+         * @return true if this category supports grouping its routes
+         */
+        public boolean isGroupable() {
+            return mGroupable;
+        }
+
+        /**
+         * @return true if this is the category reserved for system routes.
+         * @hide
+         */
+        public boolean isSystem() {
+            return mIsSystem;
+        }
+
+        @Override
+        public String toString() {
+            return "RouteCategory{ name=" + getName() + " types=" + typesToString(mTypes) +
+                    " groupable=" + mGroupable + " }";
+        }
+    }
+
+    // Associates a registered Callback with the route types and flags it was
+    // registered for, so events can be filtered before dispatch.
+    static class CallbackInfo {
+        public int type;
+        public int flags;
+        public final Callback cb;
+        public final MediaRouter router;
+
+        public CallbackInfo(Callback cb, int type, int flags, MediaRouter router) {
+            this.router = router;
+            this.flags = flags;
+            this.type = type;
+            this.cb = cb;
+        }
+
+        public boolean filterRouteEvent(RouteInfo route) {
+            return filterRouteEvent(route.mSupportedTypes);
+        }
+
+        public boolean filterRouteEvent(int supportedTypes) {
+            // Unfiltered callbacks receive everything; otherwise the route must
+            // share at least one type bit with the registration.
+            if ((flags & CALLBACK_FLAG_UNFILTERED_EVENTS) != 0) {
+                return true;
+            }
+            return (type & supportedTypes) != 0;
+        }
+    }
+
+    /**
+     * Interface for receiving events about media routing changes.
+     * All methods of this interface will be called from the application's main thread.
+     * <p>
+     * A Callback will only receive events relevant to routes that the callback
+     * was registered for unless the {@link MediaRouter#CALLBACK_FLAG_UNFILTERED_EVENTS}
+     * flag was specified in {@link MediaRouter#addCallback(int, Callback, int)}.
+     * </p>
+     *
+     * @see MediaRouter#addCallback(int, Callback, int)
+     * @see MediaRouter#removeCallback(Callback)
+     */
+    public static abstract class Callback {
+        /**
+         * Called when the supplied route becomes selected as the active route
+         * for the given route type.
+         *
+         * @param router the MediaRouter reporting the event
+         * @param type Type flag set indicating the routes that have been selected
+         * @param info Route that has been selected for the given route types
+         */
+        public abstract void onRouteSelected(MediaRouter router, int type, RouteInfo info);
+
+        /**
+         * Called when the supplied route becomes unselected as the active route
+         * for the given route type.
+         *
+         * @param router the MediaRouter reporting the event
+         * @param type Type flag set indicating the routes that have been unselected
+         * @param info Route that has been unselected for the given route types
+         */
+        public abstract void onRouteUnselected(MediaRouter router, int type, RouteInfo info);
+
+        /**
+         * Called when a route for the specified type was added.
+         *
+         * @param router the MediaRouter reporting the event
+         * @param info Route that has become available for use
+         */
+        public abstract void onRouteAdded(MediaRouter router, RouteInfo info);
+
+        /**
+         * Called when a route for the specified type was removed.
+         *
+         * @param router the MediaRouter reporting the event
+         * @param info Route that has been removed from availability
+         */
+        public abstract void onRouteRemoved(MediaRouter router, RouteInfo info);
+
+        /**
+         * Called when an aspect of the indicated route has changed.
+         *
+         * <p>This will not indicate that the types supported by this route have
+         * changed, only that cosmetic info such as name or status have been updated.</p>
+         *
+         * @param router the MediaRouter reporting the event
+         * @param info The route that was changed
+         */
+        public abstract void onRouteChanged(MediaRouter router, RouteInfo info);
+
+        /**
+         * Called when a route is added to a group.
+         *
+         * @param router the MediaRouter reporting the event
+         * @param info The route that was added
+         * @param group The group the route was added to
+         * @param index The route index within group that info was added at
+         */
+        public abstract void onRouteGrouped(MediaRouter router, RouteInfo info, RouteGroup group,
+                int index);
+
+        /**
+         * Called when a route is removed from a group.
+         *
+         * @param router the MediaRouter reporting the event
+         * @param info The route that was removed
+         * @param group The group the route was removed from
+         */
+        public abstract void onRouteUngrouped(MediaRouter router, RouteInfo info, RouteGroup group);
+
+        /**
+         * Called when a route's volume changes.
+         *
+         * @param router the MediaRouter reporting the event
+         * @param info The route with altered volume
+         */
+        public abstract void onRouteVolumeChanged(MediaRouter router, RouteInfo info);
+
+        /**
+         * Called when a route's presentation display changes.
+         * <p>
+         * This method is called whenever the route's presentation display becomes
+         * available, is removed, or has changes to some of its properties (such as its size).
+         * </p>
+         *
+         * @param router the MediaRouter reporting the event
+         * @param info The route whose presentation display changed
+         *
+         * @see RouteInfo#getPresentationDisplay()
+         */
+        public void onRoutePresentationDisplayChanged(MediaRouter router, RouteInfo info) {
+        }
+    }
+
+    /**
+     * Stub implementation of {@link MediaRouter.Callback}.
+     * Each abstract method is defined as a no-op. Override just the ones
+     * you need.
+     */
+    public static class SimpleCallback extends Callback {
+        // Every override below is intentionally empty; subclasses override
+        // only the events they care about.
+
+        @Override
+        public void onRouteSelected(MediaRouter router, int type, RouteInfo info) {
+        }
+
+        @Override
+        public void onRouteUnselected(MediaRouter router, int type, RouteInfo info) {
+        }
+
+        @Override
+        public void onRouteAdded(MediaRouter router, RouteInfo info) {
+        }
+
+        @Override
+        public void onRouteRemoved(MediaRouter router, RouteInfo info) {
+        }
+
+        @Override
+        public void onRouteChanged(MediaRouter router, RouteInfo info) {
+        }
+
+        @Override
+        public void onRouteGrouped(MediaRouter router, RouteInfo info, RouteGroup group,
+                int index) {
+        }
+
+        @Override
+        public void onRouteUngrouped(MediaRouter router, RouteInfo info, RouteGroup group) {
+        }
+
+        @Override
+        public void onRouteVolumeChanged(MediaRouter router, RouteInfo info) {
+        }
+    }
+
+    // Immutable pairing of a VolumeCallback with the route it was set on.
+    static class VolumeCallbackInfo {
+        public final VolumeCallback vcb;
+        public final RouteInfo route;
+
+        public VolumeCallbackInfo(VolumeCallback vcb, RouteInfo route) {
+            this.vcb = vcb;
+            this.route = route;
+        }
+    }
+
+    /**
+     * Interface for receiving events about volume changes.
+     * All methods of this interface will be called from the application's main thread.
+     *
+     * <p>A VolumeCallback will only receive events relevant to routes that the callback
+     * was registered for.</p>
+     *
+     * @see UserRouteInfo#setVolumeCallback(VolumeCallback)
+     */
+    public static abstract class VolumeCallback {
+        /**
+         * Called when the volume for the route should be increased or decreased.
+         * @param info the route affected by this event
+         * @param direction an integer indicating whether the volume is to be increased
+         *     (positive value) or decreased (negative value).
+         *     For bundled changes, the absolute value indicates the number of changes
+         *     in the same direction, e.g. +3 corresponds to three "volume up" changes.
+         */
+        public abstract void onVolumeUpdateRequest(RouteInfo info, int direction);
+
+        /**
+         * Called when the volume for the route should be set to the given value
+         * @param info the route affected by this event
+         * @param volume an integer indicating the new volume value that should be used, always
+         *     between 0 and the value set by {@link UserRouteInfo#setVolumeMax(int)}.
+         */
+        public abstract void onVolumeSetRequest(RouteInfo info, int volume);
+    }
+
+    /**
+     * Listens for system volume broadcasts and forwards STREAM_MUSIC changes
+     * to {@code systemVolumeChanged}. Other streams and no-op changes are ignored.
+     */
+    static class VolumeChangeReceiver extends BroadcastReceiver {
+        @Override
+        public void onReceive(Context context, Intent intent) {
+            // Constant-first equals: Intent.getAction() may be null, which
+            // would have thrown NPE with the previous ordering.
+            if (AudioManager.VOLUME_CHANGED_ACTION.equals(intent.getAction())) {
+                final int streamType = intent.getIntExtra(AudioManager.EXTRA_VOLUME_STREAM_TYPE,
+                        -1);
+                if (streamType != AudioManager.STREAM_MUSIC) {
+                    return;
+                }
+
+                final int newVolume = intent.getIntExtra(AudioManager.EXTRA_VOLUME_STREAM_VALUE, 0);
+                final int oldVolume = intent.getIntExtra(
+                        AudioManager.EXTRA_PREV_VOLUME_STREAM_VALUE, 0);
+                if (newVolume != oldVolume) {
+                    systemVolumeChanged(newVolume);
+                }
+            }
+        }
+    }
+
+    /**
+     * Listens for Wifi display status broadcasts and forwards the parceled
+     * {@link WifiDisplayStatus} to {@code updateWifiDisplayStatus}.
+     */
+    static class WifiDisplayStatusChangedReceiver extends BroadcastReceiver {
+        @Override
+        public void onReceive(Context context, Intent intent) {
+            // Constant-first equals: Intent.getAction() may be null, which
+            // would have thrown NPE with the previous ordering.
+            if (DisplayManager.ACTION_WIFI_DISPLAY_STATUS_CHANGED.equals(intent.getAction())) {
+                updateWifiDisplayStatus((WifiDisplayStatus) intent.getParcelableExtra(
+                        DisplayManager.EXTRA_WIFI_DISPLAY_STATUS));
+            }
+        }
+    }
+}
diff --git a/android/media/MediaRouterClientState.java b/android/media/MediaRouterClientState.java
new file mode 100644
index 00000000..7643924e
--- /dev/null
+++ b/android/media/MediaRouterClientState.java
@@ -0,0 +1,196 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.os.Parcel;
+import android.os.Parcelable;
+
+import java.util.ArrayList;
+
+/**
+ * Information available from MediaRouterService about the state perceived by
+ * a particular client and the routes that are available to it.
+ *
+ * Clients must not modify the contents of this object.
+ * @hide
+ */
+public final class MediaRouterClientState implements Parcelable {
+    /**
+     * A list of all known routes.
+     */
+    public final ArrayList<RouteInfo> routes;
+
+    /** Creates an empty client state with no known routes. */
+    public MediaRouterClientState() {
+        routes = new ArrayList<RouteInfo>();
+    }
+
+    /** Reconstructs a client state from a parcel written by {@link #writeToParcel}. */
+    MediaRouterClientState(Parcel src) {
+        routes = src.createTypedArrayList(RouteInfo.CREATOR);
+    }
+
+    /**
+     * Returns the route with the given unique id, or null if none matches.
+     * Linear scan; the route list is expected to be small.
+     */
+    public RouteInfo getRoute(String id) {
+        final int count = routes.size();
+        for (int i = 0; i < count; i++) {
+            final RouteInfo route = routes.get(i);
+            if (route.id.equals(id)) {
+                return route;
+            }
+        }
+        return null;
+    }
+
+    @Override
+    public int describeContents() {
+        return 0;
+    }
+
+    @Override
+    public void writeToParcel(Parcel dest, int flags) {
+        dest.writeTypedList(routes);
+    }
+
+    @Override
+    public String toString() {
+        return "MediaRouterClientState{ routes=" + routes.toString() + " }";
+    }
+
+    public static final Parcelable.Creator<MediaRouterClientState> CREATOR =
+            new Parcelable.Creator<MediaRouterClientState>() {
+        @Override
+        public MediaRouterClientState createFromParcel(Parcel in) {
+            return new MediaRouterClientState(in);
+        }
+
+        @Override
+        public MediaRouterClientState[] newArray(int size) {
+            return new MediaRouterClientState[size];
+        }
+    };
+
+    /**
+     * Mutable description of a single route as seen by a client.
+     * Fields are plain public members; this is an internal transfer object
+     * (the outer class is @hide) and clients must not modify instances.
+     */
+    public static final class RouteInfo implements Parcelable {
+        public String id;
+        public String name;
+        public String description;
+        public int supportedTypes;
+        public boolean enabled;
+        public int statusCode;
+        public int playbackType;
+        public int playbackStream;
+        public int volume;
+        public int volumeMax;
+        public int volumeHandling;
+        public int presentationDisplayId;
+        public @MediaRouter.RouteInfo.DeviceType int deviceType;
+
+        /** Creates a route with the given id and MediaRouter default values. */
+        public RouteInfo(String id) {
+            this.id = id;
+            enabled = true;
+            statusCode = MediaRouter.RouteInfo.STATUS_NONE;
+            playbackType = MediaRouter.RouteInfo.PLAYBACK_TYPE_REMOTE;
+            playbackStream = -1;
+            volumeHandling = MediaRouter.RouteInfo.PLAYBACK_VOLUME_FIXED;
+            presentationDisplayId = -1;
+            deviceType = MediaRouter.RouteInfo.DEVICE_TYPE_UNKNOWN;
+        }
+
+        /** Field-by-field copy constructor. */
+        public RouteInfo(RouteInfo other) {
+            id = other.id;
+            name = other.name;
+            description = other.description;
+            supportedTypes = other.supportedTypes;
+            enabled = other.enabled;
+            statusCode = other.statusCode;
+            playbackType = other.playbackType;
+            playbackStream = other.playbackStream;
+            volume = other.volume;
+            volumeMax = other.volumeMax;
+            volumeHandling = other.volumeHandling;
+            presentationDisplayId = other.presentationDisplayId;
+            deviceType = other.deviceType;
+        }
+
+        // NOTE: read order here must exactly mirror the write order in
+        // writeToParcel below; keep the two in sync when adding fields.
+        RouteInfo(Parcel in) {
+            id = in.readString();
+            name = in.readString();
+            description = in.readString();
+            supportedTypes = in.readInt();
+            enabled = in.readInt() != 0;
+            statusCode = in.readInt();
+            playbackType = in.readInt();
+            playbackStream = in.readInt();
+            volume = in.readInt();
+            volumeMax = in.readInt();
+            volumeHandling = in.readInt();
+            presentationDisplayId = in.readInt();
+            deviceType = in.readInt();
+        }
+
+        @Override
+        public int describeContents() {
+            return 0;
+        }
+
+        // Write order must exactly mirror the RouteInfo(Parcel) constructor above.
+        @Override
+        public void writeToParcel(Parcel dest, int flags) {
+            dest.writeString(id);
+            dest.writeString(name);
+            dest.writeString(description);
+            dest.writeInt(supportedTypes);
+            dest.writeInt(enabled ? 1 : 0);
+            dest.writeInt(statusCode);
+            dest.writeInt(playbackType);
+            dest.writeInt(playbackStream);
+            dest.writeInt(volume);
+            dest.writeInt(volumeMax);
+            dest.writeInt(volumeHandling);
+            dest.writeInt(presentationDisplayId);
+            dest.writeInt(deviceType);
+        }
+
+        @Override
+        public String toString() {
+            return "RouteInfo{ id=" + id
+                    + ", name=" + name
+                    + ", description=" + description
+                    + ", supportedTypes=0x" + Integer.toHexString(supportedTypes)
+                    + ", enabled=" + enabled
+                    + ", statusCode=" + statusCode
+                    + ", playbackType=" + playbackType
+                    + ", playbackStream=" + playbackStream
+                    + ", volume=" + volume
+                    + ", volumeMax=" + volumeMax
+                    + ", volumeHandling=" + volumeHandling
+                    + ", presentationDisplayId=" + presentationDisplayId
+                    + ", deviceType=" + deviceType
+                    + " }";
+        }
+
+        @SuppressWarnings("hiding")
+        public static final Parcelable.Creator<RouteInfo> CREATOR =
+                new Parcelable.Creator<RouteInfo>() {
+            @Override
+            public RouteInfo createFromParcel(Parcel in) {
+                return new RouteInfo(in);
+            }
+
+            @Override
+            public RouteInfo[] newArray(int size) {
+                return new RouteInfo[size];
+            }
+        };
+    }
+}
diff --git a/android/media/MediaScanner.java b/android/media/MediaScanner.java
new file mode 100644
index 00000000..cb4e46fe
--- /dev/null
+++ b/android/media/MediaScanner.java
@@ -0,0 +1,1970 @@
+/*
+ * Copyright (C) 2007 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.content.ContentProviderClient;
+import android.content.ContentResolver;
+import android.content.ContentUris;
+import android.content.ContentValues;
+import android.content.Context;
+import android.content.SharedPreferences;
+import android.database.Cursor;
+import android.database.SQLException;
+import android.drm.DrmManagerClient;
+import android.graphics.BitmapFactory;
+import android.mtp.MtpConstants;
+import android.net.Uri;
+import android.os.Build;
+import android.os.Environment;
+import android.os.RemoteException;
+import android.os.SystemProperties;
+import android.provider.MediaStore;
+import android.provider.MediaStore.Audio;
+import android.provider.MediaStore.Audio.Playlists;
+import android.provider.MediaStore.Files;
+import android.provider.MediaStore.Files.FileColumns;
+import android.provider.MediaStore.Images;
+import android.provider.MediaStore.Video;
+import android.provider.Settings;
+import android.provider.Settings.SettingNotFoundException;
+import android.sax.Element;
+import android.sax.ElementListener;
+import android.sax.RootElement;
+import android.system.ErrnoException;
+import android.system.Os;
+import android.text.TextUtils;
+import android.util.Log;
+import android.util.Xml;
+
+import dalvik.system.CloseGuard;
+
+import org.xml.sax.Attributes;
+import org.xml.sax.ContentHandler;
+import org.xml.sax.SAXException;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileDescriptor;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.text.SimpleDateFormat;
+import java.text.ParseException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.Locale;
+import java.util.TimeZone;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+/**
+ * Internal service helper that no-one should use directly.
+ *
+ * The way the scan currently works is:
+ * - The Java MediaScannerService creates a MediaScanner (this class), and calls
+ * MediaScanner.scanDirectories on it.
+ * - scanDirectories() calls the native processDirectory() for each of the specified directories.
+ * - the processDirectory() JNI method wraps the provided mediascanner client in a native
+ * 'MyMediaScannerClient' class, then calls processDirectory() on the native MediaScanner
+ * object (which got created when the Java MediaScanner was created).
+ * - native MediaScanner.processDirectory() calls
+ * doProcessDirectory(), which recurses over the folder, and calls
+ * native MyMediaScannerClient.scanFile() for every file whose extension matches.
+ * - native MyMediaScannerClient.scanFile() calls back on Java MediaScannerClient.scanFile,
+ * which calls doScanFile, which after some setup calls back down to native code, calling
+ * MediaScanner.processFile().
+ * - MediaScanner.processFile() calls one of several methods, depending on the type of the
+ * file: parseMP3, parseMP4, parseMidi, parseOgg or parseWMA.
+ * - each of these methods gets metadata key/value pairs from the file, and repeatedly
+ * calls native MyMediaScannerClient.handleStringTag, which calls back up to its Java
+ * counterparts in this file.
+ * - Java handleStringTag() gathers the key/value pairs that it's interested in.
+ * - once processFile returns and we're back in Java code in doScanFile(), it calls
+ * Java MyMediaScannerClient.endFile(), which takes all the data that's been
+ * gathered and inserts an entry in to the database.
+ *
+ * In summary:
+ * Java MediaScannerService calls
+ * Java MediaScanner scanDirectories, which calls
+ * Java MediaScanner processDirectory (native method), which calls
+ * native MediaScanner processDirectory, which calls
+ * native MyMediaScannerClient scanFile, which calls
+ * Java MyMediaScannerClient scanFile, which calls
+ * Java MediaScannerClient doScanFile, which calls
+ * Java MediaScanner processFile (native method), which calls
+ * native MediaScanner processFile, which calls
+ * native parseMP3, parseMP4, parseMidi, parseOgg or parseWMA, which calls
+ * native MyMediaScanner handleStringTag, which calls
+ * Java MyMediaScanner handleStringTag.
+ * Once MediaScanner processFile returns, an entry is inserted in to the database.
+ *
+ * The MediaScanner class is not thread-safe, so it should only be used in a single threaded manner.
+ *
+ * {@hide}
+ */
+public class MediaScanner implements AutoCloseable {
+    static {
+        // Load the native scanner (JNI) library and run one-time native-side
+        // class initialization before any MediaScanner instance is created.
+        System.loadLibrary("media_jni");
+        native_init();
+    }
+
+ private final static String TAG = "MediaScanner";
+
+ private static final String[] FILES_PRESCAN_PROJECTION = new String[] {
+ Files.FileColumns._ID, // 0
+ Files.FileColumns.DATA, // 1
+ Files.FileColumns.FORMAT, // 2
+ Files.FileColumns.DATE_MODIFIED, // 3
+ };
+
+ private static final String[] ID_PROJECTION = new String[] {
+ Files.FileColumns._ID,
+ };
+
+ private static final int FILES_PRESCAN_ID_COLUMN_INDEX = 0;
+ private static final int FILES_PRESCAN_PATH_COLUMN_INDEX = 1;
+ private static final int FILES_PRESCAN_FORMAT_COLUMN_INDEX = 2;
+ private static final int FILES_PRESCAN_DATE_MODIFIED_COLUMN_INDEX = 3;
+
+ private static final String[] PLAYLIST_MEMBERS_PROJECTION = new String[] {
+ Audio.Playlists.Members.PLAYLIST_ID, // 0
+ };
+
+ private static final int ID_PLAYLISTS_COLUMN_INDEX = 0;
+ private static final int PATH_PLAYLISTS_COLUMN_INDEX = 1;
+ private static final int DATE_MODIFIED_PLAYLISTS_COLUMN_INDEX = 2;
+
+ private static final String RINGTONES_DIR = "/ringtones/";
+ private static final String NOTIFICATIONS_DIR = "/notifications/";
+ private static final String ALARMS_DIR = "/alarms/";
+ private static final String MUSIC_DIR = "/music/";
+ private static final String PODCAST_DIR = "/podcasts/";
+
+ public static final String SCANNED_BUILD_PREFS_NAME = "MediaScanBuild";
+ public static final String LAST_INTERNAL_SCAN_FINGERPRINT = "lastScanFingerprint";
+ private static final String SYSTEM_SOUNDS_DIR = "/system/media/audio";
+ private static String sLastInternalScanFingerprint;
+
+ private static final String[] ID3_GENRES = {
+ // ID3v1 Genres
+ "Blues",
+ "Classic Rock",
+ "Country",
+ "Dance",
+ "Disco",
+ "Funk",
+ "Grunge",
+ "Hip-Hop",
+ "Jazz",
+ "Metal",
+ "New Age",
+ "Oldies",
+ "Other",
+ "Pop",
+ "R&B",
+ "Rap",
+ "Reggae",
+ "Rock",
+ "Techno",
+ "Industrial",
+ "Alternative",
+ "Ska",
+ "Death Metal",
+ "Pranks",
+ "Soundtrack",
+ "Euro-Techno",
+ "Ambient",
+ "Trip-Hop",
+ "Vocal",
+ "Jazz+Funk",
+ "Fusion",
+ "Trance",
+ "Classical",
+ "Instrumental",
+ "Acid",
+ "House",
+ "Game",
+ "Sound Clip",
+ "Gospel",
+ "Noise",
+ "AlternRock",
+ "Bass",
+ "Soul",
+ "Punk",
+ "Space",
+ "Meditative",
+ "Instrumental Pop",
+ "Instrumental Rock",
+ "Ethnic",
+ "Gothic",
+ "Darkwave",
+ "Techno-Industrial",
+ "Electronic",
+ "Pop-Folk",
+ "Eurodance",
+ "Dream",
+ "Southern Rock",
+ "Comedy",
+ "Cult",
+ "Gangsta",
+ "Top 40",
+ "Christian Rap",
+ "Pop/Funk",
+ "Jungle",
+ "Native American",
+ "Cabaret",
+ "New Wave",
+ "Psychadelic",
+ "Rave",
+ "Showtunes",
+ "Trailer",
+ "Lo-Fi",
+ "Tribal",
+ "Acid Punk",
+ "Acid Jazz",
+ "Polka",
+ "Retro",
+ "Musical",
+ "Rock & Roll",
+ "Hard Rock",
+ // The following genres are Winamp extensions
+ "Folk",
+ "Folk-Rock",
+ "National Folk",
+ "Swing",
+ "Fast Fusion",
+ "Bebob",
+ "Latin",
+ "Revival",
+ "Celtic",
+ "Bluegrass",
+ "Avantgarde",
+ "Gothic Rock",
+ "Progressive Rock",
+ "Psychedelic Rock",
+ "Symphonic Rock",
+ "Slow Rock",
+ "Big Band",
+ "Chorus",
+ "Easy Listening",
+ "Acoustic",
+ "Humour",
+ "Speech",
+ "Chanson",
+ "Opera",
+ "Chamber Music",
+ "Sonata",
+ "Symphony",
+ "Booty Bass",
+ "Primus",
+ "Porn Groove",
+ "Satire",
+ "Slow Jam",
+ "Club",
+ "Tango",
+ "Samba",
+ "Folklore",
+ "Ballad",
+ "Power Ballad",
+ "Rhythmic Soul",
+ "Freestyle",
+ "Duet",
+ "Punk Rock",
+ "Drum Solo",
+ "A capella",
+ "Euro-House",
+ "Dance Hall",
+ // The following ones seem to be fairly widely supported as well
+ "Goa",
+ "Drum & Bass",
+ "Club-House",
+ "Hardcore",
+ "Terror",
+ "Indie",
+ "Britpop",
+ null,
+ "Polsk Punk",
+ "Beat",
+ "Christian Gangsta",
+ "Heavy Metal",
+ "Black Metal",
+ "Crossover",
+ "Contemporary Christian",
+ "Christian Rock",
+ "Merengue",
+ "Salsa",
+ "Thrash Metal",
+ "Anime",
+ "JPop",
+ "Synthpop",
+ // 148 and up don't seem to have been defined yet.
+ };
+
+ private long mNativeContext;
+ private final Context mContext;
+ private final String mPackageName;
+ private final String mVolumeName;
+ private final ContentProviderClient mMediaProvider;
+ private final Uri mAudioUri;
+ private final Uri mVideoUri;
+ private final Uri mImagesUri;
+ private final Uri mThumbsUri;
+ private final Uri mPlaylistsUri;
+ private final Uri mFilesUri;
+ private final Uri mFilesUriNoNotify;
+ private final boolean mProcessPlaylists;
+ private final boolean mProcessGenres;
+ private int mMtpObjectHandle;
+
+ private final AtomicBoolean mClosed = new AtomicBoolean();
+ private final CloseGuard mCloseGuard = CloseGuard.get();
+
+ /** whether to use bulk inserts or individual inserts for each item */
+ private static final boolean ENABLE_BULK_INSERTS = true;
+
+ // used when scanning the image database so we know whether we have to prune
+ // old thumbnail files
+ private int mOriginalCount;
+ /** Whether the scanner has set a default sound for the ringer ringtone. */
+ private boolean mDefaultRingtoneSet;
+ /** Whether the scanner has set a default sound for the notification ringtone. */
+ private boolean mDefaultNotificationSet;
+ /** Whether the scanner has set a default sound for the alarm ringtone. */
+ private boolean mDefaultAlarmSet;
+ /** The filename for the default sound for the ringer ringtone. */
+ private String mDefaultRingtoneFilename;
+ /** The filename for the default sound for the notification ringtone. */
+ private String mDefaultNotificationFilename;
+ /** The filename for the default sound for the alarm ringtone. */
+ private String mDefaultAlarmAlertFilename;
+ /**
+ * The prefix for system properties that define the default sound for
+ * ringtones. Concatenate the name of the setting from Settings
+ * to get the full system property.
+ */
+ private static final String DEFAULT_RINGTONE_PROPERTY_PREFIX = "ro.config.";
+
+ private final BitmapFactory.Options mBitmapOptions = new BitmapFactory.Options();
+
+    /**
+     * Lightweight record of one row in the files table, used to decide
+     * whether a file on disk needs to be (re)scanned.
+     */
+    private static class FileEntry {
+        // Database row id (_ID), or 0 for a file not yet in the database.
+        long mRowId;
+        String mPath;
+        // Last-modified time as stored in the database row.
+        long mLastModified;
+        // MTP format code (MtpConstants.FORMAT_*).
+        int mFormat;
+        // True when the on-disk mtime differs from mLastModified (beyond the
+        // one-second slack applied in beginFile), i.e. the file must be rescanned.
+        boolean mLastModifiedChanged;
+
+        FileEntry(long rowId, String path, long lastModified, int format) {
+            mRowId = rowId;
+            mPath = path;
+            mLastModified = lastModified;
+            mFormat = format;
+            mLastModifiedChanged = false;
+        }
+
+        @Override
+        public String toString() {
+            return mPath + " mRowId: " + mRowId;
+        }
+    }
+
+    /**
+     * One line of a playlist file being resolved against the audio table:
+     * the raw path from the playlist plus the best database match found so far.
+     */
+    private static class PlaylistEntry {
+        String path;
+        // Row id of the best matching audio file found so far.
+        long bestmatchid;
+        // Quality of that match; higher wins (0 = no match yet).
+        int bestmatchlevel;
+    }
+
+ private final ArrayList<PlaylistEntry> mPlaylistEntries = new ArrayList<>();
+ private final ArrayList<FileEntry> mPlayLists = new ArrayList<>();
+
+ private MediaInserter mMediaInserter;
+
+ private DrmManagerClient mDrmManagerClient = null;
+
+    /**
+     * Creates a scanner bound to one media volume.
+     *
+     * @param c context used for the content resolver, shared prefs and locale
+     * @param volumeName the MediaStore volume to scan; "internal" disables
+     *        playlist and genre processing (only supported on external media)
+     */
+    public MediaScanner(Context c, String volumeName) {
+        native_setup();
+        mContext = c;
+        mPackageName = c.getPackageName();
+        mVolumeName = volumeName;
+
+        // Decode bounds only: image dimensions are read without allocating pixels.
+        mBitmapOptions.inSampleSize = 1;
+        mBitmapOptions.inJustDecodeBounds = true;
+
+        setDefaultRingtoneFileNames();
+
+        mMediaProvider = mContext.getContentResolver()
+                .acquireContentProviderClient(MediaStore.AUTHORITY);
+
+        // Cache the fingerprint of the build that performed the last internal
+        // scan; system sounds are rescanned when the build changes.
+        if (sLastInternalScanFingerprint == null) {
+            final SharedPreferences scanSettings =
+                    mContext.getSharedPreferences(SCANNED_BUILD_PREFS_NAME, Context.MODE_PRIVATE);
+            sLastInternalScanFingerprint =
+                    scanSettings.getString(LAST_INTERNAL_SCAN_FINGERPRINT, new String());
+        }
+
+        mAudioUri = Audio.Media.getContentUri(volumeName);
+        mVideoUri = Video.Media.getContentUri(volumeName);
+        mImagesUri = Images.Media.getContentUri(volumeName);
+        mThumbsUri = Images.Thumbnails.getContentUri(volumeName);
+        mFilesUri = Files.getContentUri(volumeName);
+        // Variant used for bulk operations that should not trigger notifications.
+        mFilesUriNoNotify = mFilesUri.buildUpon().appendQueryParameter("nonotify", "1").build();
+
+        if (!volumeName.equals("internal")) {
+            // we only support playlists on external media
+            mProcessPlaylists = true;
+            mProcessGenres = true;
+            mPlaylistsUri = Playlists.getContentUri(volumeName);
+        } else {
+            mProcessPlaylists = false;
+            mProcessGenres = false;
+            mPlaylistsUri = null;
+        }
+
+        // Pass the current locale down to native code ("lang" or "lang_COUNTRY").
+        final Locale locale = mContext.getResources().getConfiguration().locale;
+        if (locale != null) {
+            String language = locale.getLanguage();
+            String country = locale.getCountry();
+            if (language != null) {
+                if (country != null) {
+                    setLocale(language + "_" + country);
+                } else {
+                    setLocale(language);
+                }
+            }
+        }
+
+        mCloseGuard.open("close");
+    }
+
+    /**
+     * Reads the device's default ringtone, notification and alarm sound
+     * filenames from the "ro.config.*" system properties.
+     */
+    private void setDefaultRingtoneFileNames() {
+        mDefaultRingtoneFilename = SystemProperties.get(DEFAULT_RINGTONE_PROPERTY_PREFIX
+                + Settings.System.RINGTONE);
+        mDefaultNotificationFilename = SystemProperties.get(DEFAULT_RINGTONE_PROPERTY_PREFIX
+                + Settings.System.NOTIFICATION_SOUND);
+        mDefaultAlarmAlertFilename = SystemProperties.get(DEFAULT_RINGTONE_PROPERTY_PREFIX
+                + Settings.System.ALARM_ALERT);
+    }
+
+ private final MyMediaScannerClient mClient = new MyMediaScannerClient();
+
+    /** Returns true when the "drm.service.enabled" system property is exactly "true". */
+    private boolean isDrmEnabled() {
+        String prop = SystemProperties.get("drm.service.enabled");
+        return prop != null && prop.equals("true");
+    }
+
+ private class MyMediaScannerClient implements MediaScannerClient {
+
+ private final SimpleDateFormat mDateFormatter;
+
+ private String mArtist;
+ private String mAlbumArtist; // use this if mArtist is missing
+ private String mAlbum;
+ private String mTitle;
+ private String mComposer;
+ private String mGenre;
+ private String mMimeType;
+ private int mFileType;
+ private int mTrack;
+ private int mYear;
+ private int mDuration;
+ private String mPath;
+ private long mDate;
+ private long mLastModified;
+ private long mFileSize;
+ private String mWriter;
+ private int mCompilation;
+ private boolean mIsDrm;
+ private boolean mNoMedia; // flag to suppress file from appearing in media tables
+ private int mWidth;
+ private int mHeight;
+
+        public MyMediaScannerClient() {
+            // Parser for "date" tags in basic ISO-8601 form, interpreted as UTC.
+            mDateFormatter = new SimpleDateFormat("yyyyMMdd'T'HHmmss");
+            mDateFormatter.setTimeZone(TimeZone.getTimeZone("UTC"));
+        }
+
+        /**
+         * Prepares per-file scanner state before metadata extraction.
+         *
+         * Determines the file's media type (from mimeType, then extension,
+         * then DRM probing), looks up or creates its database FileEntry,
+         * marks it for rescan when the on-disk mtime moved, and resets all
+         * per-file metadata fields.
+         *
+         * @return the entry to scan, or null for playlist files, which are
+         *         queued in mPlayLists and processed after the main scan
+         */
+        public FileEntry beginFile(String path, String mimeType, long lastModified,
+                long fileSize, boolean isDirectory, boolean noMedia) {
+            mMimeType = mimeType;
+            mFileType = 0;
+            mFileSize = fileSize;
+            mIsDrm = false;
+
+            if (!isDirectory) {
+                if (!noMedia && isNoMediaFile(path)) {
+                    noMedia = true;
+                }
+                mNoMedia = noMedia;
+
+                // try mimeType first, if it is specified
+                if (mimeType != null) {
+                    mFileType = MediaFile.getFileTypeForMimeType(mimeType);
+                }
+
+                // if mimeType was not specified, compute file type based on file extension.
+                if (mFileType == 0) {
+                    MediaFile.MediaFileType mediaFileType = MediaFile.getFileType(path);
+                    if (mediaFileType != null) {
+                        mFileType = mediaFileType.fileType;
+                        if (mMimeType == null) {
+                            mMimeType = mediaFileType.mimeType;
+                        }
+                    }
+                }
+
+                // DRM containers hide the real media type; ask the DRM framework.
+                if (isDrmEnabled() && MediaFile.isDrmFileType(mFileType)) {
+                    mFileType = getFileTypeFromDrm(path);
+                }
+            }
+
+            FileEntry entry = makeEntryFor(path);
+            // add some slack to avoid a rounding error
+            long delta = (entry != null) ? (lastModified - entry.mLastModified) : 0;
+            boolean wasModified = delta > 1 || delta < -1;
+            if (entry == null || wasModified) {
+                if (wasModified) {
+                    entry.mLastModified = lastModified;
+                } else {
+                    // Unknown file: create a fresh entry with rowId 0 so endFile inserts it.
+                    entry = new FileEntry(0, path, lastModified,
+                            (isDirectory ? MtpConstants.FORMAT_ASSOCIATION : 0));
+                }
+                entry.mLastModifiedChanged = true;
+            }
+
+            if (mProcessPlaylists && MediaFile.isPlayListFileType(mFileType)) {
+                mPlayLists.add(entry);
+                // we don't process playlists in the main scan, so return null
+                return null;
+            }
+
+            // clear all the metadata
+            mArtist = null;
+            mAlbumArtist = null;
+            mAlbum = null;
+            mTitle = null;
+            mComposer = null;
+            mGenre = null;
+            mTrack = 0;
+            mYear = 0;
+            mDuration = 0;
+            mPath = path;
+            mDate = 0;
+            mLastModified = lastModified;
+            mWriter = null;
+            mCompilation = 0;
+            mWidth = 0;
+            mHeight = 0;
+
+            return entry;
+        }
+
+        @Override
+        public void scanFile(String path, long lastModified, long fileSize,
+                boolean isDirectory, boolean noMedia) {
+            // This is the callback function from native code; it delegates to
+            // doScanFile with no explicit mimeType and scanAlways == false.
+            // Log.v(TAG, "scanFile: "+path);
+            doScanFile(path, null, lastModified, fileSize, isDirectory, false, noMedia);
+        }
+
+        /**
+         * Scans a single file: decides whether it needs (re)scanning, extracts
+         * metadata for audio/video, reads dimensions for images, and writes the
+         * result to the media database via endFile.
+         *
+         * @param path absolute path of the file
+         * @param mimeType mime type hint, or null to derive from the extension
+         * @param scanAlways force metadata extraction even if the mtime is unchanged
+         * @param noMedia true to record the file without media metadata
+         * @return the content Uri of the updated row, or null if nothing was written
+         */
+        public Uri doScanFile(String path, String mimeType, long lastModified,
+                long fileSize, boolean isDirectory, boolean scanAlways, boolean noMedia) {
+            Uri result = null;
+//            long t1 = System.currentTimeMillis();
+            try {
+                FileEntry entry = beginFile(path, mimeType, lastModified,
+                        fileSize, isDirectory, noMedia);
+
+                if (entry == null) {
+                    return null;
+                }
+
+                // if this file was just inserted via mtp, set the rowid to zero
+                // (even though it already exists in the database), to trigger
+                // the correct code path for updating its entry
+                if (mMtpObjectHandle != 0) {
+                    entry.mRowId = 0;
+                }
+
+                if (entry.mPath != null) {
+                    if (((!mDefaultNotificationSet &&
+                            doesPathHaveFilename(entry.mPath, mDefaultNotificationFilename))
+                            || (!mDefaultRingtoneSet &&
+                            doesPathHaveFilename(entry.mPath, mDefaultRingtoneFilename))
+                            || (!mDefaultAlarmSet &&
+                            doesPathHaveFilename(entry.mPath, mDefaultAlarmAlertFilename)))) {
+                        // Fixed: the second literal previously lacked a leading space,
+                        // producing a run-together log message ("<path>since ringtone...").
+                        Log.w(TAG, "forcing rescan of " + entry.mPath +
+                                " since ringtone setting didn't finish");
+                        scanAlways = true;
+                    } else if (isSystemSoundWithMetadata(entry.mPath)
+                            && !Build.FINGERPRINT.equals(sLastInternalScanFingerprint)) {
+                        // file is located on the system partition where the date cannot be trusted:
+                        // rescan if the build fingerprint has changed since the last scan.
+                        Log.i(TAG, "forcing rescan of " + entry.mPath
+                                + " since build fingerprint changed");
+                        scanAlways = true;
+                    }
+                }
+
+                // rescan for metadata if file was modified since last scan
+                if (entry != null && (entry.mLastModifiedChanged || scanAlways)) {
+                    if (noMedia) {
+                        result = endFile(entry, false, false, false, false, false);
+                    } else {
+                        // Classify the file by its directory so endFile can set the
+                        // is_ringtone/is_notification/is_alarm/is_music/is_podcast flags.
+                        String lowpath = path.toLowerCase(Locale.ROOT);
+                        boolean ringtones = (lowpath.indexOf(RINGTONES_DIR) > 0);
+                        boolean notifications = (lowpath.indexOf(NOTIFICATIONS_DIR) > 0);
+                        boolean alarms = (lowpath.indexOf(ALARMS_DIR) > 0);
+                        boolean podcasts = (lowpath.indexOf(PODCAST_DIR) > 0);
+                        boolean music = (lowpath.indexOf(MUSIC_DIR) > 0) ||
+                            (!ringtones && !notifications && !alarms && !podcasts);
+
+                        boolean isaudio = MediaFile.isAudioFileType(mFileType);
+                        boolean isvideo = MediaFile.isVideoFileType(mFileType);
+                        boolean isimage = MediaFile.isImageFileType(mFileType);
+
+                        if (isaudio || isvideo || isimage) {
+                            path = Environment.maybeTranslateEmulatedPathToInternal(new File(path))
+                                    .getAbsolutePath();
+                        }
+
+                        // we only extract metadata for audio and video files
+                        if (isaudio || isvideo) {
+                            processFile(path, mimeType, this);
+                        }
+
+                        if (isimage) {
+                            processImageFile(path);
+                        }
+
+                        result = endFile(entry, ringtones, notifications, alarms, music, podcasts);
+                    }
+                }
+            } catch (RemoteException e) {
+                Log.e(TAG, "RemoteException in MediaScanner.scanFile()", e);
+            }
+//            long t2 = System.currentTimeMillis();
+//            Log.v(TAG, "scanFile: " + path + " took " + (t2-t1));
+            return result;
+        }
+
+        /**
+         * Parses a "yyyyMMdd'T'HHmmss" date tag (UTC) into epoch milliseconds,
+         * returning 0 when the string does not match the expected format.
+         */
+        private long parseDate(String date) {
+            try {
+                return mDateFormatter.parse(date).getTime();
+            } catch (ParseException e) {
+                return 0;
+            }
+        }
+
+        /**
+         * Parses a run of decimal digits starting at {@code start} in {@code s}.
+         * Stops at the first non-digit; handles values like "2/12" by reading
+         * only the leading number.
+         *
+         * @param defaultValue returned when there is no digit at {@code start}
+         */
+        private int parseSubstring(String s, int start, int defaultValue) {
+            int length = s.length();
+            if (start == length) return defaultValue;
+
+            char ch = s.charAt(start++);
+            // return defaultValue if we have no integer at all
+            if (ch < '0' || ch > '9') return defaultValue;
+
+            int result = ch - '0';
+            while (start < length) {
+                ch = s.charAt(start++);
+                // Non-digit terminates the number; return what was accumulated.
+                if (ch < '0' || ch > '9') return result;
+                result = result * 10 + (ch - '0');
+            }
+
+            return result;
+        }
+
+        /**
+         * Native-callback that receives one metadata key/value pair per call
+         * and stores the recognized ones into the per-file fields.
+         * Keys are matched case-insensitively; a trailing ";..." suffix on the
+         * key (e.g. "title;...") is treated the same as the bare key.
+         */
+        public void handleStringTag(String name, String value) {
+            if (name.equalsIgnoreCase("title") || name.startsWith("title;")) {
+                // Don't trim() here, to preserve the special \001 character
+                // used to force sorting. The media provider will trim() before
+                // inserting the title in to the database.
+                mTitle = value;
+            } else if (name.equalsIgnoreCase("artist") || name.startsWith("artist;")) {
+                mArtist = value.trim();
+            } else if (name.equalsIgnoreCase("albumartist") || name.startsWith("albumartist;")
+                    || name.equalsIgnoreCase("band") || name.startsWith("band;")) {
+                mAlbumArtist = value.trim();
+            } else if (name.equalsIgnoreCase("album") || name.startsWith("album;")) {
+                mAlbum = value.trim();
+            } else if (name.equalsIgnoreCase("composer") || name.startsWith("composer;")) {
+                mComposer = value.trim();
+            } else if (mProcessGenres &&
+                    (name.equalsIgnoreCase("genre") || name.startsWith("genre;"))) {
+                mGenre = getGenreName(value);
+            } else if (name.equalsIgnoreCase("year") || name.startsWith("year;")) {
+                mYear = parseSubstring(value, 0, 0);
+            } else if (name.equalsIgnoreCase("tracknumber") || name.startsWith("tracknumber;")) {
+                // track number might be of the form "2/12"
+                // we just read the number before the slash
+                // mTrack encodes disc*1000 + track; keep the disc part intact.
+                int num = parseSubstring(value, 0, 0);
+                mTrack = (mTrack / 1000) * 1000 + num;
+            } else if (name.equalsIgnoreCase("discnumber") ||
+                    name.equals("set") || name.startsWith("set;")) {
+                // set number might be of the form "1/3"
+                // we just read the number before the slash
+                int num = parseSubstring(value, 0, 0);
+                mTrack = (num * 1000) + (mTrack % 1000);
+            } else if (name.equalsIgnoreCase("duration")) {
+                mDuration = parseSubstring(value, 0, 0);
+            } else if (name.equalsIgnoreCase("writer") || name.startsWith("writer;")) {
+                mWriter = value.trim();
+            } else if (name.equalsIgnoreCase("compilation")) {
+                mCompilation = parseSubstring(value, 0, 0);
+            } else if (name.equalsIgnoreCase("isdrm")) {
+                mIsDrm = (parseSubstring(value, 0, 0) == 1);
+            } else if (name.equalsIgnoreCase("date")) {
+                mDate = parseDate(value);
+            } else if (name.equalsIgnoreCase("width")) {
+                mWidth = parseSubstring(value, 0, 0);
+            } else if (name.equalsIgnoreCase("height")) {
+                mHeight = parseSubstring(value, 0, 0);
+            } else {
+                // Unrecognized tags are intentionally ignored.
+                //Log.v(TAG, "unknown tag: " + name + " (" + mProcessGenres + ")");
+            }
+        }
+
+        /**
+         * Test helper: converts {@code input} through getGenreName and logs a
+         * mismatch against {@code expected}. Returns true when they match.
+         */
+        private boolean convertGenreCode(String input, String expected) {
+            String output = getGenreName(input);
+            if (output.equals(expected)) {
+                return true;
+            } else {
+                Log.d(TAG, "'" + input + "' -> '" + output + "', expected '" + expected + "'");
+                return false;
+            }
+        }
+        /**
+         * Manual smoke test for getGenreName covering numeric, parenthesized
+         * and mixed genre-tag forms; results are only logged, never asserted.
+         */
+        private void testGenreNameConverter() {
+            convertGenreCode("2", "Country");
+            convertGenreCode("(2)", "Country");
+            convertGenreCode("(2", "(2");
+            convertGenreCode("2 Foo", "Country");
+            convertGenreCode("(2) Foo", "Country");
+            convertGenreCode("(2 Foo", "(2 Foo");
+            convertGenreCode("2Foo", "2Foo");
+            convertGenreCode("(2)Foo", "Country");
+            convertGenreCode("200 Foo", "Foo");
+            convertGenreCode("(200) Foo", "Foo");
+            convertGenreCode("200Foo", "200Foo");
+            convertGenreCode("(200)Foo", "Foo");
+            convertGenreCode("200)Foo", "200)Foo");
+            convertGenreCode("200) Foo", "200) Foo");
+        }
+
+        /**
+         * Resolves an ID3 genre tag to a display name.
+         *
+         * Handles numeric codes ("2"), parenthesized codes ("(2)"), and codes
+         * followed by free text ("(200)Foo"). Known codes map into ID3_GENRES;
+         * code 0xFF means "none" and yields null; otherwise the raw tag (or
+         * trailing text) is returned unchanged.
+         */
+        public String getGenreName(String genreTagValue) {
+
+            if (genreTagValue == null) {
+                return null;
+            }
+            final int length = genreTagValue.length();
+
+            if (length > 0) {
+                boolean parenthesized = false;
+                StringBuffer number = new StringBuffer();
+                int i = 0;
+                // Collect the leading digits, noting an optional opening '('.
+                for (; i < length; ++i) {
+                    char c = genreTagValue.charAt(i);
+                    if (i == 0 && c == '(') {
+                        parenthesized = true;
+                    } else if (Character.isDigit(c)) {
+                        number.append(c);
+                    } else {
+                        break;
+                    }
+                }
+                char charAfterNumber = i < length ? genreTagValue.charAt(i) : ' ';
+                // A code is only honored when properly terminated: ')' for a
+                // parenthesized form, whitespace (or end) otherwise.
+                if ((parenthesized && charAfterNumber == ')')
+                        || !parenthesized && Character.isWhitespace(charAfterNumber)) {
+                    try {
+                        short genreIndex = Short.parseShort(number.toString());
+                        if (genreIndex >= 0) {
+                            if (genreIndex < ID3_GENRES.length && ID3_GENRES[genreIndex] != null) {
+                                return ID3_GENRES[genreIndex];
+                            } else if (genreIndex == 0xFF) {
+                                return null;
+                            } else if (genreIndex < 0xFF && (i + 1) < length) {
+                                // genre is valid but unknown,
+                                // if there is a string after the value we take it
+                                if (parenthesized && charAfterNumber == ')') {
+                                    i++;
+                                }
+                                String ret = genreTagValue.substring(i).trim();
+                                if (ret.length() != 0) {
+                                    return ret;
+                                }
+                            } else {
+                                // else return the number, without parentheses
+                                return number.toString();
+                            }
+                        }
+                    } catch (NumberFormatException e) {
+                        // Digits overflowed a short; deliberately fall through
+                        // and return the raw tag value below.
+                    }
+                }
+            }
+
+            return genreTagValue;
+        }
+
+        /**
+         * Reads the pixel dimensions of an image into mWidth/mHeight using a
+         * bounds-only decode (no pixel allocation). Decode failures are
+         * ignored, leaving the dimensions at 0.
+         */
+        private void processImageFile(String path) {
+            try {
+                mBitmapOptions.outWidth = 0;
+                mBitmapOptions.outHeight = 0;
+                BitmapFactory.decodeFile(path, mBitmapOptions);
+                mWidth = mBitmapOptions.outWidth;
+                mHeight = mBitmapOptions.outHeight;
+            } catch (Throwable th) {
+                // Best effort: a corrupt or huge image must not abort the scan.
+            }
+        }
+
+        /**
+         * Updates the current file's mime type (and derived file type) from
+         * metadata discovered during extraction.
+         */
+        public void setMimeType(String mimeType) {
+            if ("audio/mp4".equals(mMimeType) &&
+                    mimeType.startsWith("video")) {
+                // for feature parity with Donut, we force m4a files to keep the
+                // audio/mp4 mimetype, even if they are really "enhanced podcasts"
+                // with a video track
+                return;
+            }
+            mMimeType = mimeType;
+            mFileType = MediaFile.getFileTypeForMimeType(mimeType);
+        }
+
+        /**
+         * Formats the data into a values array suitable for use with the Media
+         * Content Provider.
+         *
+         * Common columns (path, title, size, mime type, DRM flag, dimensions)
+         * are always written; media-specific columns are added only for files
+         * not flagged no-media, keyed off the detected file type.
+         *
+         * @return a map of values
+         */
+        private ContentValues toValues() {
+            ContentValues map = new ContentValues();
+
+            map.put(MediaStore.MediaColumns.DATA, mPath);
+            map.put(MediaStore.MediaColumns.TITLE, mTitle);
+            map.put(MediaStore.MediaColumns.DATE_MODIFIED, mLastModified);
+            map.put(MediaStore.MediaColumns.SIZE, mFileSize);
+            map.put(MediaStore.MediaColumns.MIME_TYPE, mMimeType);
+            map.put(MediaStore.MediaColumns.IS_DRM, mIsDrm);
+
+            String resolution = null;
+            if (mWidth > 0 && mHeight > 0) {
+                map.put(MediaStore.MediaColumns.WIDTH, mWidth);
+                map.put(MediaStore.MediaColumns.HEIGHT, mHeight);
+                resolution = mWidth + "x" + mHeight;
+            }
+
+            if (!mNoMedia) {
+                if (MediaFile.isVideoFileType(mFileType)) {
+                    map.put(Video.Media.ARTIST, (mArtist != null && mArtist.length() > 0
+                            ? mArtist : MediaStore.UNKNOWN_STRING));
+                    map.put(Video.Media.ALBUM, (mAlbum != null && mAlbum.length() > 0
+                            ? mAlbum : MediaStore.UNKNOWN_STRING));
+                    map.put(Video.Media.DURATION, mDuration);
+                    if (resolution != null) {
+                        map.put(Video.Media.RESOLUTION, resolution);
+                    }
+                    if (mDate > 0) {
+                        map.put(Video.Media.DATE_TAKEN, mDate);
+                    }
+                } else if (MediaFile.isImageFileType(mFileType)) {
+                    // FIXME - add DESCRIPTION
+                } else if (MediaFile.isAudioFileType(mFileType)) {
+                    // Audio falls back to UNKNOWN_STRING for missing artist/album,
+                    // but album artist is stored as null when absent.
+                    map.put(Audio.Media.ARTIST, (mArtist != null && mArtist.length() > 0) ?
+                            mArtist : MediaStore.UNKNOWN_STRING);
+                    map.put(Audio.Media.ALBUM_ARTIST, (mAlbumArtist != null &&
+                            mAlbumArtist.length() > 0) ? mAlbumArtist : null);
+                    map.put(Audio.Media.ALBUM, (mAlbum != null && mAlbum.length() > 0) ?
+                            mAlbum : MediaStore.UNKNOWN_STRING);
+                    map.put(Audio.Media.COMPOSER, mComposer);
+                    map.put(Audio.Media.GENRE, mGenre);
+                    if (mYear != 0) {
+                        map.put(Audio.Media.YEAR, mYear);
+                    }
+                    map.put(Audio.Media.TRACK, mTrack);
+                    map.put(Audio.Media.DURATION, mDuration);
+                    map.put(Audio.Media.COMPILATION, mCompilation);
+                }
+            }
+            return map;
+        }
+
+    /**
+     * Finishes scanning one file: builds its ContentValues and either inserts
+     * a new row (possibly through the bulk MediaInserter) or updates the
+     * existing one. May also set the system default ringtone/notification/
+     * alarm sounds the first time a matching file is seen.
+     *
+     * @param entry database entry for the file; mRowId == 0 means "new"
+     * @param ringtones whether the file lives in a ringtones directory
+     * @param notifications whether it lives in a notifications directory
+     * @param alarms whether it lives in an alarms directory
+     * @param music whether it lives in a music directory
+     * @param podcasts whether it lives in a podcasts directory
+     * @return the Uri of the affected row, or null when the insert was routed
+     *         through the bulk inserter (rowId not yet known)
+     */
+    private Uri endFile(FileEntry entry, boolean ringtones, boolean notifications,
+            boolean alarms, boolean music, boolean podcasts)
+            throws RemoteException {
+        // update database
+
+        // use album artist if artist is missing
+        if (mArtist == null || mArtist.length() == 0) {
+            mArtist = mAlbumArtist;
+        }
+
+        ContentValues values = toValues();
+        // Fall back to a title derived from the file name when metadata had none.
+        String title = values.getAsString(MediaStore.MediaColumns.TITLE);
+        if (title == null || TextUtils.isEmpty(title.trim())) {
+            title = MediaFile.getFileTitle(values.getAsString(MediaStore.MediaColumns.DATA));
+            values.put(MediaStore.MediaColumns.TITLE, title);
+        }
+        // Unknown album: guess it from the parent directory of the file path.
+        String album = values.getAsString(Audio.Media.ALBUM);
+        if (MediaStore.UNKNOWN_STRING.equals(album)) {
+            album = values.getAsString(MediaStore.MediaColumns.DATA);
+            // extract last path segment before file name
+            int lastSlash = album.lastIndexOf('/');
+            if (lastSlash >= 0) {
+                int previousSlash = 0;
+                // Walk forward to find the slash that starts the last directory.
+                while (true) {
+                    int idx = album.indexOf('/', previousSlash + 1);
+                    if (idx < 0 || idx >= lastSlash) {
+                        break;
+                    }
+                    previousSlash = idx;
+                }
+                if (previousSlash != 0) {
+                    album = album.substring(previousSlash + 1, lastSlash);
+                    values.put(Audio.Media.ALBUM, album);
+                }
+            }
+        }
+        long rowId = entry.mRowId;
+        if (MediaFile.isAudioFileType(mFileType) && (rowId == 0 || mMtpObjectHandle != 0)) {
+            // Only set these for new entries. For existing entries, they
+            // may have been modified later, and we want to keep the current
+            // values so that custom ringtones still show up in the ringtone
+            // picker.
+            values.put(Audio.Media.IS_RINGTONE, ringtones);
+            values.put(Audio.Media.IS_NOTIFICATION, notifications);
+            values.put(Audio.Media.IS_ALARM, alarms);
+            values.put(Audio.Media.IS_MUSIC, music);
+            values.put(Audio.Media.IS_PODCAST, podcasts);
+        } else if ((mFileType == MediaFile.FILE_TYPE_JPEG
+                || MediaFile.isRawImageFileType(mFileType)) && !mNoMedia) {
+            // For JPEG/RAW images, pull GPS, timestamp and orientation from EXIF.
+            ExifInterface exif = null;
+            try {
+                exif = new ExifInterface(entry.mPath);
+            } catch (IOException ex) {
+                // exif is null
+            }
+            if (exif != null) {
+                float[] latlng = new float[2];
+                if (exif.getLatLong(latlng)) {
+                    values.put(Images.Media.LATITUDE, latlng[0]);
+                    values.put(Images.Media.LONGITUDE, latlng[1]);
+                }
+
+                long time = exif.getGpsDateTime();
+                if (time != -1) {
+                    values.put(Images.Media.DATE_TAKEN, time);
+                } else {
+                    // If no time zone information is available, we should consider using
+                    // EXIF local time as taken time if the difference between file time
+                    // and EXIF local time is not less than 1 Day, otherwise MediaProvider
+                    // will use file time as taken time.
+                    time = exif.getDateTime();
+                    // mLastModified is in seconds; EXIF time is in milliseconds.
+                    if (time != -1 && Math.abs(mLastModified * 1000 - time) >= 86400000) {
+                        values.put(Images.Media.DATE_TAKEN, time);
+                    }
+                }
+
+                int orientation = exif.getAttributeInt(
+                    ExifInterface.TAG_ORIENTATION, -1);
+                if (orientation != -1) {
+                    // We only recognize a subset of orientation tag values.
+                    int degree;
+                    switch(orientation) {
+                        case ExifInterface.ORIENTATION_ROTATE_90:
+                            degree = 90;
+                            break;
+                        case ExifInterface.ORIENTATION_ROTATE_180:
+                            degree = 180;
+                            break;
+                        case ExifInterface.ORIENTATION_ROTATE_270:
+                            degree = 270;
+                            break;
+                        default:
+                            degree = 0;
+                            break;
+                    }
+                    values.put(Images.Media.ORIENTATION, degree);
+                }
+            }
+        }
+
+        // Route the row to the table matching the media type; .nomedia files
+        // always land in the generic files table.
+        Uri tableUri = mFilesUri;
+        MediaInserter inserter = mMediaInserter;
+        if (!mNoMedia) {
+            if (MediaFile.isVideoFileType(mFileType)) {
+                tableUri = mVideoUri;
+            } else if (MediaFile.isImageFileType(mFileType)) {
+                tableUri = mImagesUri;
+            } else if (MediaFile.isAudioFileType(mFileType)) {
+                tableUri = mAudioUri;
+            }
+        }
+        Uri result = null;
+        boolean needToSetSettings = false;
+        // Setting a flag in order not to use bulk insert for the file related with
+        // notifications, ringtones, and alarms, because the rowId of the inserted file is
+        // needed.
+        if (notifications && !mDefaultNotificationSet) {
+            if (TextUtils.isEmpty(mDefaultNotificationFilename) ||
+                    doesPathHaveFilename(entry.mPath, mDefaultNotificationFilename)) {
+                needToSetSettings = true;
+            }
+        } else if (ringtones && !mDefaultRingtoneSet) {
+            if (TextUtils.isEmpty(mDefaultRingtoneFilename) ||
+                    doesPathHaveFilename(entry.mPath, mDefaultRingtoneFilename)) {
+                needToSetSettings = true;
+            }
+        } else if (alarms && !mDefaultAlarmSet) {
+            if (TextUtils.isEmpty(mDefaultAlarmAlertFilename) ||
+                    doesPathHaveFilename(entry.mPath, mDefaultAlarmAlertFilename)) {
+                needToSetSettings = true;
+            }
+        }
+
+        if (rowId == 0) {
+            if (mMtpObjectHandle != 0) {
+                values.put(MediaStore.MediaColumns.MEDIA_SCANNER_NEW_OBJECT_ID, mMtpObjectHandle);
+            }
+            if (tableUri == mFilesUri) {
+                int format = entry.mFormat;
+                if (format == 0) {
+                    format = MediaFile.getFormatCode(entry.mPath, mMimeType);
+                }
+                values.put(Files.FileColumns.FORMAT, format);
+            }
+            // New file, insert it.
+            // Directories need to be inserted before the files they contain, so they
+            // get priority when bulk inserting.
+            // If the rowId of the inserted file is needed, it gets inserted immediately,
+            // bypassing the bulk inserter.
+            if (inserter == null || needToSetSettings) {
+                if (inserter != null) {
+                    inserter.flushAll();
+                }
+                result = mMediaProvider.insert(tableUri, values);
+            } else if (entry.mFormat == MtpConstants.FORMAT_ASSOCIATION) {
+                inserter.insertwithPriority(tableUri, values);
+            } else {
+                inserter.insert(tableUri, values);
+            }
+
+            if (result != null) {
+                rowId = ContentUris.parseId(result);
+                entry.mRowId = rowId;
+            }
+        } else {
+            // updated file
+            result = ContentUris.withAppendedId(tableUri, rowId);
+            // path should never change, and we want to avoid replacing mixed cased paths
+            // with squashed lower case paths
+            values.remove(MediaStore.MediaColumns.DATA);
+
+            // Recompute MEDIA_TYPE from the (possibly DRM-corrected) mime type.
+            int mediaType = 0;
+            if (!MediaScanner.isNoMediaPath(entry.mPath)) {
+                int fileType = MediaFile.getFileTypeForMimeType(mMimeType);
+                if (MediaFile.isAudioFileType(fileType)) {
+                    mediaType = FileColumns.MEDIA_TYPE_AUDIO;
+                } else if (MediaFile.isVideoFileType(fileType)) {
+                    mediaType = FileColumns.MEDIA_TYPE_VIDEO;
+                } else if (MediaFile.isImageFileType(fileType)) {
+                    mediaType = FileColumns.MEDIA_TYPE_IMAGE;
+                } else if (MediaFile.isPlayListFileType(fileType)) {
+                    mediaType = FileColumns.MEDIA_TYPE_PLAYLIST;
+                }
+                values.put(FileColumns.MEDIA_TYPE, mediaType);
+            }
+            mMediaProvider.update(result, values, null, null);
+        }
+
+        // First matching file becomes the system default sound of its kind.
+        if(needToSetSettings) {
+            if (notifications) {
+                setRingtoneIfNotSet(Settings.System.NOTIFICATION_SOUND, tableUri, rowId);
+                mDefaultNotificationSet = true;
+            } else if (ringtones) {
+                setRingtoneIfNotSet(Settings.System.RINGTONE, tableUri, rowId);
+                mDefaultRingtoneSet = true;
+            } else if (alarms) {
+                setRingtoneIfNotSet(Settings.System.ALARM_ALERT, tableUri, rowId);
+                mDefaultAlarmSet = true;
+            }
+        }
+
+        return result;
+    }
+
+ private boolean doesPathHaveFilename(String path, String filename) {
+ int pathFilenameStart = path.lastIndexOf(File.separatorChar) + 1;
+ int filenameLength = filename.length();
+ return path.regionMatches(pathFilenameStart, filename, 0, filenameLength) &&
+ pathFilenameStart + filenameLength == path.length();
+ }
+
+    /**
+     * Makes the row (uri, rowId) the system default sound for the given
+     * Settings.System key — but only if neither the "_set" indicator flag
+     * nor an explicit user setting already exists. Always records the
+     * indicator flag afterwards so this runs at most once per key.
+     */
+    private void setRingtoneIfNotSet(String settingName, Uri uri, long rowId) {
+        if (wasRingtoneAlreadySet(settingName)) {
+            return;
+        }
+
+        ContentResolver cr = mContext.getContentResolver();
+        String existingSettingValue = Settings.System.getString(cr, settingName);
+        if (TextUtils.isEmpty(existingSettingValue)) {
+            // Settings provider has no value yet; install this row as the default.
+            final Uri settingUri = Settings.System.getUriFor(settingName);
+            final Uri ringtoneUri = ContentUris.withAppendedId(uri, rowId);
+            RingtoneManager.setActualDefaultRingtoneUri(mContext,
+                    RingtoneManager.getDefaultType(settingUri), ringtoneUri);
+        }
+        // Mark the default as handled even when a value already existed.
+        Settings.System.putInt(cr, settingSetIndicatorName(settingName), 1);
+    }
+
+    /**
+     * Asks the DRM framework about {@code path}. If the file is DRM-protected,
+     * marks the current file as DRM, replaces mMimeType with the original
+     * (pre-protection) mime type and returns the file type derived from it.
+     *
+     * @return the derived file type, or 0 if DRM is disabled or the file is
+     *         not handled by the DRM framework
+     */
+    private int getFileTypeFromDrm(String path) {
+        if (!isDrmEnabled()) {
+            return 0;
+        }
+
+        int resultFileType = 0;
+
+        // Lazily create the client; it is released in releaseResources().
+        if (mDrmManagerClient == null) {
+            mDrmManagerClient = new DrmManagerClient(mContext);
+        }
+
+        if (mDrmManagerClient.canHandle(path, null)) {
+            mIsDrm = true;
+            String drmMimetype = mDrmManagerClient.getOriginalMimeType(path);
+            if (drmMimetype != null) {
+                mMimeType = drmMimetype;
+                resultFileType = MediaFile.getFileTypeForMimeType(drmMimetype);
+            }
+        }
+        return resultFileType;
+    }
+
+ }; // end of anonymous MediaScannerClient instance
+
+    /**
+     * Returns true when {@code path} is inside one of the system sound
+     * directories (alarms, ringtones or notifications).
+     */
+    private static boolean isSystemSoundWithMetadata(String path) {
+        return path.startsWith(SYSTEM_SOUNDS_DIR + ALARMS_DIR)
+                || path.startsWith(SYSTEM_SOUNDS_DIR + RINGTONES_DIR)
+                || path.startsWith(SYSTEM_SOUNDS_DIR + NOTIFICATIONS_DIR);
+    }
+
+    /**
+     * Returns the Settings.System key used as a "was this default already
+     * chosen" indicator for the given base setting name.
+     */
+    private String settingSetIndicatorName(String base) {
+        return base.concat("_set");
+    }
+
+    /**
+     * Returns true if the "_set" indicator flag for the given default-sound
+     * setting has been recorded; a missing flag reads as false.
+     */
+    private boolean wasRingtoneAlreadySet(String name) {
+        ContentResolver cr = mContext.getContentResolver();
+        String indicatorName = settingSetIndicatorName(name);
+        try {
+            return Settings.System.getInt(cr, indicatorName) != 0;
+        } catch (SettingNotFoundException e) {
+            // Flag never written: the default has not been set yet.
+            return false;
+        }
+    }
+
+ private void prescan(String filePath, boolean prescanFiles) throws RemoteException {
+ Cursor c = null;
+ String where = null;
+ String[] selectionArgs = null;
+
+ mPlayLists.clear();
+
+ if (filePath != null) {
+ // query for only one file
+ where = MediaStore.Files.FileColumns._ID + ">?" +
+ " AND " + Files.FileColumns.DATA + "=?";
+ selectionArgs = new String[] { "", filePath };
+ } else {
+ where = MediaStore.Files.FileColumns._ID + ">?";
+ selectionArgs = new String[] { "" };
+ }
+
+ mDefaultRingtoneSet = wasRingtoneAlreadySet(Settings.System.RINGTONE);
+ mDefaultNotificationSet = wasRingtoneAlreadySet(Settings.System.NOTIFICATION_SOUND);
+ mDefaultAlarmSet = wasRingtoneAlreadySet(Settings.System.ALARM_ALERT);
+
+ // Tell the provider to not delete the file.
+ // If the file is truly gone the delete is unnecessary, and we want to avoid
+ // accidentally deleting files that are really there (this may happen if the
+ // filesystem is mounted and unmounted while the scanner is running).
+ Uri.Builder builder = mFilesUri.buildUpon();
+ builder.appendQueryParameter(MediaStore.PARAM_DELETE_DATA, "false");
+ MediaBulkDeleter deleter = new MediaBulkDeleter(mMediaProvider, builder.build());
+
+ // Build the list of files from the content provider
+ try {
+ if (prescanFiles) {
+ // First read existing files from the files table.
+ // Because we'll be deleting entries for missing files as we go,
+ // we need to query the database in small batches, to avoid problems
+ // with CursorWindow positioning.
+ long lastId = Long.MIN_VALUE;
+ Uri limitUri = mFilesUri.buildUpon().appendQueryParameter("limit", "1000").build();
+
+ while (true) {
+ selectionArgs[0] = "" + lastId;
+ if (c != null) {
+ c.close();
+ c = null;
+ }
+ c = mMediaProvider.query(limitUri, FILES_PRESCAN_PROJECTION,
+ where, selectionArgs, MediaStore.Files.FileColumns._ID, null);
+ if (c == null) {
+ break;
+ }
+
+ int num = c.getCount();
+
+ if (num == 0) {
+ break;
+ }
+ while (c.moveToNext()) {
+ long rowId = c.getLong(FILES_PRESCAN_ID_COLUMN_INDEX);
+ String path = c.getString(FILES_PRESCAN_PATH_COLUMN_INDEX);
+ int format = c.getInt(FILES_PRESCAN_FORMAT_COLUMN_INDEX);
+ long lastModified = c.getLong(FILES_PRESCAN_DATE_MODIFIED_COLUMN_INDEX);
+ lastId = rowId;
+
+ // Only consider entries with absolute path names.
+ // This allows storing URIs in the database without the
+ // media scanner removing them.
+ if (path != null && path.startsWith("/")) {
+ boolean exists = false;
+ try {
+ exists = Os.access(path, android.system.OsConstants.F_OK);
+ } catch (ErrnoException e1) {
+ }
+ if (!exists && !MtpConstants.isAbstractObject(format)) {
+ // do not delete missing playlists, since they may have been
+ // modified by the user.
+ // The user can delete them in the media player instead.
+ // instead, clear the path and lastModified fields in the row
+ MediaFile.MediaFileType mediaFileType = MediaFile.getFileType(path);
+ int fileType = (mediaFileType == null ? 0 : mediaFileType.fileType);
+
+ if (!MediaFile.isPlayListFileType(fileType)) {
+ deleter.delete(rowId);
+ if (path.toLowerCase(Locale.US).endsWith("/.nomedia")) {
+ deleter.flush();
+ String parent = new File(path).getParent();
+ mMediaProvider.call(MediaStore.UNHIDE_CALL, parent, null);
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ finally {
+ if (c != null) {
+ c.close();
+ }
+ deleter.flush();
+ }
+
+ // compute original size of images
+ mOriginalCount = 0;
+ c = mMediaProvider.query(mImagesUri, ID_PROJECTION, null, null, null, null);
+ if (c != null) {
+ mOriginalCount = c.getCount();
+ c.close();
+ }
+ }
+
+    /**
+     * Deletes thumbnail files on disk that no longer have a corresponding row
+     * in the thumbnails table: lists the thumbnail directory, removes every
+     * path still referenced by the database, and deletes the remainder.
+     *
+     * NOTE(review): the directory is hard-coded to "/sdcard/DCIM/.thumbnails"
+     * rather than derived from the volume being scanned — confirm this is the
+     * intended (primary external) volume.
+     */
+    private void pruneDeadThumbnailFiles() {
+        HashSet<String> existingFiles = new HashSet<String>();
+        String directory = "/sdcard/DCIM/.thumbnails";
+        String [] files = (new File(directory)).list();
+        Cursor c = null;
+        if (files == null)
+            files = new String[0];
+
+        for (int i = 0; i < files.length; i++) {
+            String fullPathString = directory + "/" + files[i];
+            existingFiles.add(fullPathString);
+        }
+
+        try {
+            c = mMediaProvider.query(
+                    mThumbsUri,
+                    new String [] { "_data" },
+                    null,
+                    null,
+                    null, null);
+            Log.v(TAG, "pruneDeadThumbnailFiles... " + c);
+            if (c != null && c.moveToFirst()) {
+                do {
+                    // Thumbnails still referenced by the database are kept.
+                    String fullPathString = c.getString(0);
+                    existingFiles.remove(fullPathString);
+                } while (c.moveToNext());
+            }
+
+            // Whatever remains is unreferenced; delete it (best effort).
+            for (String fileToDelete : existingFiles) {
+                try {
+                    (new File(fileToDelete)).delete();
+                } catch (SecurityException ex) {
+                    // skip files we are not allowed to delete
+                }
+            }
+
+            Log.v(TAG, "/pruneDeadThumbnailFiles... " + c);
+        } catch (RemoteException e) {
+            // We will soon be killed...
+        } finally {
+            if (c != null) {
+                c.close();
+            }
+        }
+    }
+
+    /**
+     * Batches row deletions against a single base Uri so missing files can be
+     * removed with few provider round-trips. Ids are buffered into a growing
+     * "?"-placeholder IN clause and flushed automatically past 100 entries;
+     * callers must invoke {@link #flush} once more when finished.
+     */
+    static class MediaBulkDeleter {
+        StringBuilder whereClause = new StringBuilder();
+        ArrayList<String> whereArgs = new ArrayList<String>(100);
+        final ContentProviderClient mProvider;
+        final Uri mBaseUri;
+
+        public MediaBulkDeleter(ContentProviderClient provider, Uri baseUri) {
+            mProvider = provider;
+            mBaseUri = baseUri;
+        }
+
+        /** Queues the row with the given id for deletion, flushing as needed. */
+        public void delete(long id) throws RemoteException {
+            if (whereClause.length() != 0) {
+                whereClause.append(",");
+            }
+            whereClause.append("?");
+            whereArgs.add("" + id);
+            if (whereArgs.size() > 100) {
+                flush();
+            }
+        }
+
+        /** Issues one "_ID IN (...)" delete for all queued ids, if any. */
+        public void flush() throws RemoteException {
+            int size = whereArgs.size();
+            if (size > 0) {
+                String [] args = whereArgs.toArray(new String[size]);
+                mProvider.delete(mBaseUri,
+                        MediaStore.MediaColumns._ID + " IN (" +
+                        whereClause.toString() + ")", args);
+                whereClause.setLength(0);
+                whereArgs.clear();
+            }
+        }
+    }
+
+    /**
+     * Runs after all directories have been scanned: resolves playlist
+     * membership and, for the primary external images volume, prunes stale
+     * thumbnail files.
+     */
+    private void postscan(final String[] directories) throws RemoteException {
+        // Handle playlists last, after we know what media files are on the storage.
+        if (mProcessPlaylists) {
+            processPlayLists();
+        }
+
+        if (mOriginalCount == 0
+                && mImagesUri.equals(Images.Media.getContentUri("external"))) {
+            pruneDeadThumbnailFiles();
+        }
+
+        // Allow GC to clean up.
+        mPlayLists.clear();
+    }
+
+    /**
+     * Releases per-scan resources; called from the finally blocks of every
+     * public scan entry point.
+     */
+    private void releaseResources() {
+        // release the DrmManagerClient resources
+        if (mDrmManagerClient != null) {
+            mDrmManagerClient.close();
+            mDrmManagerClient = null;
+        }
+    }
+
+    /**
+     * Scans the given directories end to end: prescan (prune missing files),
+     * native directory walk (optionally bulk-inserting new rows), then
+     * postscan (playlists and thumbnail pruning). Errors caused by storage
+     * disappearing mid-scan are logged and swallowed.
+     */
+    public void scanDirectories(String[] directories) {
+        try {
+            long start = System.currentTimeMillis();
+            prescan(null, true);
+            long prescan = System.currentTimeMillis();
+
+            if (ENABLE_BULK_INSERTS) {
+                // create MediaInserter for bulk inserts
+                mMediaInserter = new MediaInserter(mMediaProvider, 500);
+            }
+
+            for (int i = 0; i < directories.length; i++) {
+                processDirectory(directories[i], mClient);
+            }
+
+            if (ENABLE_BULK_INSERTS) {
+                // flush remaining inserts
+                mMediaInserter.flushAll();
+                mMediaInserter = null;
+            }
+
+            long scan = System.currentTimeMillis();
+            postscan(directories);
+            long end = System.currentTimeMillis();
+
+            // Dead debug block (if (false)): phase timing logs, kept for easy
+            // re-enabling during development.
+            if (false) {
+                Log.d(TAG, " prescan time: " + (prescan - start) + "ms\n");
+                Log.d(TAG, "    scan time: " + (scan - prescan) + "ms\n");
+                Log.d(TAG, "postscan time: " + (end - scan) + "ms\n");
+                Log.d(TAG, "   total time: " + (end - start) + "ms\n");
+            }
+        } catch (SQLException e) {
+            // this might happen if the SD card is removed while the media scanner is running
+            Log.e(TAG, "SQLException in MediaScanner.scan()", e);
+        } catch (UnsupportedOperationException e) {
+            // this might happen if the SD card is removed while the media scanner is running
+            Log.e(TAG, "UnsupportedOperationException in MediaScanner.scan()", e);
+        } catch (RemoteException e) {
+            Log.e(TAG, "RemoteException in MediaScanner.scan()", e);
+        } finally {
+            releaseResources();
+        }
+    }
+
+    /**
+     * Scans a single file and returns its content://media Uri, or null if the
+     * file is unreadable or the provider could not be reached. Existing files
+     * are rescanned so their Uri can still be returned.
+     */
+    public Uri scanSingleFile(String path, String mimeType) {
+        try {
+            prescan(path, true);
+
+            File file = new File(path);
+            if (!file.exists() || !file.canRead()) {
+                return null;
+            }
+
+            // File.lastModified() is in milliseconds; the database stores
+            // seconds, so convert before handing it to the client.
+            long lastModifiedSeconds = file.lastModified() / 1000;
+
+            // always scan the file, so we can return the content://media Uri for existing files
+            return mClient.doScanFile(path, mimeType, lastModifiedSeconds, file.length(),
+                    false, true, MediaScanner.isNoMediaPath(path));
+        } catch (RemoteException e) {
+            Log.e(TAG, "RemoteException in MediaScanner.scanFile()", e);
+            return null;
+        } finally {
+            releaseResources();
+        }
+    }
+
+    /**
+     * Returns true for individual files the scanner should ignore:
+     * MacOS "._" resource forks and Windows Media Player album-art JPEGs
+     * (Folder.jpg, AlbumArtSmall.jpg, AlbumArt.jpg, AlbumArt_{...}_*.jpg).
+     * Directories are never no-media files here.
+     */
+    private static boolean isNoMediaFile(String path) {
+        File file = new File(path);
+        if (file.isDirectory()) return false;
+
+        // special case certain file names
+        // I use regionMatches() instead of substring() below
+        // to avoid memory allocation
+        int lastSlash = path.lastIndexOf('/');
+        if (lastSlash >= 0 && lastSlash + 2 < path.length()) {
+            // ignore those ._* files created by MacOS
+            if (path.regionMatches(lastSlash + 1, "._", 0, 2)) {
+                return true;
+            }
+
+            // ignore album art files created by Windows Media Player:
+            // Folder.jpg, AlbumArtSmall.jpg, AlbumArt_{...}_Large.jpg
+            // and AlbumArt_{...}_Small.jpg
+            if (path.regionMatches(true, path.length() - 4, ".jpg", 0, 4)) {
+                if (path.regionMatches(true, lastSlash + 1, "AlbumArt_{", 0, 10) ||
+                        path.regionMatches(true, lastSlash + 1, "AlbumArt.", 0, 9)) {
+                    return true;
+                }
+                // Exact-name checks: "AlbumArtSmall" + ".jpg" is 17 chars,
+                // "Folder" + ".jpg" is 10 chars.
+                int length = path.length() - lastSlash - 1;
+                if ((length == 17 && path.regionMatches(
+                        true, lastSlash + 1, "AlbumArtSmall", 0, 13)) ||
+                        (length == 10
+                         && path.regionMatches(true, lastSlash + 1, "Folder", 0, 6))) {
+                    return true;
+                }
+            }
+        }
+        return false;
+    }
+
+    // Caches of parent-directory -> "" marking directories already known to be
+    // hidden by a .nomedia file (mNoMediaPaths) or known to be visible
+    // (mMediaPaths). Both are guarded by synchronized (MediaScanner.class);
+    // the values are unused — only key membership matters.
+    private static HashMap<String,String> mNoMediaPaths = new HashMap<String,String>();
+    private static HashMap<String,String> mMediaPaths = new HashMap<String,String>();
+
+    /**
+     * Invalidates the cached .nomedia lookups. MediaProvider calls this when
+     * a .nomedia file is added or removed.
+     */
+    public static void clearMediaPathCache(boolean clearMediaPaths, boolean clearNoMediaPaths) {
+        synchronized (MediaScanner.class) {
+            if (clearNoMediaPaths) {
+                mNoMediaPaths.clear();
+            }
+            if (clearMediaPaths) {
+                mMediaPaths.clear();
+            }
+        }
+    }
+
+    /**
+     * Returns true when {@code path} should be hidden from media scanning:
+     * any component starts with a dot, some ancestor directory contains a
+     * .nomedia file (result cached per parent directory), or the file itself
+     * matches {@link #isNoMediaFile}.
+     */
+    public static boolean isNoMediaPath(String path) {
+        if (path == null) {
+            return false;
+        }
+        // return true if file or any parent directory has name starting with a dot
+        if (path.indexOf("/.") >= 0) {
+            return true;
+        }
+
+        // Renamed from the misleading "firstSlash": lastIndexOf returns the
+        // slash that separates the parent directory from the file name.
+        int lastSlash = path.lastIndexOf('/');
+        if (lastSlash <= 0) {
+            return false;
+        }
+        String parent = path.substring(0, lastSlash);
+
+        synchronized (MediaScanner.class) {
+            if (mNoMediaPaths.containsKey(parent)) {
+                return true;
+            } else if (!mMediaPaths.containsKey(parent)) {
+                // check to see if any parent directories have a ".nomedia" file
+                // start from 1 so we don't bother checking in the root directory
+                int offset = 1;
+                while (offset >= 0) {
+                    int slashIndex = path.indexOf('/', offset);
+                    if (slashIndex > offset) {
+                        slashIndex++; // move past slash
+                        File file = new File(path.substring(0, slashIndex) + ".nomedia");
+                        if (file.exists()) {
+                            // we have a .nomedia in one of the parent directories
+                            mNoMediaPaths.put(parent, "");
+                            return true;
+                        }
+                    }
+                    offset = slashIndex;
+                }
+                // No .nomedia found anywhere above; remember that.
+                mMediaPaths.put(parent, "");
+            }
+        }
+
+        return isNoMediaFile(path);
+    }
+
+    /**
+     * Scans a file freshly transferred over MTP. Non-media files get only a
+     * size/date update on their existing MTP row; playlists are resolved
+     * against the files table; everything else goes through the normal
+     * single-file scan with mMtpObjectHandle set so endFile can link the row.
+     */
+    public void scanMtpFile(String path, int objectHandle, int format) {
+        MediaFile.MediaFileType mediaFileType = MediaFile.getFileType(path);
+        int fileType = (mediaFileType == null ? 0 : mediaFileType.fileType);
+        File file = new File(path);
+        long lastModifiedSeconds = file.lastModified() / 1000;
+
+        if (!MediaFile.isAudioFileType(fileType) && !MediaFile.isVideoFileType(fileType) &&
+            !MediaFile.isImageFileType(fileType) && !MediaFile.isPlayListFileType(fileType) &&
+            !MediaFile.isDrmFileType(fileType)) {
+
+            // no need to use the media scanner, but we need to update last modified and file size
+            ContentValues values = new ContentValues();
+            values.put(Files.FileColumns.SIZE, file.length());
+            values.put(Files.FileColumns.DATE_MODIFIED, lastModifiedSeconds);
+            try {
+                String[] whereArgs = new String[] {  Integer.toString(objectHandle) };
+                mMediaProvider.update(Files.getMtpObjectsUri(mVolumeName), values,
+                        "_id=?", whereArgs);
+            } catch (RemoteException e) {
+                Log.e(TAG, "RemoteException in scanMtpFile", e);
+            }
+            return;
+        }
+
+        mMtpObjectHandle = objectHandle;
+        Cursor fileList = null;
+        try {
+            if (MediaFile.isPlayListFileType(fileType)) {
+                // build file cache so we can look up tracks in the playlist
+                prescan(null, true);
+
+                FileEntry entry = makeEntryFor(path);
+                if (entry != null) {
+                    fileList = mMediaProvider.query(mFilesUri,
+                            FILES_PRESCAN_PROJECTION, null, null, null, null);
+                    processPlayList(entry, fileList);
+                }
+            } else {
+                // MTP will create a file entry for us so we don't want to do it in prescan
+                prescan(path, false);
+
+                // always scan the file, so we can return the content://media Uri for existing files
+                mClient.doScanFile(path, mediaFileType.mimeType, lastModifiedSeconds, file.length(),
+                    (format == MtpConstants.FORMAT_ASSOCIATION), true, isNoMediaPath(path));
+            }
+        } catch (RemoteException e) {
+            Log.e(TAG, "RemoteException in MediaScanner.scanFile()", e);
+        } finally {
+            // Always clear the handle so later non-MTP scans do not link rows.
+            mMtpObjectHandle = 0;
+            if (fileList != null) {
+                fileList.close();
+            }
+            releaseResources();
+        }
+    }
+
+    /**
+     * Looks up the database row for {@code path} and returns a FileEntry
+     * describing it, or null if no row exists (or the provider could not be
+     * reached).
+     */
+    FileEntry makeEntryFor(String path) {
+        String where;
+        String[] selectionArgs;
+
+        Cursor c = null;
+        try {
+            where = Files.FileColumns.DATA + "=?";
+            selectionArgs = new String[] { path };
+            c = mMediaProvider.query(mFilesUriNoNotify, FILES_PRESCAN_PROJECTION,
+                    where, selectionArgs, null, null);
+            // query() may return null (see prescan); guard before moving the
+            // cursor to avoid a NullPointerException.
+            if (c != null && c.moveToFirst()) {
+                long rowId = c.getLong(FILES_PRESCAN_ID_COLUMN_INDEX);
+                int format = c.getInt(FILES_PRESCAN_FORMAT_COLUMN_INDEX);
+                long lastModified = c.getLong(FILES_PRESCAN_DATE_MODIFIED_COLUMN_INDEX);
+                return new FileEntry(rowId, path, lastModified, format);
+            }
+        } catch (RemoteException e) {
+            // fall through: callers treat null as "not found"
+        } finally {
+            if (c != null) {
+                c.close();
+            }
+        }
+        return null;
+    }
+
+    /**
+     * Returns the number of path components that match, case-insensitively,
+     * comparing the two paths from the right (file name first, then parent
+     * directories). Both '/' and '\\' are treated as separators so playlist
+     * entries written on Windows still match.
+     */
+    private int matchPaths(String path1, String path2) {
+        int result = 0;
+        int end1 = path1.length();
+        int end2 = path2.length();
+
+        while (end1 > 0 && end2 > 0) {
+            // Find the start of the current (rightmost unmatched) component,
+            // whichever separator style appears later.
+            int slash1 = path1.lastIndexOf('/', end1 - 1);
+            int slash2 = path2.lastIndexOf('/', end2 - 1);
+            int backSlash1 = path1.lastIndexOf('\\', end1 - 1);
+            int backSlash2 = path2.lastIndexOf('\\', end2 - 1);
+            int start1 = (slash1 > backSlash1 ? slash1 : backSlash1);
+            int start2 = (slash2 > backSlash2 ? slash2 : backSlash2);
+            if (start1 < 0) start1 = 0; else start1++;
+            if (start2 < 0) start2 = 0; else start2++;
+            int length = end1 - start1;
+            if (end2 - start2 != length) break;
+            if (path1.regionMatches(true, start1, path2, start2, length)) {
+                result++;
+                // Step past this component and its separator.
+                end1 = start1 - 1;
+                end2 = start2 - 1;
+            } else break;
+        }
+
+        return result;
+    }
+
+    /**
+     * Offers one files-table row (rowId, data path) to every unmatched cached
+     * playlist entry, upgrading an entry's best match when this row's path
+     * agrees on more trailing components than the previous best.
+     *
+     * @return true when every entry already has an exact match, so the caller
+     *         can stop iterating over the files table
+     */
+    private boolean matchEntries(long rowId, String data) {
+
+        int len = mPlaylistEntries.size();
+        boolean done = true;
+        for (int i = 0; i < len; i++) {
+            PlaylistEntry entry = mPlaylistEntries.get(i);
+            if (entry.bestmatchlevel == Integer.MAX_VALUE) {
+                continue; // this entry has been matched already
+            }
+            done = false;
+            if (data.equalsIgnoreCase(entry.path)) {
+                // Exact (case-insensitive) path match: mark as final.
+                entry.bestmatchid = rowId;
+                entry.bestmatchlevel = Integer.MAX_VALUE;
+                continue; // no need for path matching
+            }
+
+            int matchLength = matchPaths(data, entry.path);
+            if (matchLength > entry.bestmatchlevel) {
+                entry.bestmatchid = rowId;
+                entry.bestmatchlevel = matchLength;
+            }
+        }
+        return done;
+    }
+
+    /**
+     * Normalizes one playlist line into an absolute path and queues it in
+     * mPlaylistEntries for later matching against the files table.
+     */
+    private void cachePlaylistEntry(String line, String playListDirectory) {
+        // Measure the line with trailing whitespace stripped.
+        int entryLength = line.length();
+        while (entryLength > 0 && Character.isWhitespace(line.charAt(entryLength - 1))) {
+            entryLength--;
+        }
+        // A plausible path is at least 3 characters long; this also guards
+        // the charAt(1)/charAt(2) accesses below.
+        if (entryLength < 3) {
+            return;
+        }
+        if (entryLength < line.length()) {
+            line = line.substring(0, entryLength);
+        }
+
+        // Absolute if it is a Unix path ("/...") or a DOS path ("C:\...").
+        char firstChar = line.charAt(0);
+        boolean fullPath = firstChar == '/'
+                || (Character.isLetter(firstChar)
+                        && line.charAt(1) == ':' && line.charAt(2) == '\\');
+        // Relative entries are resolved against the playlist's own directory.
+        if (!fullPath) {
+            line = playListDirectory + line;
+        }
+
+        PlaylistEntry entry = new PlaylistEntry();
+        entry.path = line;
+        //FIXME - should we look for "../" within the path?
+        mPlaylistEntries.add(entry);
+    }
+
+    /**
+     * Resolves the entries cached by cachePlaylistEntry against the files
+     * table cursor, then inserts one playlist-member row per matched entry in
+     * playlist order. Clears the entry cache when done.
+     */
+    private void processCachedPlaylist(Cursor fileList, ContentValues values, Uri playlistUri) {
+        // Rewind so repeated playlists can reuse the same cursor.
+        fileList.moveToPosition(-1);
+        while (fileList.moveToNext()) {
+            long rowId = fileList.getLong(FILES_PRESCAN_ID_COLUMN_INDEX);
+            String data = fileList.getString(FILES_PRESCAN_PATH_COLUMN_INDEX);
+            if (matchEntries(rowId, data)) {
+                break;
+            }
+        }
+
+        int len = mPlaylistEntries.size();
+        int index = 0;
+        for (int i = 0; i < len; i++) {
+            PlaylistEntry entry = mPlaylistEntries.get(i);
+            // bestmatchlevel 0 means no file matched at all; skip those.
+            if (entry.bestmatchlevel > 0) {
+                try {
+                    values.clear();
+                    values.put(MediaStore.Audio.Playlists.Members.PLAY_ORDER, Integer.valueOf(index));
+                    values.put(MediaStore.Audio.Playlists.Members.AUDIO_ID, Long.valueOf(entry.bestmatchid));
+                    mMediaProvider.insert(playlistUri, values);
+                    index++;
+                } catch (RemoteException e) {
+                    Log.e(TAG, "RemoteException in MediaScanner.processCachedPlaylist()", e);
+                    return;
+                }
+            }
+        }
+        mPlaylistEntries.clear();
+    }
+
+    /**
+     * Parses an .m3u playlist: every non-comment line names a track (absolute
+     * path or relative to the playlist's directory), then resolves the cached
+     * entries against the provided files-table cursor.
+     */
+    private void processM3uPlayList(String path, String playListDirectory, Uri uri,
+            ContentValues values, Cursor fileList) {
+        BufferedReader reader = null;
+        try {
+            File f = new File(path);
+            if (f.exists()) {
+                reader = new BufferedReader(
+                        new InputStreamReader(new FileInputStream(f)), 8192);
+                mPlaylistEntries.clear();
+                String line;
+                while ((line = reader.readLine()) != null) {
+                    // Ignore comment lines, which begin with '#'.
+                    if (line.length() > 0 && line.charAt(0) != '#') {
+                        cachePlaylistEntry(line, playListDirectory);
+                    }
+                }
+
+                processCachedPlaylist(fileList, values, uri);
+            }
+        } catch (IOException e) {
+            Log.e(TAG, "IOException in MediaScanner.processM3uPlayList()", e);
+        } finally {
+            try {
+                if (reader != null) {
+                    reader.close();
+                }
+            } catch (IOException e) {
+                Log.e(TAG, "IOException in MediaScanner.processM3uPlayList()", e);
+            }
+        }
+    }
+
+    /**
+     * Parses a .pls playlist: lines of the form "FileN=path" name the tracks,
+     * then resolves the cached entries against the provided files-table
+     * cursor.
+     */
+    private void processPlsPlayList(String path, String playListDirectory, Uri uri,
+            ContentValues values, Cursor fileList) {
+        BufferedReader reader = null;
+        try {
+            File f = new File(path);
+            if (f.exists()) {
+                reader = new BufferedReader(
+                        new InputStreamReader(new FileInputStream(f)), 8192);
+                mPlaylistEntries.clear();
+                String line;
+                while ((line = reader.readLine()) != null) {
+                    // Track entries look like "File1=/path/to/song.mp3";
+                    // everything else ([playlist] header, Title/Length) is skipped.
+                    if (line.startsWith("File")) {
+                        int equals = line.indexOf('=');
+                        if (equals > 0) {
+                            cachePlaylistEntry(line.substring(equals + 1), playListDirectory);
+                        }
+                    }
+                }
+
+                processCachedPlaylist(fileList, values, uri);
+            }
+        } catch (IOException e) {
+            Log.e(TAG, "IOException in MediaScanner.processPlsPlayList()", e);
+        } finally {
+            try {
+                if (reader != null) {
+                    reader.close();
+                }
+            } catch (IOException e) {
+                Log.e(TAG, "IOException in MediaScanner.processPlsPlayList()", e);
+            }
+        }
+    }
+
+    /**
+     * SAX element listener for .wpl (Windows Media Player) playlists, which
+     * are SMIL documents: listens for smil/body/seq/media elements and caches
+     * each "src" attribute as a playlist entry.
+     * NOTE(review): the uri and fileList constructor parameters are unused —
+     * kept for signature compatibility with the caller.
+     */
+    class WplHandler implements ElementListener {
+
+        final ContentHandler handler;
+        String playListDirectory;
+
+        public WplHandler(String playListDirectory, Uri uri, Cursor fileList) {
+            this.playListDirectory = playListDirectory;
+
+            RootElement root = new RootElement("smil");
+            Element body = root.getChild("body");
+            Element seq = body.getChild("seq");
+            Element media = seq.getChild("media");
+            media.setElementListener(this);
+
+            this.handler = root.getContentHandler();
+        }
+
+        @Override
+        public void start(Attributes attributes) {
+            // Each <media src="..."> names one track.
+            String path = attributes.getValue("", "src");
+            if (path != null) {
+                cachePlaylistEntry(path, playListDirectory);
+            }
+        }
+
+        @Override
+        public void end() {
+        }
+
+        ContentHandler getContentHandler() {
+            return handler;
+        }
+    }
+
+    /**
+     * Parses a .wpl (Windows Media Player) playlist — a SMIL XML document —
+     * caching each media element's src, then resolves the cached entries
+     * against the provided files-table cursor.
+     */
+    private void processWplPlayList(String path, String playListDirectory, Uri uri,
+            ContentValues values, Cursor fileList) {
+        FileInputStream fis = null;
+        try {
+            File f = new File(path);
+            if (f.exists()) {
+                fis = new FileInputStream(f);
+
+                mPlaylistEntries.clear();
+                Xml.parse(fis, Xml.findEncodingByName("UTF-8"),
+                        new WplHandler(playListDirectory, uri, fileList).getContentHandler());
+
+                processCachedPlaylist(fileList, values, uri);
+            }
+        } catch (SAXException e) {
+            // Log through the scanner's tag rather than printStackTrace(),
+            // consistent with the other playlist parsers.
+            Log.e(TAG, "SAXException in MediaScanner.processWplPlayList()", e);
+        } catch (IOException e) {
+            Log.e(TAG, "IOException in MediaScanner.processWplPlayList()", e);
+        } finally {
+            try {
+                if (fis != null)
+                    fis.close();
+            } catch (IOException e) {
+                Log.e(TAG, "IOException in MediaScanner.processWplPlayList()", e);
+            }
+        }
+    }
+
+    /**
+     * Creates or refreshes the playlists-table row for {@code entry}, then
+     * (re)builds its membership by dispatching to the parser matching the
+     * playlist format (m3u/pls/wpl).
+     *
+     * @throws IllegalArgumentException when the path has no '/' separator
+     */
+    private void processPlayList(FileEntry entry, Cursor fileList) throws RemoteException {
+        String path = entry.mPath;
+        ContentValues values = new ContentValues();
+        int lastSlash = path.lastIndexOf('/');
+        if (lastSlash < 0) throw new IllegalArgumentException("bad path " + path);
+        Uri uri, membersUri;
+        long rowId = entry.mRowId;
+
+        // make sure we have a name
+        // NOTE(review): `values` was created empty just above, so both
+        // getAsString lookups always return null and the name is always
+        // derived from the file name below — dead lookups kept as-is.
+        String name = values.getAsString(MediaStore.Audio.Playlists.NAME);
+        if (name == null) {
+            name = values.getAsString(MediaStore.MediaColumns.TITLE);
+            if (name == null) {
+                // extract name from file name
+                int lastDot = path.lastIndexOf('.');
+                name = (lastDot < 0 ? path.substring(lastSlash + 1)
+                        : path.substring(lastSlash + 1, lastDot));
+            }
+        }
+
+        values.put(MediaStore.Audio.Playlists.NAME, name);
+        values.put(MediaStore.Audio.Playlists.DATE_MODIFIED, entry.mLastModified);
+
+        if (rowId == 0) {
+            // New playlist row; remember its members Uri for the parsers.
+            values.put(MediaStore.Audio.Playlists.DATA, path);
+            uri = mMediaProvider.insert(mPlaylistsUri, values);
+            rowId = ContentUris.parseId(uri);
+            membersUri = Uri.withAppendedPath(uri, Playlists.Members.CONTENT_DIRECTORY);
+        } else {
+            uri = ContentUris.withAppendedId(mPlaylistsUri, rowId);
+            mMediaProvider.update(uri, values, null, null);
+
+            // delete members of existing playlist
+            membersUri = Uri.withAppendedPath(uri, Playlists.Members.CONTENT_DIRECTORY);
+            mMediaProvider.delete(membersUri, null, null);
+        }
+
+        String playListDirectory = path.substring(0, lastSlash + 1);
+        MediaFile.MediaFileType mediaFileType = MediaFile.getFileType(path);
+        int fileType = (mediaFileType == null ? 0 : mediaFileType.fileType);
+
+        if (fileType == MediaFile.FILE_TYPE_M3U) {
+            processM3uPlayList(path, playListDirectory, membersUri, values, fileList);
+        } else if (fileType == MediaFile.FILE_TYPE_PLS) {
+            processPlsPlayList(path, playListDirectory, membersUri, values, fileList);
+        } else if (fileType == MediaFile.FILE_TYPE_WPL) {
+            processWplPlayList(path, playListDirectory, membersUri, values, fileList);
+        }
+    }
+
+    /**
+     * Processes every playlist discovered during the scan, skipping those
+     * whose modification time is unchanged since the last scan. Entries are
+     * matched against the audio rows of the files table.
+     */
+    private void processPlayLists() throws RemoteException {
+        Iterator<FileEntry> iterator = mPlayLists.iterator();
+        Cursor fileList = null;
+        try {
+            // use the files uri and projection because we need the format column,
+            // but restrict the query to just audio files
+            fileList = mMediaProvider.query(mFilesUri, FILES_PRESCAN_PROJECTION,
+                    "media_type=2", null, null, null);
+            while (iterator.hasNext()) {
+                FileEntry entry = iterator.next();
+                // only process playlist files if they are new or have been modified since the last scan
+                if (entry.mLastModifiedChanged) {
+                    processPlayList(entry, fileList);
+                }
+            }
+        } catch (RemoteException e1) {
+            // Previously swallowed silently; log it so playlist-scan failures
+            // are visible, consistent with the rest of this file.
+            Log.e(TAG, "RemoteException in MediaScanner.processPlayLists()", e1);
+        } finally {
+            if (fileList != null) {
+                fileList.close();
+            }
+        }
+    }
+
+    // Native entry points: the directory walk and per-file metadata
+    // extraction are implemented in native code and call back into the
+    // supplied MediaScannerClient.
+    private native void processDirectory(String path, MediaScannerClient client);
+    private native void processFile(String path, String mimeType, MediaScannerClient client);
+    private native void setLocale(String locale);
+
+    // Extracts embedded album art from an already-open media file descriptor.
+    public native byte[] extractAlbumArt(FileDescriptor fd);
+
+    // Native lifecycle hooks (class init, per-instance setup, teardown).
+    private static native final void native_init();
+    private native final void native_setup();
+    private native final void native_finalize();
+
+    /**
+     * Releases the provider client and native state. Idempotent: the
+     * AtomicBoolean guard ensures the teardown runs only once even if close()
+     * is called again (e.g. from finalize()).
+     */
+    @Override
+    public void close() {
+        mCloseGuard.close();
+        if (mClosed.compareAndSet(false, true)) {
+            mMediaProvider.close();
+            native_finalize();
+        }
+    }
+
+    /**
+     * Safety net: warns (via CloseGuard) if the scanner was never explicitly
+     * closed, then closes it before finalization completes.
+     */
+    @Override
+    protected void finalize() throws Throwable {
+        try {
+            if (mCloseGuard != null) {
+                mCloseGuard.warnIfOpen();
+            }
+
+            close();
+        } finally {
+            super.finalize();
+        }
+    }
+}
diff --git a/android/media/MediaScannerClient.java b/android/media/MediaScannerClient.java
new file mode 100644
index 00000000..b3266714
--- /dev/null
+++ b/android/media/MediaScannerClient.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2007 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * {@hide}
+ */
/**
 * Callback interface through which the native media scanner reports files and
 * extracted metadata back to the Java layer.
 * {@hide}
 */
public interface MediaScannerClient
{
    /**
     * Called by native code for each entry encountered during a scan.
     *
     * @param path         absolute path of the entry
     * @param lastModified last-modified timestamp of the entry (units not visible
     *                     here — presumably seconds; confirm against the native caller)
     * @param fileSize     size of the entry in bytes
     * @param isDirectory  true if the entry is a directory rather than a file
     * @param noMedia      presumably true when the entry should be treated as
     *                     non-media (e.g. under a .nomedia marker) — confirm
     *                     against the native caller
     */
    public void scanFile(String path, long lastModified, long fileSize,
            boolean isDirectory, boolean noMedia);

    /**
     * Called by native code to return metadata extracted from media files.
     */
    public void handleStringTag(String name, String value);

    /**
     * Called by native code to return mime type extracted from DRM content.
     */
    public void setMimeType(String mimeType);
}
diff --git a/android/media/MediaScannerConnection.java b/android/media/MediaScannerConnection.java
new file mode 100644
index 00000000..471fa2c4
--- /dev/null
+++ b/android/media/MediaScannerConnection.java
@@ -0,0 +1,272 @@
+/*
+ * Copyright (C) 2008 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.content.ComponentName;
+import android.content.Context;
+import android.content.Intent;
+import android.content.ServiceConnection;
+import android.media.IMediaScannerListener;
+import android.media.IMediaScannerService;
+import android.net.Uri;
+import android.os.IBinder;
+import android.os.RemoteException;
+import android.util.Log;
+
+
+/**
+ * MediaScannerConnection provides a way for applications to pass a
+ * newly created or downloaded media file to the media scanner service.
+ * The media scanner service will read metadata from the file and add
+ * the file to the media content provider.
+ * The MediaScannerConnectionClient provides an interface for the
+ * media scanner service to return the Uri for a newly scanned file
+ * to the client of the MediaScannerConnection class.
+ */
+public class MediaScannerConnection implements ServiceConnection {
+
+ private static final String TAG = "MediaScannerConnection";
+
+ private Context mContext;
+ private MediaScannerConnectionClient mClient;
+ private IMediaScannerService mService;
+ private boolean mConnected; // true if connect() has been called since last disconnect()
+
+ private final IMediaScannerListener.Stub mListener = new IMediaScannerListener.Stub() {
+ public void scanCompleted(String path, Uri uri) {
+ MediaScannerConnectionClient client = mClient;
+ if (client != null) {
+ client.onScanCompleted(path, uri);
+ }
+ }
+ };
+
+ /**
+ * Interface for notifying clients of the result of scanning a
+ * requested media file.
+ */
+ public interface OnScanCompletedListener {
+ /**
+ * Called to notify the client when the media scanner has finished
+ * scanning a file.
+ * @param path the path to the file that has been scanned.
+ * @param uri the Uri for the file if the scanning operation succeeded
+ * and the file was added to the media database, or null if scanning failed.
+ */
+ public void onScanCompleted(String path, Uri uri);
+ }
+
+ /**
+ * An interface for notifying clients of MediaScannerConnection
+ * when a connection to the MediaScanner service has been established
+ * and when the scanning of a file has completed.
+ */
+ public interface MediaScannerConnectionClient extends OnScanCompletedListener {
+ /**
+ * Called to notify the client when a connection to the
+ * MediaScanner service has been established.
+ */
+ public void onMediaScannerConnected();
+
+ /**
+ * Called to notify the client when the media scanner has finished
+ * scanning a file.
+ * @param path the path to the file that has been scanned.
+ * @param uri the Uri for the file if the scanning operation succeeded
+ * and the file was added to the media database, or null if scanning failed.
+ */
+ public void onScanCompleted(String path, Uri uri);
+ }
+
+ /**
+ * Constructs a new MediaScannerConnection object.
+ * @param context the Context object, required for establishing a connection to
+ * the media scanner service.
+ * @param client an optional object implementing the MediaScannerConnectionClient
+ * interface, for receiving notifications from the media scanner.
+ */
+ public MediaScannerConnection(Context context, MediaScannerConnectionClient client) {
+ mContext = context;
+ mClient = client;
+ }
+
+ /**
+ * Initiates a connection to the media scanner service.
+ * {@link MediaScannerConnectionClient#onMediaScannerConnected()}
+ * will be called when the connection is established.
+ */
+ public void connect() {
+ synchronized (this) {
+ if (!mConnected) {
+ Intent intent = new Intent(IMediaScannerService.class.getName());
+ intent.setComponent(
+ new ComponentName("com.android.providers.media",
+ "com.android.providers.media.MediaScannerService"));
+ mContext.bindService(intent, this, Context.BIND_AUTO_CREATE);
+ mConnected = true;
+ }
+ }
+ }
+
+ /**
+ * Releases the connection to the media scanner service.
+ */
+ public void disconnect() {
+ synchronized (this) {
+ if (mConnected) {
+ if (false) {
+ Log.v(TAG, "Disconnecting from Media Scanner");
+ }
+ try {
+ mContext.unbindService(this);
+ if (mClient instanceof ClientProxy) {
+ mClient = null;
+ }
+ mService = null;
+ } catch (IllegalArgumentException ex) {
+ if (false) {
+ Log.v(TAG, "disconnect failed: " + ex);
+ }
+ }
+ mConnected = false;
+ }
+ }
+ }
+
+ /**
+ * Returns whether we are connected to the media scanner service
+ * @return true if we are connected, false otherwise
+ */
+ public synchronized boolean isConnected() {
+ return (mService != null && mConnected);
+ }
+
+ /**
+ * Requests the media scanner to scan a file.
+ * Success or failure of the scanning operation cannot be determined until
+ * {@link MediaScannerConnectionClient#onScanCompleted(String, Uri)} is called.
+ *
+ * @param path the path to the file to be scanned.
+ * @param mimeType an optional mimeType for the file.
+ * If mimeType is null, then the mimeType will be inferred from the file extension.
+ */
+ public void scanFile(String path, String mimeType) {
+ synchronized (this) {
+ if (mService == null || !mConnected) {
+ throw new IllegalStateException("not connected to MediaScannerService");
+ }
+ try {
+ if (false) {
+ Log.v(TAG, "Scanning file " + path);
+ }
+ mService.requestScanFile(path, mimeType, mListener);
+ } catch (RemoteException e) {
+ if (false) {
+ Log.d(TAG, "Failed to scan file " + path);
+ }
+ }
+ }
+ }
+
+ static class ClientProxy implements MediaScannerConnectionClient {
+ final String[] mPaths;
+ final String[] mMimeTypes;
+ final OnScanCompletedListener mClient;
+ MediaScannerConnection mConnection;
+ int mNextPath;
+
+ ClientProxy(String[] paths, String[] mimeTypes, OnScanCompletedListener client) {
+ mPaths = paths;
+ mMimeTypes = mimeTypes;
+ mClient = client;
+ }
+
+ public void onMediaScannerConnected() {
+ scanNextPath();
+ }
+
+ public void onScanCompleted(String path, Uri uri) {
+ if (mClient != null) {
+ mClient.onScanCompleted(path, uri);
+ }
+ scanNextPath();
+ }
+
+ void scanNextPath() {
+ if (mNextPath >= mPaths.length) {
+ mConnection.disconnect();
+ mConnection = null;
+ return;
+ }
+ String mimeType = mMimeTypes != null ? mMimeTypes[mNextPath] : null;
+ mConnection.scanFile(mPaths[mNextPath], mimeType);
+ mNextPath++;
+ }
+ }
+
+ /**
+ * Convenience for constructing a {@link MediaScannerConnection}, calling
+ * {@link #connect} on it, and calling {@link #scanFile} with the given
+ * <var>path</var> and <var>mimeType</var> when the connection is
+ * established.
+ * @param context The caller's Context, required for establishing a connection to
+ * the media scanner service.
+ * Success or failure of the scanning operation cannot be determined until
+ * {@link MediaScannerConnectionClient#onScanCompleted(String, Uri)} is called.
+ * @param paths Array of paths to be scanned.
+ * @param mimeTypes Optional array of MIME types for each path.
+ * If mimeType is null, then the mimeType will be inferred from the file extension.
+ * @param callback Optional callback through which you can receive the
+ * scanned URI and MIME type; If null, the file will be scanned but
+ * you will not get a result back.
+ * @see #scanFile(String, String)
+ */
+ public static void scanFile(Context context, String[] paths, String[] mimeTypes,
+ OnScanCompletedListener callback) {
+ ClientProxy client = new ClientProxy(paths, mimeTypes, callback);
+ MediaScannerConnection connection = new MediaScannerConnection(context, client);
+ client.mConnection = connection;
+ connection.connect();
+ }
+
+ /**
+ * Part of the ServiceConnection interface. Do not call.
+ */
+ public void onServiceConnected(ComponentName className, IBinder service) {
+ if (false) {
+ Log.v(TAG, "Connected to Media Scanner");
+ }
+ synchronized (this) {
+ mService = IMediaScannerService.Stub.asInterface(service);
+ if (mService != null && mClient != null) {
+ mClient.onMediaScannerConnected();
+ }
+ }
+ }
+
+ /**
+ * Part of the ServiceConnection interface. Do not call.
+ */
+ public void onServiceDisconnected(ComponentName className) {
+ if (false) {
+ Log.v(TAG, "Disconnected from Media Scanner");
+ }
+ synchronized (this) {
+ mService = null;
+ }
+ }
+}
diff --git a/android/media/MediaSync.java b/android/media/MediaSync.java
new file mode 100644
index 00000000..799f4bf4
--- /dev/null
+++ b/android/media/MediaSync.java
@@ -0,0 +1,643 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.media.AudioTrack;
+import android.media.PlaybackParams;
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Message;
+import android.util.Log;
+import android.view.Surface;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.nio.ByteBuffer;
+import java.util.concurrent.TimeUnit;
+import java.util.LinkedList;
+import java.util.List;
+
+/**
+ * MediaSync class can be used to synchronously play audio and video streams.
+ * It can be used to play audio-only or video-only stream, too.
+ *
+ * <p>MediaSync is generally used like this:
+ * <pre>
+ * MediaSync sync = new MediaSync();
+ * sync.setSurface(surface);
+ * Surface inputSurface = sync.createInputSurface();
+ * ...
+ * // MediaCodec videoDecoder = ...;
+ * videoDecoder.configure(format, inputSurface, ...);
+ * ...
+ * sync.setAudioTrack(audioTrack);
+ * sync.setCallback(new MediaSync.Callback() {
+ * {@literal @Override}
+ * public void onAudioBufferConsumed(MediaSync sync, ByteBuffer audioBuffer, int bufferId) {
+ * ...
+ * }
+ * }, null);
+ * // This needs to be done since sync is paused on creation.
+ * sync.setPlaybackParams(new PlaybackParams().setSpeed(1.f));
+ *
+ * for (;;) {
+ * ...
+ * // send video frames to surface for rendering, e.g., call
+ * // videoDecoder.releaseOutputBuffer(videoOutputBufferIx, videoPresentationTimeNs);
+ * // More details are available as below.
+ * ...
+ * sync.queueAudio(audioByteBuffer, bufferId, audioPresentationTimeUs); // non-blocking.
+ * // The audioByteBuffer and bufferId will be returned via callback.
+ * // More details are available as below.
+ * ...
+ * ...
+ * }
+ * sync.setPlaybackParams(new PlaybackParams().setSpeed(0.f));
+ * sync.release();
+ * sync = null;
+ *
+ * // The following code snippet illustrates how video/audio raw frames are created by
+ * // MediaCodec's, how they are fed to MediaSync and how they are returned by MediaSync.
+ * // This is the callback from MediaCodec.
+ * onOutputBufferAvailable(MediaCodec codec, int bufferId, BufferInfo info) {
+ * // ...
+ * if (codec == videoDecoder) {
+ * // surface timestamp must contain media presentation time in nanoseconds.
+ * codec.releaseOutputBuffer(bufferId, 1000 * info.presentationTime);
+ * } else {
+ * ByteBuffer audioByteBuffer = codec.getOutputBuffer(bufferId);
+ * sync.queueAudio(audioByteBuffer, bufferId, info.presentationTime);
+ * }
+ * // ...
+ * }
+ *
+ * // This is the callback from MediaSync.
+ * onAudioBufferConsumed(MediaSync sync, ByteBuffer buffer, int bufferId) {
+ * // ...
+ * audioDecoder.releaseBuffer(bufferId, false);
+ * // ...
+ * }
+ *
+ * </pre>
+ *
+ * The client needs to configure corresponding sink by setting the Surface and/or AudioTrack
+ * based on the stream type it will play.
+ * <p>
+ * For video, the client needs to call {@link #createInputSurface} to obtain a surface on
+ * which it will render video frames.
+ * <p>
+ * For audio, the client needs to set up audio track correctly, e.g., using {@link
+ * AudioTrack#MODE_STREAM}. The audio buffers are sent to MediaSync directly via {@link
+ * #queueAudio}, and are returned to the client via {@link Callback#onAudioBufferConsumed}
+ * asynchronously. The client should not modify an audio buffer till it's returned.
+ * <p>
+ * The client can optionally pre-fill audio/video buffers by setting playback rate to 0.0,
+ * and then feed audio/video buffers to corresponding components. This can reduce possible
+ * initial underrun.
+ * <p>
+ */
+public final class MediaSync {
+ /**
+ * MediaSync callback interface. Used to notify the user asynchronously
+ * of various MediaSync events.
+ */
+ public static abstract class Callback {
+ /**
+ * Called when returning an audio buffer which has been consumed.
+ *
+ * @param sync The MediaSync object.
+ * @param audioBuffer The returned audio buffer.
+ * @param bufferId The ID associated with audioBuffer as passed into
+ * {@link MediaSync#queueAudio}.
+ */
+ public abstract void onAudioBufferConsumed(
+ @NonNull MediaSync sync, @NonNull ByteBuffer audioBuffer, int bufferId);
+ }
+
+ /** Audio track failed.
+ * @see android.media.MediaSync.OnErrorListener
+ */
+ public static final int MEDIASYNC_ERROR_AUDIOTRACK_FAIL = 1;
+
+ /** The surface failed to handle video buffers.
+ * @see android.media.MediaSync.OnErrorListener
+ */
+ public static final int MEDIASYNC_ERROR_SURFACE_FAIL = 2;
+
+ /**
+ * Interface definition of a callback to be invoked when there
+ * has been an error during an asynchronous operation (other errors
+ * will throw exceptions at method call time).
+ */
+ public interface OnErrorListener {
+ /**
+ * Called to indicate an error.
+ *
+ * @param sync The MediaSync the error pertains to
+ * @param what The type of error that has occurred:
+ * <ul>
+ * <li>{@link #MEDIASYNC_ERROR_AUDIOTRACK_FAIL}
+ * <li>{@link #MEDIASYNC_ERROR_SURFACE_FAIL}
+ * </ul>
+ * @param extra an extra code, specific to the error. Typically
+ * implementation dependent.
+ */
+ void onError(@NonNull MediaSync sync, int what, int extra);
+ }
+
+ private static final String TAG = "MediaSync";
+
+ private static final int EVENT_CALLBACK = 1;
+ private static final int EVENT_SET_CALLBACK = 2;
+
+ private static final int CB_RETURN_AUDIO_BUFFER = 1;
+
+ private static class AudioBuffer {
+ public ByteBuffer mByteBuffer;
+ public int mBufferIndex;
+ long mPresentationTimeUs;
+
+ public AudioBuffer(@NonNull ByteBuffer byteBuffer, int bufferId,
+ long presentationTimeUs) {
+ mByteBuffer = byteBuffer;
+ mBufferIndex = bufferId;
+ mPresentationTimeUs = presentationTimeUs;
+ }
+ }
+
+ private final Object mCallbackLock = new Object();
+ private Handler mCallbackHandler = null;
+ private MediaSync.Callback mCallback = null;
+
+ private final Object mOnErrorListenerLock = new Object();
+ private Handler mOnErrorListenerHandler = null;
+ private MediaSync.OnErrorListener mOnErrorListener = null;
+
+ private Thread mAudioThread = null;
+ // Created on mAudioThread when mAudioThread is started. When used on user thread, they should
+ // be guarded by checking mAudioThread.
+ private Handler mAudioHandler = null;
+ private Looper mAudioLooper = null;
+
+ private final Object mAudioLock = new Object();
+ private AudioTrack mAudioTrack = null;
+ private List<AudioBuffer> mAudioBuffers = new LinkedList<AudioBuffer>();
+ // this is only used for paused/running decisions, so it is not affected by clock drift
+ private float mPlaybackRate = 0.0f;
+
+ private long mNativeContext;
+
+ /**
+ * Class constructor. On creation, MediaSync is paused, i.e., playback rate is 0.0f.
+ */
+ public MediaSync() {
+ native_setup();
+ }
+
+ private native final void native_setup();
+
+ @Override
+ protected void finalize() {
+ native_finalize();
+ }
+
+ private native final void native_finalize();
+
+ /**
+ * Make sure you call this when you're done to free up any opened
+ * component instance instead of relying on the garbage collector
+ * to do this for you at some point in the future.
+ */
+ public final void release() {
+ returnAudioBuffers();
+ if (mAudioThread != null) {
+ if (mAudioLooper != null) {
+ mAudioLooper.quit();
+ }
+ }
+ setCallback(null, null);
+ native_release();
+ }
+
+ private native final void native_release();
+
+ /**
+ * Sets an asynchronous callback for actionable MediaSync events.
+ * <p>
+ * This method can be called multiple times to update a previously set callback. If the
+ * handler is changed, undelivered notifications scheduled for the old handler may be dropped.
+ * <p>
+ * <b>Do not call this inside callback.</b>
+ *
+ * @param cb The callback that will run. Use {@code null} to stop receiving callbacks.
+ * @param handler The Handler that will run the callback. Use {@code null} to use MediaSync's
+ * internal handler if it exists.
+ */
+ public void setCallback(@Nullable /* MediaSync. */ Callback cb, @Nullable Handler handler) {
+ synchronized(mCallbackLock) {
+ if (handler != null) {
+ mCallbackHandler = handler;
+ } else {
+ Looper looper;
+ if ((looper = Looper.myLooper()) == null) {
+ looper = Looper.getMainLooper();
+ }
+ if (looper == null) {
+ mCallbackHandler = null;
+ } else {
+ mCallbackHandler = new Handler(looper);
+ }
+ }
+
+ mCallback = cb;
+ }
+ }
+
+ /**
+ * Sets an asynchronous callback for error events.
+ * <p>
+ * This method can be called multiple times to update a previously set listener. If the
+ * handler is changed, undelivered notifications scheduled for the old handler may be dropped.
+ * <p>
+ * <b>Do not call this inside callback.</b>
+ *
+ * @param listener The callback that will run. Use {@code null} to stop receiving callbacks.
+ * @param handler The Handler that will run the callback. Use {@code null} to use MediaSync's
+ * internal handler if it exists.
+ */
+ public void setOnErrorListener(@Nullable /* MediaSync. */ OnErrorListener listener,
+ @Nullable Handler handler) {
+ synchronized(mOnErrorListenerLock) {
+ if (handler != null) {
+ mOnErrorListenerHandler = handler;
+ } else {
+ Looper looper;
+ if ((looper = Looper.myLooper()) == null) {
+ looper = Looper.getMainLooper();
+ }
+ if (looper == null) {
+ mOnErrorListenerHandler = null;
+ } else {
+ mOnErrorListenerHandler = new Handler(looper);
+ }
+ }
+
+ mOnErrorListener = listener;
+ }
+ }
+
+ /**
+ * Sets the output surface for MediaSync.
+ * <p>
+ * Currently, this is only supported in the Initialized state.
+ *
+ * @param surface Specify a surface on which to render the video data.
+ * @throws IllegalArgumentException if the surface has been released, is invalid,
+ * or can not be connected.
+ * @throws IllegalStateException if setting the surface is not supported, e.g.
+ * not in the Initialized state, or another surface has already been set.
+ */
+ public void setSurface(@Nullable Surface surface) {
+ native_setSurface(surface);
+ }
+
+ private native final void native_setSurface(@Nullable Surface surface);
+
+ /**
+ * Sets the audio track for MediaSync.
+ * <p>
+ * Currently, this is only supported in the Initialized state.
+ *
+ * @param audioTrack Specify an AudioTrack through which to render the audio data.
+ * @throws IllegalArgumentException if the audioTrack has been released, or is invalid.
+ * @throws IllegalStateException if setting the audio track is not supported, e.g.
+ * not in the Initialized state, or another audio track has already been set.
+ */
+ public void setAudioTrack(@Nullable AudioTrack audioTrack) {
+ native_setAudioTrack(audioTrack);
+ mAudioTrack = audioTrack;
+ if (audioTrack != null && mAudioThread == null) {
+ createAudioThread();
+ }
+ }
+
+ private native final void native_setAudioTrack(@Nullable AudioTrack audioTrack);
+
+ /**
+ * Requests a Surface to use as the input. This may only be called after
+ * {@link #setSurface}.
+ * <p>
+ * The application is responsible for calling release() on the Surface when
+ * done.
+ * @throws IllegalStateException if not set, or another input surface has
+ * already been created.
+ */
+ @NonNull
+ public native final Surface createInputSurface();
+
+ /**
+ * Sets playback rate using {@link PlaybackParams}.
+ * <p>
+ * When using MediaSync with {@link AudioTrack}, set playback params using this
+ * call instead of calling it directly on the track, so that the sync is aware of
+ * the params change.
+ * <p>
+ * This call also works if there is no audio track.
+ *
+ * @param params the playback params to use. {@link PlaybackParams#getSpeed
+ * Speed} is the ratio between desired playback rate and normal one. 1.0 means
+ * normal playback speed. 0.0 means pause. Value larger than 1.0 means faster playback,
+ * while value between 0.0 and 1.0 for slower playback. <b>Note:</b> the normal rate
+ * does not change as a result of this call. To restore the original rate at any time,
+ * use speed of 1.0.
+ *
+ * @throws IllegalStateException if the internal sync engine or the audio track has not
+ * been initialized.
+ * @throws IllegalArgumentException if the params are not supported.
+ */
+ public void setPlaybackParams(@NonNull PlaybackParams params) {
+ synchronized(mAudioLock) {
+ mPlaybackRate = native_setPlaybackParams(params);;
+ }
+ if (mPlaybackRate != 0.0 && mAudioThread != null) {
+ postRenderAudio(0);
+ }
+ }
+
+ /**
+ * Gets the playback rate using {@link PlaybackParams}.
+ *
+ * @return the playback rate being used.
+ *
+ * @throws IllegalStateException if the internal sync engine or the audio track has not
+ * been initialized.
+ */
+ @NonNull
+ public native PlaybackParams getPlaybackParams();
+
+ private native float native_setPlaybackParams(@NonNull PlaybackParams params);
+
+ /**
+ * Sets A/V sync mode.
+ *
+ * @param params the A/V sync params to apply
+ *
+ * @throws IllegalStateException if the internal player engine has not been
+ * initialized.
+ * @throws IllegalArgumentException if params are not supported.
+ */
+ public void setSyncParams(@NonNull SyncParams params) {
+ synchronized(mAudioLock) {
+ mPlaybackRate = native_setSyncParams(params);;
+ }
+ if (mPlaybackRate != 0.0 && mAudioThread != null) {
+ postRenderAudio(0);
+ }
+ }
+
+ private native float native_setSyncParams(@NonNull SyncParams params);
+
+ /**
+ * Gets the A/V sync mode.
+ *
+ * @return the A/V sync params
+ *
+ * @throws IllegalStateException if the internal player engine has not been
+ * initialized.
+ */
+ @NonNull
+ public native SyncParams getSyncParams();
+
+ /**
+ * Flushes all buffers from the sync object.
+ * <p>
+ * All pending unprocessed audio and video buffers are discarded. If an audio track was
+ * configured, it is flushed and stopped. If a video output surface was configured, the
+ * last frame queued to it is left on the frame. Queue a blank video frame to clear the
+ * surface,
+ * <p>
+ * No callbacks are received for the flushed buffers.
+ *
+ * @throws IllegalStateException if the internal player engine has not been
+ * initialized.
+ */
+ public void flush() {
+ synchronized(mAudioLock) {
+ mAudioBuffers.clear();
+ mCallbackHandler.removeCallbacksAndMessages(null);
+ }
+ if (mAudioTrack != null) {
+ mAudioTrack.pause();
+ mAudioTrack.flush();
+ // Call stop() to signal to the AudioSink to completely fill the
+ // internal buffer before resuming playback.
+ mAudioTrack.stop();
+ }
+ native_flush();
+ }
+
+ private native final void native_flush();
+
+ /**
+ * Get current playback position.
+ * <p>
+ * The MediaTimestamp represents how the media time correlates to the system time in
+ * a linear fashion using an anchor and a clock rate. During regular playback, the media
+ * time moves fairly constantly (though the anchor frame may be rebased to a current
+ * system time, the linear correlation stays steady). Therefore, this method does not
+ * need to be called often.
+ * <p>
+ * To help users get current playback position, this method always anchors the timestamp
+ * to the current {@link System#nanoTime system time}, so
+ * {@link MediaTimestamp#getAnchorMediaTimeUs} can be used as current playback position.
+ *
+ * @return a MediaTimestamp object if a timestamp is available, or {@code null} if no timestamp
+ * is available, e.g. because the media player has not been initialized.
+ *
+ * @see MediaTimestamp
+ */
+ @Nullable
+ public MediaTimestamp getTimestamp()
+ {
+ try {
+ // TODO: create the timestamp in native
+ MediaTimestamp timestamp = new MediaTimestamp();
+ if (native_getTimestamp(timestamp)) {
+ return timestamp;
+ } else {
+ return null;
+ }
+ } catch (IllegalStateException e) {
+ return null;
+ }
+ }
+
+ private native final boolean native_getTimestamp(@NonNull MediaTimestamp timestamp);
+
+ /**
+ * Queues the audio data asynchronously for playback (AudioTrack must be in streaming mode).
+ * If the audio track was flushed as a result of {@link #flush}, it will be restarted.
+ * @param audioData the buffer that holds the data to play. This buffer will be returned
+ * to the client via registered callback.
+ * @param bufferId an integer used to identify audioData. It will be returned to
+ * the client along with audioData. This helps applications to keep track of audioData,
+ * e.g., it can be used to store the output buffer index used by the audio codec.
+ * @param presentationTimeUs the presentation timestamp in microseconds for the first frame
+ * in the buffer.
+ * @throws IllegalStateException if audio track is not set or internal configureation
+ * has not been done correctly.
+ */
+ public void queueAudio(
+ @NonNull ByteBuffer audioData, int bufferId, long presentationTimeUs) {
+ if (mAudioTrack == null || mAudioThread == null) {
+ throw new IllegalStateException(
+ "AudioTrack is NOT set or audio thread is not created");
+ }
+
+ synchronized(mAudioLock) {
+ mAudioBuffers.add(new AudioBuffer(audioData, bufferId, presentationTimeUs));
+ }
+
+ if (mPlaybackRate != 0.0) {
+ postRenderAudio(0);
+ }
+ }
+
+ // When called on user thread, make sure to check mAudioThread != null.
+ private void postRenderAudio(long delayMillis) {
+ mAudioHandler.postDelayed(new Runnable() {
+ public void run() {
+ synchronized(mAudioLock) {
+ if (mPlaybackRate == 0.0) {
+ return;
+ }
+
+ if (mAudioBuffers.isEmpty()) {
+ return;
+ }
+
+ AudioBuffer audioBuffer = mAudioBuffers.get(0);
+ int size = audioBuffer.mByteBuffer.remaining();
+ // restart audio track after flush
+ if (size > 0 && mAudioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
+ try {
+ mAudioTrack.play();
+ } catch (IllegalStateException e) {
+ Log.w(TAG, "could not start audio track");
+ }
+ }
+ int sizeWritten = mAudioTrack.write(
+ audioBuffer.mByteBuffer,
+ size,
+ AudioTrack.WRITE_NON_BLOCKING);
+ if (sizeWritten > 0) {
+ if (audioBuffer.mPresentationTimeUs != -1) {
+ native_updateQueuedAudioData(
+ size, audioBuffer.mPresentationTimeUs);
+ audioBuffer.mPresentationTimeUs = -1;
+ }
+
+ if (sizeWritten == size) {
+ postReturnByteBuffer(audioBuffer);
+ mAudioBuffers.remove(0);
+ if (!mAudioBuffers.isEmpty()) {
+ postRenderAudio(0);
+ }
+ return;
+ }
+ }
+ long pendingTimeMs = TimeUnit.MICROSECONDS.toMillis(
+ native_getPlayTimeForPendingAudioFrames());
+ postRenderAudio(pendingTimeMs / 2);
+ }
+ }
+ }, delayMillis);
+ }
+
+ private native final void native_updateQueuedAudioData(
+ int sizeInBytes, long presentationTimeUs);
+
+ private native final long native_getPlayTimeForPendingAudioFrames();
+
+ private final void postReturnByteBuffer(@NonNull final AudioBuffer audioBuffer) {
+ synchronized(mCallbackLock) {
+ if (mCallbackHandler != null) {
+ final MediaSync sync = this;
+ mCallbackHandler.post(new Runnable() {
+ public void run() {
+ Callback callback;
+ synchronized(mCallbackLock) {
+ callback = mCallback;
+ if (mCallbackHandler == null
+ || mCallbackHandler.getLooper().getThread()
+ != Thread.currentThread()) {
+ // callback handler has been changed.
+ return;
+ }
+ }
+ if (callback != null) {
+ callback.onAudioBufferConsumed(sync, audioBuffer.mByteBuffer,
+ audioBuffer.mBufferIndex);
+ }
+ }
+ });
+ }
+ }
+ }
+
+ private final void returnAudioBuffers() {
+ synchronized(mAudioLock) {
+ for (AudioBuffer audioBuffer: mAudioBuffers) {
+ postReturnByteBuffer(audioBuffer);
+ }
+ mAudioBuffers.clear();
+ }
+ }
+
+ private void createAudioThread() {
+ mAudioThread = new Thread() {
+ @Override
+ public void run() {
+ Looper.prepare();
+ synchronized(mAudioLock) {
+ mAudioLooper = Looper.myLooper();
+ mAudioHandler = new Handler();
+ mAudioLock.notify();
+ }
+ Looper.loop();
+ }
+ };
+ mAudioThread.start();
+
+ synchronized(mAudioLock) {
+ try {
+ mAudioLock.wait();
+ } catch(InterruptedException e) {
+ }
+ }
+ }
+
+ static {
+ System.loadLibrary("media_jni");
+ native_init();
+ }
+
+ private static native final void native_init();
+}
diff --git a/android/media/MediaSyncEvent.java b/android/media/MediaSyncEvent.java
new file mode 100644
index 00000000..04448f04
--- /dev/null
+++ b/android/media/MediaSyncEvent.java
@@ -0,0 +1,122 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * The MediaSyncEvent class defines events that can be used to synchronize playback or capture
+ * actions between different players and recorders.
+ * <p>For instance, {@link AudioRecord#startRecording(MediaSyncEvent)} is used to start capture
+ * only when the playback on a particular audio session is complete.
+ * The audio session ID is retrieved from a player (e.g {@link MediaPlayer}, {@link AudioTrack} or
+ * {@link ToneGenerator}) by use of the getAudioSessionId() method.
+ */
+public class MediaSyncEvent {
+
+ /**
+ * No sync event specified. When used with a synchronized playback or capture method, the
+ * behavior is equivalent to calling the corresponding non synchronized method.
+ */
+ public static final int SYNC_EVENT_NONE = AudioSystem.SYNC_EVENT_NONE;
+
+ /**
+ * The corresponding action is triggered only when the presentation is completed
+ * (meaning the media has been presented to the user) on the specified session.
+ * A synchronization of this type requires a source audio session ID to be set via
+ * {@link #setAudioSessionId(int)} method.
+ */
+ public static final int SYNC_EVENT_PRESENTATION_COMPLETE =
+ AudioSystem.SYNC_EVENT_PRESENTATION_COMPLETE;
+
+
+ /**
+ * Creates a synchronization event of the sepcified type.
+ *
+ * <p>The type specifies which kind of event is monitored.
+ * For instance, event {@link #SYNC_EVENT_PRESENTATION_COMPLETE} corresponds to the audio being
+ * presented to the user on a particular audio session.
+ * @param eventType the synchronization event type.
+ * @return the MediaSyncEvent created.
+ * @throws java.lang.IllegalArgumentException
+ */
+ public static MediaSyncEvent createEvent(int eventType)
+ throws IllegalArgumentException {
+ if (!isValidType(eventType)) {
+ throw (new IllegalArgumentException(eventType
+ + "is not a valid MediaSyncEvent type."));
+ } else {
+ return new MediaSyncEvent(eventType);
+ }
+ }
+
+ private final int mType;
+ private int mAudioSession = 0;
+
+ private MediaSyncEvent(int eventType) {
+ mType = eventType;
+ }
+
+ /**
+ * Sets the event source audio session ID.
+ *
+ * <p>The audio session ID specifies on which audio session the synchronization event should be
+ * monitored.
+ * It is mandatory for certain event types (e.g. {@link #SYNC_EVENT_PRESENTATION_COMPLETE}).
+ * For instance, the audio session ID can be retrieved via
+ * {@link MediaPlayer#getAudioSessionId()} when monitoring an event on a particular MediaPlayer.
+ * @param audioSessionId the audio session ID of the event source being monitored.
+ * @return the MediaSyncEvent the method is called on.
+ * @throws java.lang.IllegalArgumentException
+ */
+ public MediaSyncEvent setAudioSessionId(int audioSessionId)
+ throws IllegalArgumentException {
+ if (audioSessionId > 0) {
+ mAudioSession = audioSessionId;
+ } else {
+ throw (new IllegalArgumentException(audioSessionId + " is not a valid session ID."));
+ }
+ return this;
+ }
+
+ /**
+ * Gets the synchronization event type.
+ *
+ * @return the synchronization event type.
+ */
+ public int getType() {
+ return mType;
+ }
+
+ /**
+ * Gets the synchronization event audio session ID.
+ *
+ * @return the synchronization audio session ID. The returned audio session ID is 0 if it has
+ * not been set.
+ */
+ public int getAudioSessionId() {
+ return mAudioSession;
+ }
+
+ private static boolean isValidType(int type) {
+ switch (type) {
+ case SYNC_EVENT_NONE:
+ case SYNC_EVENT_PRESENTATION_COMPLETE:
+ return true;
+ default:
+ return false;
+ }
+ }
+}
diff --git a/android/media/MediaTimeProvider.java b/android/media/MediaTimeProvider.java
new file mode 100644
index 00000000..fe377125
--- /dev/null
+++ b/android/media/MediaTimeProvider.java
@@ -0,0 +1,90 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
/** @hide */
public interface MediaTimeProvider {
    // we do not allow negative media time
    /**
     * Presentation time value if no timed event notification is requested.
     */
    public final static long NO_TIME = -1;

    /**
     * Cancels all previous notification requests from this listener if any. It
     * registers the listener to get seek and stop notifications. If timeUs is
     * not negative, it also registers the listener for a timed event
     * notification when the presentation time reaches (becomes greater) than
     * the value specified. This happens immediately if the current media time
     * is larger than or equal to timeUs.
     *
     * @param timeUs presentation time to get timed event callback at (or
     *               {@link #NO_TIME})
     * @param listener listener to register for the notifications
     */
    public void notifyAt(long timeUs, OnMediaTimeListener listener);

    /**
     * Cancels all previous notification requests from this listener if any. It
     * registers the listener to get seek and stop notifications. If the media
     * is stopped, the listener will immediately receive a stop notification.
     * Otherwise, it will receive a timed event notification.
     *
     * @param listener listener to register for the update
     */
    public void scheduleUpdate(OnMediaTimeListener listener);

    /**
     * Cancels all previous notification requests from this listener if any.
     *
     * @param listener listener whose pending notifications are cancelled
     */
    public void cancelNotifications(OnMediaTimeListener listener);

    /**
     * Get the current presentation time.
     *
     * @param precise Whether getting a precise time is important. This is
     *                more costly.
     * @param monotonic Whether returned time should be monotonic: that is,
     *                  greater than or equal to the last returned time. Don't
     *                  always set this to true. E.g. this has undesired
     *                  consequences if the media is seeked between calls.
     * @throws IllegalStateException if the media is not initialized
     */
    public long getCurrentTimeUs(boolean precise, boolean monotonic)
        throws IllegalStateException;

    /** @hide */
    public static interface OnMediaTimeListener {
        /**
         * Called when the registered time was reached naturally.
         *
         * @param timeUs current media time
         */
        void onTimedEvent(long timeUs);

        /**
         * Called when the media time changed due to seeking.
         *
         * @param timeUs current media time
         */
        void onSeek(long timeUs);

        /**
         * Called when the playback stopped. This is not called on pause, only
         * on full stop, at which point there is no further current media time.
         */
        void onStop();
    }
}
+
diff --git a/android/media/MediaTimestamp.java b/android/media/MediaTimestamp.java
new file mode 100644
index 00000000..5ea6bbe8
--- /dev/null
+++ b/android/media/MediaTimestamp.java
@@ -0,0 +1,85 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * An immutable object that represents the linear correlation between the media time
+ * and the system time. It contains the media clock rate, together with the media timestamp
+ * of an anchor frame and the system time when that frame was presented or is committed
+ * to be presented.
+ * <p>
+ * The phrase "present" means that audio/video produced on device is detectable by an external
+ * observer off device.
+ * The time is based on the implementation's best effort, using whatever knowledge
+ * is available to the system, but cannot account for any delay unknown to the implementation.
+ * The anchor frame could be any frame, including a just-rendered frame, or even a theoretical
+ * or in-between frame, based on the source of the MediaTimestamp.
+ * When the anchor frame is a just-rendered one, the media time stands for
+ * current position of the playback or recording.
+ *
+ * @see MediaSync#getTimestamp
+ * @see MediaPlayer#getTimestamp
+ */
public final class MediaTimestamp
{
    /**
     * Get the media time of the anchor in microseconds.
     */
    public long getAnchorMediaTimeUs() {
        return mediaTimeUs;
    }

    /**
     * Get the {@link java.lang.System#nanoTime system time} corresponding to the media time
     * in nanoseconds.
     * @deprecated use {@link #getAnchorSystemNanoTime} — this name is kept only
     * for backward compatibility with callers of the misspelled accessor.
     */
    @Deprecated
    public long getAnchorSytemNanoTime() {
        return getAnchorSystemNanoTime();
    }

    /**
     * Get the {@link java.lang.System#nanoTime system time} corresponding to the media time
     * in nanoseconds.
     */
    public long getAnchorSystemNanoTime() {
        return nanoTime;
    }

    /**
     * Get the rate of the media clock in relation to the system time.
     * <p>
     * It is 1.0 if media clock advances in sync with the system clock;
     * greater than 1.0 if media clock is faster than the system clock;
     * less than 1.0 if media clock is slower than the system clock.
     */
    public float getMediaClockRate() {
        return clockRate;
    }

    /** @hide - accessor shorthand */
    public final long mediaTimeUs;
    /** @hide - accessor shorthand */
    public final long nanoTime;
    /** @hide - accessor shorthand */
    public final float clockRate;

    /**
     * @hide
     * @param mediaUs anchor media time, microseconds
     * @param systemNs anchor system time, nanoseconds
     * @param rate media clock rate relative to the system clock
     */
    MediaTimestamp(long mediaUs, long systemNs, float rate) {
        mediaTimeUs = mediaUs;
        nanoTime = systemNs;
        clockRate = rate;
    }

    /** @hide Default: time zero, clock running at system rate. */
    MediaTimestamp() {
        mediaTimeUs = 0;
        nanoTime = 0;
        clockRate = 1.0f;
    }
}
diff --git a/android/media/Metadata.java b/android/media/Metadata.java
new file mode 100644
index 00000000..4b8f81e0
--- /dev/null
+++ b/android/media/Metadata.java
@@ -0,0 +1,553 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.os.Parcel;
+import android.util.Log;
+import android.util.MathUtils;
+
+import java.util.Calendar;
+import java.util.Collections;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.Set;
+import java.util.TimeZone;
+
+
+/**
+ Class to hold the media's metadata. Metadata are used
+ for human consumption and can be embedded in the media (e.g
+ shoutcast) or available from an external source. The source can be
+ local (e.g thumbnail stored in the DB) or remote.
+
+ Metadata is like a Bundle. It is sparse and each key can occur at
+ most once. The key is an integer and the value is the actual metadata.
+
+ The caller is expected to know the type of the metadata and call
+ the right get* method to fetch its value.
+
+ @hide
+ @deprecated Use {@link MediaMetadata}.
+ */
+@Deprecated public class Metadata
+{
+ // The metadata are keyed using integers rather than more heavy
+ // weight strings. We considered using Bundle to ship the metadata
+ // between the native layer and the java layer but dropped that
+ // option since keeping in sync a native implementation of Bundle
+ // and the java one would be too burdensome. Besides Bundle uses
+ // String for its keys.
+ // The key range [0 8192) is reserved for the system.
+ //
+ // We manually serialize the data in Parcels. For large memory
+ // blob (bitmaps, raw pictures) we use MemoryFile which allow the
+ // client to make the data purge-able once it is done with it.
+ //
+
+ /**
+ * {@hide}
+ */
+ public static final int ANY = 0; // Never used for metadata returned, only for filtering.
+ // Keep in sync with kAny in MediaPlayerService.cpp
+
+ // Playback capabilities.
+ /**
+ * Indicate whether the media can be paused
+ */
+ public static final int PAUSE_AVAILABLE = 1; // Boolean
+ /**
+ * Indicate whether the media can be backward seeked
+ */
+ public static final int SEEK_BACKWARD_AVAILABLE = 2; // Boolean
+ /**
+ * Indicate whether the media can be forward seeked
+ */
+ public static final int SEEK_FORWARD_AVAILABLE = 3; // Boolean
+ /**
+ * Indicate whether the media can be seeked
+ */
+ public static final int SEEK_AVAILABLE = 4; // Boolean
+
+ // TODO: Should we use numbers compatible with the metadata retriever?
+ /**
+ * {@hide}
+ */
+ public static final int TITLE = 5; // String
+ /**
+ * {@hide}
+ */
+ public static final int COMMENT = 6; // String
+ /**
+ * {@hide}
+ */
+ public static final int COPYRIGHT = 7; // String
+ /**
+ * {@hide}
+ */
+ public static final int ALBUM = 8; // String
+ /**
+ * {@hide}
+ */
+ public static final int ARTIST = 9; // String
+ /**
+ * {@hide}
+ */
+ public static final int AUTHOR = 10; // String
+ /**
+ * {@hide}
+ */
+ public static final int COMPOSER = 11; // String
+ /**
+ * {@hide}
+ */
+ public static final int GENRE = 12; // String
+ /**
+ * {@hide}
+ */
+ public static final int DATE = 13; // Date
+ /**
+ * {@hide}
+ */
+ public static final int DURATION = 14; // Integer(millisec)
+ /**
+ * {@hide}
+ */
+ public static final int CD_TRACK_NUM = 15; // Integer 1-based
+ /**
+ * {@hide}
+ */
+ public static final int CD_TRACK_MAX = 16; // Integer
+ /**
+ * {@hide}
+ */
+ public static final int RATING = 17; // String
+ /**
+ * {@hide}
+ */
+ public static final int ALBUM_ART = 18; // byte[]
+ /**
+ * {@hide}
+ */
+ public static final int VIDEO_FRAME = 19; // Bitmap
+
+ /**
+ * {@hide}
+ */
+ public static final int BIT_RATE = 20; // Integer, Aggregate rate of
+ // all the streams in bps.
+
+ /**
+ * {@hide}
+ */
+ public static final int AUDIO_BIT_RATE = 21; // Integer, bps
+ /**
+ * {@hide}
+ */
+ public static final int VIDEO_BIT_RATE = 22; // Integer, bps
+ /**
+ * {@hide}
+ */
+ public static final int AUDIO_SAMPLE_RATE = 23; // Integer, Hz
+ /**
+ * {@hide}
+ */
+ public static final int VIDEO_FRAME_RATE = 24; // Integer, Hz
+
+ // See RFC2046 and RFC4281.
+ /**
+ * {@hide}
+ */
+ public static final int MIME_TYPE = 25; // String
+ /**
+ * {@hide}
+ */
+ public static final int AUDIO_CODEC = 26; // String
+ /**
+ * {@hide}
+ */
+ public static final int VIDEO_CODEC = 27; // String
+
+ /**
+ * {@hide}
+ */
+ public static final int VIDEO_HEIGHT = 28; // Integer
+ /**
+ * {@hide}
+ */
+ public static final int VIDEO_WIDTH = 29; // Integer
+ /**
+ * {@hide}
+ */
+ public static final int NUM_TRACKS = 30; // Integer
+ /**
+ * {@hide}
+ */
+ public static final int DRM_CRIPPLED = 31; // Boolean
+
+ private static final int LAST_SYSTEM = 31;
+ private static final int FIRST_CUSTOM = 8192;
+
+ // Shorthands to set the MediaPlayer's metadata filter.
+ /**
+ * {@hide}
+ */
+ public static final Set<Integer> MATCH_NONE = Collections.EMPTY_SET;
+ /**
+ * {@hide}
+ */
+ public static final Set<Integer> MATCH_ALL = Collections.singleton(ANY);
+
+ /**
+ * {@hide}
+ */
+ public static final int STRING_VAL = 1;
+ /**
+ * {@hide}
+ */
+ public static final int INTEGER_VAL = 2;
+ /**
+ * {@hide}
+ */
+ public static final int BOOLEAN_VAL = 3;
+ /**
+ * {@hide}
+ */
+ public static final int LONG_VAL = 4;
+ /**
+ * {@hide}
+ */
+ public static final int DOUBLE_VAL = 5;
+ /**
+ * {@hide}
+ */
+ public static final int DATE_VAL = 6;
+ /**
+ * {@hide}
+ */
+ public static final int BYTE_ARRAY_VAL = 7;
+ // FIXME: misses a type for shared heap is missing (MemoryFile).
+ // FIXME: misses a type for bitmaps.
+ private static final int LAST_TYPE = 7;
+
+ private static final String TAG = "media.Metadata";
+ private static final int kInt32Size = 4;
+ private static final int kMetaHeaderSize = 2 * kInt32Size; // size + marker
+ private static final int kRecordHeaderSize = 3 * kInt32Size; // size + id + type
+
+ private static final int kMetaMarker = 0x4d455441; // 'M' 'E' 'T' 'A'
+
+ // After a successful parsing, set the parcel with the serialized metadata.
+ private Parcel mParcel;
+
+ // Map to associate a Metadata key (e.g TITLE) with the offset of
+ // the record's payload in the parcel.
+ // Used to look up if a key was present too.
+ // Key: Metadata ID
+ // Value: Offset of the metadata type field in the record.
+ private final HashMap<Integer, Integer> mKeyToPosMap =
+ new HashMap<Integer, Integer>();
+
+ /**
+ * {@hide}
+ */
+ public Metadata() { }
+
+ /**
+ * Go over all the records, collecting metadata keys and records'
+ * type field offset in the Parcel. These are stored in
+ * mKeyToPosMap for latter retrieval.
+ * Format of a metadata record:
+ <pre>
+ 1 2 3
+ 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | record size |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | metadata key | // TITLE
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | metadata type | // STRING_VAL
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | |
+ | .... metadata payload .... |
+ | |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ </pre>
+ * @param parcel With the serialized records.
+ * @param bytesLeft How many bytes in the parcel should be processed.
+ * @return false if an error occurred during parsing.
+ */
+ private boolean scanAllRecords(Parcel parcel, int bytesLeft) {
+ int recCount = 0;
+ boolean error = false;
+
+ mKeyToPosMap.clear();
+ while (bytesLeft > kRecordHeaderSize) {
+ final int start = parcel.dataPosition();
+ // Check the size.
+ final int size = parcel.readInt();
+
+ if (size <= kRecordHeaderSize) { // at least 1 byte should be present.
+ Log.e(TAG, "Record is too short");
+ error = true;
+ break;
+ }
+
+ // Check the metadata key.
+ final int metadataId = parcel.readInt();
+ if (!checkMetadataId(metadataId)) {
+ error = true;
+ break;
+ }
+
+ // Store the record offset which points to the type
+ // field so we can later on read/unmarshall the record
+ // payload.
+ if (mKeyToPosMap.containsKey(metadataId)) {
+ Log.e(TAG, "Duplicate metadata ID found");
+ error = true;
+ break;
+ }
+
+ mKeyToPosMap.put(metadataId, parcel.dataPosition());
+
+ // Check the metadata type.
+ final int metadataType = parcel.readInt();
+ if (metadataType <= 0 || metadataType > LAST_TYPE) {
+ Log.e(TAG, "Invalid metadata type " + metadataType);
+ error = true;
+ break;
+ }
+
+ // Skip to the next one.
+ try {
+ parcel.setDataPosition(MathUtils.addOrThrow(start, size));
+ } catch (IllegalArgumentException e) {
+ Log.e(TAG, "Invalid size: " + e.getMessage());
+ error = true;
+ break;
+ }
+
+ bytesLeft -= size;
+ ++recCount;
+ }
+
+ if (0 != bytesLeft || error) {
+ Log.e(TAG, "Ran out of data or error on record " + recCount);
+ mKeyToPosMap.clear();
+ return false;
+ } else {
+ return true;
+ }
+ }
+
+ /**
+ * Check a parcel containing metadata is well formed. The header
+ * is checked as well as the individual records format. However, the
+ * data inside the record is not checked because we do lazy access
+ * (we check/unmarshall only data the user asks for.)
+ *
+ * Format of a metadata parcel:
+ <pre>
+ 1 2 3
+ 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | metadata total size |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | 'M' | 'E' | 'T' | 'A' |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ | |
+ | .... metadata records .... |
+ | |
+ +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
+ </pre>
+ *
+ * @param parcel With the serialized data. Metadata keeps a
+ * reference on it to access it later on. The caller
+ * should not modify the parcel after this call (and
+ * not call recycle on it.)
+ * @return false if an error occurred.
+ * {@hide}
+ */
+ public boolean parse(Parcel parcel) {
+ if (parcel.dataAvail() < kMetaHeaderSize) {
+ Log.e(TAG, "Not enough data " + parcel.dataAvail());
+ return false;
+ }
+
+ final int pin = parcel.dataPosition(); // to roll back in case of errors.
+ final int size = parcel.readInt();
+
+ // The extra kInt32Size below is to account for the int32 'size' just read.
+ if (parcel.dataAvail() + kInt32Size < size || size < kMetaHeaderSize) {
+ Log.e(TAG, "Bad size " + size + " avail " + parcel.dataAvail() + " position " + pin);
+ parcel.setDataPosition(pin);
+ return false;
+ }
+
+ // Checks if the 'M' 'E' 'T' 'A' marker is present.
+ final int kShouldBeMetaMarker = parcel.readInt();
+ if (kShouldBeMetaMarker != kMetaMarker ) {
+ Log.e(TAG, "Marker missing " + Integer.toHexString(kShouldBeMetaMarker));
+ parcel.setDataPosition(pin);
+ return false;
+ }
+
+ // Scan the records to collect metadata ids and offsets.
+ if (!scanAllRecords(parcel, size - kMetaHeaderSize)) {
+ parcel.setDataPosition(pin);
+ return false;
+ }
+ mParcel = parcel;
+ return true;
+ }
+
+ /**
+ * @return The set of metadata ID found.
+ */
+ public Set<Integer> keySet() {
+ return mKeyToPosMap.keySet();
+ }
+
+ /**
+ * @return true if a value is present for the given key.
+ */
+ public boolean has(final int metadataId) {
+ if (!checkMetadataId(metadataId)) {
+ throw new IllegalArgumentException("Invalid key: " + metadataId);
+ }
+ return mKeyToPosMap.containsKey(metadataId);
+ }
+
+ // Accessors.
+ // Caller must make sure the key is present using the {@code has}
+ // method otherwise a RuntimeException will occur.
+
+ /**
+ * {@hide}
+ */
+ public String getString(final int key) {
+ checkType(key, STRING_VAL);
+ return mParcel.readString();
+ }
+
+ /**
+ * {@hide}
+ */
+ public int getInt(final int key) {
+ checkType(key, INTEGER_VAL);
+ return mParcel.readInt();
+ }
+
+ /**
+ * Get the boolean value indicated by key
+ */
+ public boolean getBoolean(final int key) {
+ checkType(key, BOOLEAN_VAL);
+ return mParcel.readInt() == 1;
+ }
+
+ /**
+ * {@hide}
+ */
+ public long getLong(final int key) {
+ checkType(key, LONG_VAL); /**
+ * {@hide}
+ */
+ return mParcel.readLong();
+ }
+
+ /**
+ * {@hide}
+ */
+ public double getDouble(final int key) {
+ checkType(key, DOUBLE_VAL);
+ return mParcel.readDouble();
+ }
+
+ /**
+ * {@hide}
+ */
+ public byte[] getByteArray(final int key) {
+ checkType(key, BYTE_ARRAY_VAL);
+ return mParcel.createByteArray();
+ }
+
+ /**
+ * {@hide}
+ */
+ public Date getDate(final int key) {
+ checkType(key, DATE_VAL);
+ final long timeSinceEpoch = mParcel.readLong();
+ final String timeZone = mParcel.readString();
+
+ if (timeZone.length() == 0) {
+ return new Date(timeSinceEpoch);
+ } else {
+ TimeZone tz = TimeZone.getTimeZone(timeZone);
+ Calendar cal = Calendar.getInstance(tz);
+
+ cal.setTimeInMillis(timeSinceEpoch);
+ return cal.getTime();
+ }
+ }
+
+ /**
+ * @return the last available system metadata id. Ids are
+ * 1-indexed.
+ * {@hide}
+ */
+ public static int lastSytemId() { return LAST_SYSTEM; }
+
+ /**
+ * @return the first available cutom metadata id.
+ * {@hide}
+ */
+ public static int firstCustomId() { return FIRST_CUSTOM; }
+
+ /**
+ * @return the last value of known type. Types are 1-indexed.
+ * {@hide}
+ */
+ public static int lastType() { return LAST_TYPE; }
+
+ /**
+ * Check val is either a system id or a custom one.
+ * @param val Metadata key to test.
+ * @return true if it is in a valid range.
+ **/
+ private boolean checkMetadataId(final int val) {
+ if (val <= ANY || (LAST_SYSTEM < val && val < FIRST_CUSTOM)) {
+ Log.e(TAG, "Invalid metadata ID " + val);
+ return false;
+ }
+ return true;
+ }
+
+ /**
+ * Check the type of the data match what is expected.
+ */
+ private void checkType(final int key, final int expectedType) {
+ final int pos = mKeyToPosMap.get(key);
+
+ mParcel.setDataPosition(pos);
+
+ final int type = mParcel.readInt();
+ if (type != expectedType) {
+ throw new IllegalStateException("Wrong type " + expectedType + " but got " + type);
+ }
+ }
+}
diff --git a/android/media/MiniThumbFile.java b/android/media/MiniThumbFile.java
new file mode 100644
index 00000000..664308c4
--- /dev/null
+++ b/android/media/MiniThumbFile.java
@@ -0,0 +1,273 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.net.Uri;
+import android.os.Environment;
+import android.util.Log;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.nio.ByteBuffer;
+import java.nio.channels.FileChannel;
+import java.nio.channels.FileLock;
+import java.util.Hashtable;
+
+/**
+ * This class handles the mini-thumb file. A mini-thumb file consists
+ * of blocks, indexed by id. Each block has BYTES_PER_MINTHUMB bytes in the
+ * following format:
+ *
+ * 1 byte status (0 = empty, 1 = mini-thumb available)
+ * 8 bytes magic (a magic number to match what's in the database)
+ * 4 bytes data length (LEN)
+ * LEN bytes jpeg data
+ * (the remaining bytes are unused)
+ *
+ * @hide This file is shared between MediaStore and MediaProvider and should remained internal use
+ * only.
+ */
public class MiniThumbFile {
    private static final String TAG = "MiniThumbFile";
    private static final int MINI_THUMB_DATA_FILE_VERSION = 3;
    // Fixed-size slot per thumbnail; slot offset = id * BYTES_PER_MINTHUMB.
    public static final int BYTES_PER_MINTHUMB = 10000;
    // 1 status byte + 8 magic bytes + 4 length bytes.
    private static final int HEADER_SIZE = 1 + 8 + 4;
    private Uri mUri;
    private RandomAccessFile mMiniThumbFile;
    private FileChannel mChannel;
    // Direct buffer reused for all reads/writes; methods are synchronized so
    // there is no concurrent access to it within a process.
    private ByteBuffer mBuffer;
    // One MiniThumbFile per media type (images/video/...), keyed by type name.
    private static final Hashtable<String, MiniThumbFile> sThumbFiles =
        new Hashtable<String, MiniThumbFile>();

    /**
     * We store different types of thumbnails in different files. To remain backward compatible,
     * the hashcode of content://media/external/images/media must remain the same.
     */
    public static synchronized void reset() {
        for (MiniThumbFile file : sThumbFiles.values()) {
            file.deactivate();
        }
        sThumbFiles.clear();
    }

    // Returns (creating if needed) the cached instance for the media type
    // encoded in the second path segment of the uri (e.g. "images", "video").
    public static synchronized MiniThumbFile instance(Uri uri) {
        String type = uri.getPathSegments().get(1);
        MiniThumbFile file = sThumbFiles.get(type);
        // Log.v(TAG, "get minithumbfile for type: "+type);
        if (file == null) {
            file = new MiniThumbFile(
                    Uri.parse("content://media/external/" + type + "/media"));
            sThumbFiles.put(type, file);
        }

        return file;
    }

    // Path of the backing data file for a given format version; the uri
    // hashcode keys the file to the media type (see reset() comment above).
    private String randomAccessFilePath(int version) {
        String directoryName =
                Environment.getExternalStorageDirectory().toString()
                + "/DCIM/.thumbnails";
        return directoryName + "/.thumbdata" + version + "-" + mUri.hashCode();
    }

    // Best-effort cleanup of the previous-version data file.
    private void removeOldFile() {
        String oldPath = randomAccessFilePath(MINI_THUMB_DATA_FILE_VERSION - 1);
        File oldFile = new File(oldPath);
        if (oldFile.exists()) {
            try {
                oldFile.delete();
            } catch (SecurityException ex) {
                // ignore
            }
        }
    }

    // Lazily opens the backing file (read-write, falling back to read-only)
    // and caches it plus its channel. Returns null if it cannot be opened.
    private RandomAccessFile miniThumbDataFile() {
        if (mMiniThumbFile == null) {
            removeOldFile();
            String path = randomAccessFilePath(MINI_THUMB_DATA_FILE_VERSION);
            File directory = new File(path).getParentFile();
            if (!directory.isDirectory()) {
                if (!directory.mkdirs()) {
                    Log.e(TAG, "Unable to create .thumbnails directory "
                            + directory.toString());
                }
            }
            File f = new File(path);
            try {
                mMiniThumbFile = new RandomAccessFile(f, "rw");
            } catch (IOException ex) {
                // Open as read-only so we can at least read the existing
                // thumbnails.
                try {
                    mMiniThumbFile = new RandomAccessFile(f, "r");
                } catch (IOException ex2) {
                    // ignore exception
                }
            }
            if (mMiniThumbFile != null) {
                mChannel = mMiniThumbFile.getChannel();
            }
        }
        return mMiniThumbFile;
    }

    public MiniThumbFile(Uri uri) {
        mUri = uri;
        mBuffer = ByteBuffer.allocateDirect(BYTES_PER_MINTHUMB);
    }

    // Closes the backing file; it will be reopened lazily on next access.
    public synchronized void deactivate() {
        if (mMiniThumbFile != null) {
            try {
                mMiniThumbFile.close();
                mMiniThumbFile = null;
            } catch (IOException ex) {
                // ignore exception
            }
        }
    }

    // Get the magic number for the specified id in the mini-thumb file.
    // Returns 0 if the magic is not available.
    public synchronized long getMagic(long id) {
        // check the mini thumb file for the right data.  Right is
        // defined as having the right magic number at the offset
        // reserved for this "id".
        RandomAccessFile r = miniThumbDataFile();
        if (r != null) {
            long pos = id * BYTES_PER_MINTHUMB;
            FileLock lock = null;
            try {
                mBuffer.clear();
                mBuffer.limit(1 + 8);

                // Shared (read) lock covering only the status byte + magic.
                lock = mChannel.lock(pos, 1 + 8, true);
                // check that we can read the following 9 bytes
                // (1 for the "status" and 8 for the long)
                if (mChannel.read(mBuffer, pos) == 9) {
                    mBuffer.position(0);
                    if (mBuffer.get() == 1) {
                        return mBuffer.getLong();
                    }
                }
            } catch (IOException ex) {
                Log.v(TAG, "Got exception checking file magic: ", ex);
            } catch (RuntimeException ex) {
                // Other NIO related exception like disk full, read only channel..etc
                Log.e(TAG, "Got exception when reading magic, id = " + id +
                        ", disk full or mount read-only? " + ex.getClass());
            } finally {
                try {
                    if (lock != null) lock.release();
                }
                catch (IOException ex) {
                    // ignore it.
                }
            }
        }
        return 0;
    }

    /**
     * Writes one thumbnail record (status byte, magic, length, jpeg data) into
     * the slot for {@code id} under an exclusive file lock. Silently returns
     * if the data does not fit in a slot or the file cannot be opened;
     * rethrows plain IOExceptions from the write.
     */
    public synchronized void saveMiniThumbToFile(byte[] data, long id, long magic)
            throws IOException {
        RandomAccessFile r = miniThumbDataFile();
        if (r == null) return;

        long pos = id * BYTES_PER_MINTHUMB;
        FileLock lock = null;
        try {
            if (data != null) {
                if (data.length > BYTES_PER_MINTHUMB - HEADER_SIZE) {
                    // not enough space to store it.
                    return;
                }
                mBuffer.clear();
                mBuffer.put((byte) 1);
                mBuffer.putLong(magic);
                mBuffer.putInt(data.length);
                mBuffer.put(data);
                mBuffer.flip();

                // Exclusive lock over the whole slot while writing.
                lock = mChannel.lock(pos, BYTES_PER_MINTHUMB, false);
                mChannel.write(mBuffer, pos);
            }
        } catch (IOException ex) {
            Log.e(TAG, "couldn't save mini thumbnail data for "
                    + id + "; ", ex);
            throw ex;
        } catch (RuntimeException ex) {
            // Other NIO related exception like disk full, read only channel..etc
            Log.e(TAG, "couldn't save mini thumbnail data for "
                    + id + "; disk full or mount read-only? " + ex.getClass());
        } finally {
            try {
                if (lock != null) lock.release();
            }
            catch (IOException ex) {
                // ignore it.
            }
        }
    }

    /**
     * Gallery app can use this method to retrieve mini-thumbnail. Full size
     * images share the same IDs with their corresponding thumbnails.
     *
     * @param id the ID of the image (same of full size image).
     * @param data the buffer to store mini-thumbnail. Must be large enough to
     *             hold the stored payload; otherwise null is returned.
     * @return {@code data} filled with the jpeg bytes, or null if the slot is
     *         empty, invalid, too large for {@code data}, or on I/O error.
     */
    public synchronized byte [] getMiniThumbFromFile(long id, byte [] data) {
        RandomAccessFile r = miniThumbDataFile();
        if (r == null) return null;

        long pos = id * BYTES_PER_MINTHUMB;
        FileLock lock = null;
        try {
            mBuffer.clear();
            // Shared (read) lock over the whole slot.
            lock = mChannel.lock(pos, BYTES_PER_MINTHUMB, true);
            int size = mChannel.read(mBuffer, pos);
            if (size > 1 + 8 + 4) { // flag, magic, length
                mBuffer.position(0);
                byte flag = mBuffer.get();
                long magic = mBuffer.getLong();
                int length = mBuffer.getInt();

                // Sanity-check the header before copying the payload out.
                if (size >= 1 + 8 + 4 + length && length != 0 && magic != 0 && flag == 1 &&
                        data.length >= length) {
                    mBuffer.get(data, 0, length);
                    return data;
                }
            }
        } catch (IOException ex) {
            Log.w(TAG, "got exception when reading thumbnail id=" + id + ", exception: " + ex);
        } catch (RuntimeException ex) {
            // Other NIO related exception like disk full, read only channel..etc
            Log.e(TAG, "Got exception when reading thumbnail, id = " + id +
                    ", disk full or mount read-only? " + ex.getClass());
        } finally {
            try {
                if (lock != null) lock.release();
            }
            catch (IOException ex) {
                // ignore it.
            }
        }
        return null;
    }
}
diff --git a/android/media/NotProvisionedException.java b/android/media/NotProvisionedException.java
new file mode 100644
index 00000000..32b8151a
--- /dev/null
+++ b/android/media/NotProvisionedException.java
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * Exception thrown when an operation on a MediaDrm object is attempted
+ * and the device does not have a certificate. The app should obtain and
+ * install a certificate using the MediaDrm provisioning methods then retry
+ * the operation.
+ */
public final class NotProvisionedException extends MediaDrmException {
    /**
     * @param detailMessage message describing why the operation requires
     *        (re-)provisioning; passed through to {@link MediaDrmException}.
     */
    public NotProvisionedException(String detailMessage) {
        super(detailMessage);
    }
}
diff --git a/android/media/PlaybackParams.java b/android/media/PlaybackParams.java
new file mode 100644
index 00000000..938a953a
--- /dev/null
+++ b/android/media/PlaybackParams.java
@@ -0,0 +1,250 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.IntDef;
+import android.os.Parcel;
+import android.os.Parcelable;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+/**
+ * Structure for common playback params.
+ *
+ * Used by {@link AudioTrack} {@link AudioTrack#getPlaybackParams()} and
+ * {@link AudioTrack#setPlaybackParams(PlaybackParams)}
+ * to control playback behavior.
+ * <p> <strong>audio fallback mode:</strong>
+ * select out-of-range parameter handling.
+ * <ul>
+ * <li> {@link PlaybackParams#AUDIO_FALLBACK_MODE_DEFAULT}:
+ * System will determine best handling. </li>
+ * <li> {@link PlaybackParams#AUDIO_FALLBACK_MODE_MUTE}:
+ * Play silence for params normally out of range.</li>
 * <li> {@link PlaybackParams#AUDIO_FALLBACK_MODE_FAIL}:
 * Throw {@link java.lang.IllegalArgumentException} from
 * <code>AudioTrack.setPlaybackParams(PlaybackParams)</code>.</li>
+ * </ul>
+ * <p> <strong>pitch:</strong> increases or decreases the tonal frequency of the audio content.
+ * It is expressed as a multiplicative factor, where normal pitch is 1.0f.
+ * <p> <strong>speed:</strong> increases or decreases the time to
+ * play back a set of audio or video frames.
+ * It is expressed as a multiplicative factor, where normal speed is 1.0f.
+ * <p> Different combinations of speed and pitch may be used for audio playback;
+ * some common ones:
+ * <ul>
+ * <li> <em>Pitch equals 1.0f.</em> Speed change will be done with pitch preserved,
+ * often called <em>timestretching</em>.</li>
+ * <li> <em>Pitch equals speed.</em> Speed change will be done by <em>resampling</em>,
+ * similar to {@link AudioTrack#setPlaybackRate(int)}.</li>
+ * </ul>
+ */
+public final class PlaybackParams implements Parcelable {
+ /** @hide */
+ @IntDef(
+ value = {
+ AUDIO_FALLBACK_MODE_DEFAULT,
+ AUDIO_FALLBACK_MODE_MUTE,
+ AUDIO_FALLBACK_MODE_FAIL,
+ }
+ )
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface AudioFallbackMode {}
+ public static final int AUDIO_FALLBACK_MODE_DEFAULT = 0;
+ public static final int AUDIO_FALLBACK_MODE_MUTE = 1;
+ public static final int AUDIO_FALLBACK_MODE_FAIL = 2;
+
+ /** @hide */
+ @IntDef(
+ value = {
+ AUDIO_STRETCH_MODE_DEFAULT,
+ AUDIO_STRETCH_MODE_VOICE,
+ }
+ )
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface AudioStretchMode {}
+ /** @hide */
+ public static final int AUDIO_STRETCH_MODE_DEFAULT = 0;
+ /** @hide */
+ public static final int AUDIO_STRETCH_MODE_VOICE = 1;
+
+ // flags to indicate which params are actually set
+ private static final int SET_SPEED = 1 << 0;
+ private static final int SET_PITCH = 1 << 1;
+ private static final int SET_AUDIO_FALLBACK_MODE = 1 << 2;
+ private static final int SET_AUDIO_STRETCH_MODE = 1 << 3;
+ private int mSet = 0;
+
+ // params
+ private int mAudioFallbackMode = AUDIO_FALLBACK_MODE_DEFAULT;
+ private int mAudioStretchMode = AUDIO_STRETCH_MODE_DEFAULT;
+ private float mPitch = 1.0f;
+ private float mSpeed = 1.0f;
+
+ public PlaybackParams() {
+ }
+
+ private PlaybackParams(Parcel in) {
+ mSet = in.readInt();
+ mAudioFallbackMode = in.readInt();
+ mAudioStretchMode = in.readInt();
+ mPitch = in.readFloat();
+ if (mPitch < 0.f) {
+ mPitch = 0.f;
+ }
+ mSpeed = in.readFloat();
+ }
+
+ /**
+ * Allows defaults to be returned for properties not set.
+ * Otherwise a {@link java.lang.IllegalArgumentException} exception
+ * is raised when getting those properties
+ * which have defaults but have never been set.
+ * @return this <code>PlaybackParams</code> instance.
+ */
+ public PlaybackParams allowDefaults() {
+ mSet |= SET_AUDIO_FALLBACK_MODE | SET_AUDIO_STRETCH_MODE | SET_PITCH | SET_SPEED;
+ return this;
+ }
+
+ /**
+ * Sets the audio fallback mode.
+ * @param audioFallbackMode
+ * @return this <code>PlaybackParams</code> instance.
+ */
+ public PlaybackParams setAudioFallbackMode(@AudioFallbackMode int audioFallbackMode) {
+ mAudioFallbackMode = audioFallbackMode;
+ mSet |= SET_AUDIO_FALLBACK_MODE;
+ return this;
+ }
+
+ /**
+ * Retrieves the audio fallback mode.
+ * @return audio fallback mode
+ * @throws IllegalStateException if the audio fallback mode is not set.
+ */
+ public @AudioFallbackMode int getAudioFallbackMode() {
+ if ((mSet & SET_AUDIO_FALLBACK_MODE) == 0) {
+ throw new IllegalStateException("audio fallback mode not set");
+ }
+ return mAudioFallbackMode;
+ }
+
+ /**
+ * @hide
+ * Sets the audio stretch mode.
+ * @param audioStretchMode
+ * @return this <code>PlaybackParams</code> instance.
+ */
+ public PlaybackParams setAudioStretchMode(@AudioStretchMode int audioStretchMode) {
+ mAudioStretchMode = audioStretchMode;
+ mSet |= SET_AUDIO_STRETCH_MODE;
+ return this;
+ }
+
+ /**
+ * @hide
+ * Retrieves the audio stretch mode.
+ * @return audio stretch mode
+ * @throws IllegalStateException if the audio stretch mode is not set.
+ */
+ public @AudioStretchMode int getAudioStretchMode() {
+ if ((mSet & SET_AUDIO_STRETCH_MODE) == 0) {
+ throw new IllegalStateException("audio stretch mode not set");
+ }
+ return mAudioStretchMode;
+ }
+
+ /**
+ * Sets the pitch factor.
+ * @param pitch
+ * @return this <code>PlaybackParams</code> instance.
+ * @throws IllegalArgumentException if the pitch is negative.
+ */
+ public PlaybackParams setPitch(float pitch) {
+ if (pitch < 0.f) {
+ throw new IllegalArgumentException("pitch must not be negative");
+ }
+ mPitch = pitch;
+ mSet |= SET_PITCH;
+ return this;
+ }
+
+ /**
+ * Retrieves the pitch factor.
+ * @return pitch
+ * @throws IllegalStateException if pitch is not set.
+ */
+ public float getPitch() {
+ if ((mSet & SET_PITCH) == 0) {
+ throw new IllegalStateException("pitch not set");
+ }
+ return mPitch;
+ }
+
+ /**
+ * Sets the speed factor.
+ * @param speed
+ * @return this <code>PlaybackParams</code> instance.
+ */
+ public PlaybackParams setSpeed(float speed) {
+ mSpeed = speed;
+ mSet |= SET_SPEED;
+ return this;
+ }
+
+ /**
+ * Retrieves the speed factor.
+ * @return speed
+ * @throws IllegalStateException if speed is not set.
+ */
+ public float getSpeed() {
+ if ((mSet & SET_SPEED) == 0) {
+ throw new IllegalStateException("speed not set");
+ }
+ return mSpeed;
+ }
+
+ public static final Parcelable.Creator<PlaybackParams> CREATOR =
+ new Parcelable.Creator<PlaybackParams>() {
+ @Override
+ public PlaybackParams createFromParcel(Parcel in) {
+ return new PlaybackParams(in);
+ }
+
+ @Override
+ public PlaybackParams[] newArray(int size) {
+ return new PlaybackParams[size];
+ }
+ };
+
+
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ @Override
+ public void writeToParcel(Parcel dest, int flags) {
+ dest.writeInt(mSet);
+ dest.writeInt(mAudioFallbackMode);
+ dest.writeInt(mAudioStretchMode);
+ dest.writeFloat(mPitch);
+ dest.writeFloat(mSpeed);
+ }
+}
diff --git a/android/media/PlayerBase.java b/android/media/PlayerBase.java
new file mode 100644
index 00000000..4808d7a5
--- /dev/null
+++ b/android/media/PlayerBase.java
@@ -0,0 +1,588 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.app.ActivityThread;
+import android.app.AppOpsManager;
+import android.content.Context;
+import android.media.VolumeShaper;
+import android.os.Binder;
+import android.os.IBinder;
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.os.Process;
+import android.os.RemoteException;
+import android.os.ServiceManager;
+import android.util.Log;
+
+import com.android.internal.app.IAppOpsCallback;
+import com.android.internal.app.IAppOpsService;
+
+import java.lang.IllegalArgumentException;
+import java.lang.ref.WeakReference;
+import java.util.Objects;
+
/**
 * Class to encapsulate a number of common player operations:
 * - AppOps for OP_PLAY_AUDIO
 * - more to come (routing, transport control)
 * @hide
 */
public abstract class PlayerBase {

    private static final String TAG = "PlayerBase";
    private static final boolean DEBUG = false;
    private static IAudioService sService; //lazy initialization, use getService()
    /** Debug app ops */
    private static final boolean DEBUG_APP_OPS = false;

    // parameters of the player that affect AppOps
    protected AudioAttributes mAttributes;
    protected float mLeftVolume = 1.0f;
    protected float mRightVolume = 1.0f;
    protected float mAuxEffectSendLevel = 0.0f;

    // for AppOps
    private IAppOpsService mAppOps; // may be null
    private IAppOpsCallback mAppOpsCallback;
    // Optimistic default: playback is assumed allowed until the first AppOps
    // check in baseRegisterPlayer() / updateAppOpsPlayAudio() says otherwise.
    private boolean mHasAppOpsPlayAudio = true; // sync'd on mLock
    private final Object mLock = new Object();

    private final int mImplType;
    // uniquely identifies the Player Interface throughout the system (P I Id)
    private int mPlayerIId;

    private int mState; // sync'd on mLock
    private int mStartDelayMs = 0; // sync'd on mLock
    private float mPanMultiplierL = 1.0f; // sync'd on mLock
    private float mPanMultiplierR = 1.0f; // sync'd on mLock

    /**
     * Constructor. Must be given audio attributes, as they are required for AppOps.
     * @param attr non-null audio attributes
     * @param implType non-null type of the implementation of this abstract class
     */
    PlayerBase(@NonNull AudioAttributes attr, int implType) {
        if (attr == null) {
            throw new IllegalArgumentException("Illegal null AudioAttributes");
        }
        mAttributes = attr;
        mImplType = implType;
        mState = AudioPlaybackConfiguration.PLAYER_STATE_IDLE;
    };

    /**
     * Call from derived class when instantiation / initialization is successful.
     * Registers the AppOps OP_PLAY_AUDIO watcher and registers this player with
     * the audio service; mPlayerIId stays PLAYER_PIID_INVALID if that fails.
     */
    protected void baseRegisterPlayer() {
        int newPiid = AudioPlaybackConfiguration.PLAYER_PIID_INVALID;
        IBinder b = ServiceManager.getService(Context.APP_OPS_SERVICE);
        mAppOps = IAppOpsService.Stub.asInterface(b);
        // initialize mHasAppOpsPlayAudio
        updateAppOpsPlayAudio();
        // register a callback to monitor whether the OP_PLAY_AUDIO is still allowed
        mAppOpsCallback = new IAppOpsCallbackWrapper(this);
        try {
            mAppOps.startWatchingMode(AppOpsManager.OP_PLAY_AUDIO,
                    ActivityThread.currentPackageName(), mAppOpsCallback);
        } catch (RemoteException e) {
            mHasAppOpsPlayAudio = false;
        }
        try {
            newPiid = getService().trackPlayer(
                    new PlayerIdCard(mImplType, mAttributes, new IPlayerWrapper(this)));
        } catch (RemoteException e) {
            Log.e(TAG, "Error talking to audio service, player will not be tracked", e);
        }
        mPlayerIId = newPiid;
    }

    /**
     * To be called whenever the audio attributes of the player change
     * @param attr non-null audio attributes
     */
    void baseUpdateAudioAttributes(@NonNull AudioAttributes attr) {
        if (attr == null) {
            throw new IllegalArgumentException("Illegal null AudioAttributes");
        }
        try {
            getService().playerAttributes(mPlayerIId, attr);
        } catch (RemoteException e) {
            Log.e(TAG, "Error talking to audio service, STARTED state will not be tracked", e);
        }
        synchronized (mLock) {
            mAttributes = attr;
            // attributes affect the AppOps restriction check, so re-evaluate
            updateAppOpsPlayAudio_sync();
        }
    }

    /**
     * To be called by the subclass when playback starts: reports the STARTED
     * state to the audio service and re-applies muting if playback is restricted.
     */
    void baseStart() {
        if (DEBUG) { Log.v(TAG, "baseStart() piid=" + mPlayerIId); }
        try {
            synchronized (mLock) {
                mState = AudioPlaybackConfiguration.PLAYER_STATE_STARTED;
                getService().playerEvent(mPlayerIId, mState);
            }
        } catch (RemoteException e) {
            Log.e(TAG, "Error talking to audio service, STARTED state will not be tracked", e);
        }
        synchronized (mLock) {
            if (isRestricted_sync()) {
                playerSetVolume(true/*muting*/,0, 0);
            }
        }
    }

    /** Records the start delay requested for this player (clamped to >= 0). */
    void baseSetStartDelayMs(int delayMs) {
        synchronized(mLock) {
            mStartDelayMs = Math.max(delayMs, 0);
        }
    }

    /** @return the start delay previously set through baseSetStartDelayMs(). */
    protected int getStartDelayMs() {
        synchronized(mLock) {
            return mStartDelayMs;
        }
    }

    /** To be called by the subclass on pause: reports PAUSED to the audio service. */
    void basePause() {
        if (DEBUG) { Log.v(TAG, "basePause() piid=" + mPlayerIId); }
        try {
            synchronized (mLock) {
                mState = AudioPlaybackConfiguration.PLAYER_STATE_PAUSED;
                getService().playerEvent(mPlayerIId, mState);
            }
        } catch (RemoteException e) {
            Log.e(TAG, "Error talking to audio service, PAUSED state will not be tracked", e);
        }
    }

    /** To be called by the subclass on stop: reports STOPPED to the audio service. */
    void baseStop() {
        if (DEBUG) { Log.v(TAG, "baseStop() piid=" + mPlayerIId); }
        try {
            synchronized (mLock) {
                mState = AudioPlaybackConfiguration.PLAYER_STATE_STOPPED;
                getService().playerEvent(mPlayerIId, mState);
            }
        } catch (RemoteException e) {
            Log.e(TAG, "Error talking to audio service, STOPPED state will not be tracked", e);
        }
    }

    /**
     * Applies a pan value in [-1, 1] (clamped) as per-channel volume attenuation:
     * panning right (p > 0) attenuates the left channel, panning left attenuates
     * the right one, then re-applies the current volumes.
     */
    void baseSetPan(float pan) {
        final float p = Math.min(Math.max(-1.0f, pan), 1.0f);
        synchronized (mLock) {
            if (p >= 0.0f) {
                mPanMultiplierL = 1.0f - p;
                mPanMultiplierR = 1.0f;
            } else {
                mPanMultiplierL = 1.0f;
                mPanMultiplierR = 1.0f + p;
            }
        }
        baseSetVolume(mLeftVolume, mRightVolume);
    }

    /**
     * Caches the requested volumes and forwards them (scaled by the pan
     * multipliers) to the implementation, unless playback is restricted, in
     * which case the values are only cached for later un-muting.
     */
    void baseSetVolume(float leftVolume, float rightVolume) {
        final boolean hasAppOpsPlayAudio;
        synchronized (mLock) {
            mLeftVolume = leftVolume;
            mRightVolume = rightVolume;
            hasAppOpsPlayAudio = mHasAppOpsPlayAudio;
            if (isRestricted_sync()) {
                return;
            }
        }
        playerSetVolume(!hasAppOpsPlayAudio/*muting*/,
                leftVolume * mPanMultiplierL, rightVolume * mPanMultiplierR);
    }

    /**
     * Caches the aux effect send level and forwards it to the implementation
     * unless playback is restricted.
     * @return AudioSystem.SUCCESS when restricted, otherwise the implementation's
     *         status code.
     */
    int baseSetAuxEffectSendLevel(float level) {
        synchronized (mLock) {
            mAuxEffectSendLevel = level;
            if (isRestricted_sync()) {
                return AudioSystem.SUCCESS;
            }
        }
        return playerSetAuxEffectSendLevel(false/*muting*/, level);
    }

    /**
     * To be called from a subclass release or finalize method.
     * Releases AppOps related resources.
     */
    void baseRelease() {
        if (DEBUG) { Log.v(TAG, "baseRelease() piid=" + mPlayerIId + " state=" + mState); }
        try {
            synchronized (mLock) {
                // only notify the service once per player
                if (mState != AudioPlaybackConfiguration.PLAYER_STATE_RELEASED) {
                    getService().releasePlayer(mPlayerIId);
                    mState = AudioPlaybackConfiguration.PLAYER_STATE_RELEASED;
                }
            }
        } catch (RemoteException e) {
            Log.e(TAG, "Error talking to audio service, the player will still be tracked", e);
        }
        try {
            if (mAppOps != null) {
                mAppOps.stopWatchingMode(mAppOpsCallback);
            }
        } catch (Exception e) {
            // nothing to do here, the object is supposed to be released anyway
        }
    }

    /** Lock-acquiring wrapper around updateAppOpsPlayAudio_sync(). */
    private void updateAppOpsPlayAudio() {
        synchronized (mLock) {
            updateAppOpsPlayAudio_sync();
        }
    }

    /**
     * To be called whenever a condition that might affect audibility of this player is updated.
     * Must be called synchronized on mLock.
     */
    void updateAppOpsPlayAudio_sync() {
        boolean oldHasAppOpsPlayAudio = mHasAppOpsPlayAudio;
        try {
            // if the AppOps service is unreachable, default to "not allowed"
            int mode = AppOpsManager.MODE_IGNORED;
            if (mAppOps != null) {
                mode = mAppOps.checkAudioOperation(AppOpsManager.OP_PLAY_AUDIO,
                        mAttributes.getUsage(),
                        Process.myUid(), ActivityThread.currentPackageName());
            }
            mHasAppOpsPlayAudio = (mode == AppOpsManager.MODE_ALLOWED);
        } catch (RemoteException e) {
            mHasAppOpsPlayAudio = false;
        }

        // AppsOps alters a player's volume; when the restriction changes, reflect it on the actual
        // volume used by the player
        try {
            if (oldHasAppOpsPlayAudio != mHasAppOpsPlayAudio) {
                getService().playerHasOpPlayAudio(mPlayerIId, mHasAppOpsPlayAudio);
                if (mHasAppOpsPlayAudio) {
                    if (DEBUG_APP_OPS) {
                        Log.v(TAG, "updateAppOpsPlayAudio: unmuting player, vol=" + mLeftVolume
                                + "/" + mRightVolume);
                    }
                    // restore the cached volumes and aux send level
                    playerSetVolume(false/*muting*/,
                            mLeftVolume * mPanMultiplierL, mRightVolume * mPanMultiplierR);
                    playerSetAuxEffectSendLevel(false/*muting*/, mAuxEffectSendLevel);
                } else {
                    if (DEBUG_APP_OPS) {
                        Log.v(TAG, "updateAppOpsPlayAudio: muting player");
                    }
                    playerSetVolume(true/*muting*/, 0.0f, 0.0f);
                    playerSetAuxEffectSendLevel(true/*muting*/, 0.0f);
                }
            }
        } catch (Exception e) {
            // failing silently, player might not be in right state
        }
    }

    /**
     * To be called by the subclass whenever an operation is potentially restricted.
     * As the media player-common behavior are incorporated into this class, the subclass's need
     * to call this method should be removed, and this method could become private.
     * FIXME can this method be private so subclasses don't have to worry about when to check
     * the restrictions.
     * @return true when playback must be muted because OP_PLAY_AUDIO is denied and
     *         no bypass condition (interruption-policy bypass flag, or forced camera
     *         shutter sound) applies.
     */
    boolean isRestricted_sync() {
        // check app ops
        if (mHasAppOpsPlayAudio) {
            return false;
        }
        // check bypass flag
        if ((mAttributes.getAllFlags() & AudioAttributes.FLAG_BYPASS_INTERRUPTION_POLICY) != 0) {
            return false;
        }
        // check force audibility flag and camera restriction
        if (((mAttributes.getAllFlags() & AudioAttributes.FLAG_AUDIBILITY_ENFORCED) != 0)
                && (mAttributes.getUsage() == AudioAttributes.USAGE_ASSISTANCE_SONIFICATION)) {
            boolean cameraSoundForced = false;
            try {
                cameraSoundForced = getService().isCameraSoundForced();
            } catch (RemoteException e) {
                Log.e(TAG, "Cannot access AudioService in isRestricted_sync()");
            } catch (NullPointerException e) {
                Log.e(TAG, "Null AudioService in isRestricted_sync()");
            }
            if (cameraSoundForced) {
                return false;
            }
        }
        return true;
    }

    /** Lazily fetches and caches the IAudioService binder interface. */
    private static IAudioService getService()
    {
        if (sService != null) {
            return sService;
        }
        IBinder b = ServiceManager.getService(Context.AUDIO_SERVICE);
        sService = IAudioService.Stub.asInterface(b);
        return sService;
    }

    /**
     * @hide
     * Public entry point delegating to baseSetStartDelayMs().
     * @param delayMs requested start delay in milliseconds (negative values clamp to 0)
     */
    public void setStartDelayMs(int delayMs) {
        baseSetStartDelayMs(delayMs);
    }

    //=====================================================================
    // Abstract methods a subclass needs to implement
    /**
     * Abstract method for the subclass behavior's for volume and muting commands
     * @param muting if true, the player is to be muted, and the volume values can be ignored
     * @param leftVolume the left volume to use if muting is false
     * @param rightVolume the right volume to use if muting is false
     */
    abstract void playerSetVolume(boolean muting, float leftVolume, float rightVolume);

    /**
     * Abstract method to apply a {@link VolumeShaper.Configuration}
     * and a {@link VolumeShaper.Operation} to the Player.
     * This should be overridden by the Player to call into the native
     * VolumeShaper implementation. Multiple {@code VolumeShapers} may be
     * concurrently active for a given Player, each accessible by the
     * {@code VolumeShaper} id.
     *
     * The {@code VolumeShaper} implementation caches the id returned
     * when applying a fully specified configuration
     * from {VolumeShaper.Configuration.Builder} to track later
     * operation changes requested on it.
     *
     * @param configuration a {@code VolumeShaper.Configuration} object
     *        created by {@link VolumeShaper.Configuration.Builder} or
     *        an created from a {@code VolumeShaper} id
     *        by the {@link VolumeShaper.Configuration} constructor.
     * @param operation a {@code VolumeShaper.Operation}.
     * @return a negative error status or a
     *         non-negative {@code VolumeShaper} id on success.
     */
    /* package */ abstract int playerApplyVolumeShaper(
            @NonNull VolumeShaper.Configuration configuration,
            @NonNull VolumeShaper.Operation operation);

    /**
     * Abstract method to get the current VolumeShaper state.
     * @param id the {@code VolumeShaper} id returned from
     *           sending a fully specified {@code VolumeShaper.Configuration}
     *           through {@link #playerApplyVolumeShaper}
     * @return a {@code VolumeShaper.State} object or null if
     *         there is no {@code VolumeShaper} for the id.
     */
    /* package */ abstract @Nullable VolumeShaper.State playerGetVolumeShaperState(int id);

    /** Subclass hook applying an aux effect send level; returns a status code. */
    abstract int playerSetAuxEffectSendLevel(boolean muting, float level);
    /** Subclass hooks for remote transport control (see IPlayerWrapper). */
    abstract void playerStart();
    abstract void playerPause();
    abstract void playerStop();

    //=====================================================================
    /**
     * AppOps callback that holds only a weak reference to the PlayerBase so the
     * registered callback does not keep the player alive.
     */
    private static class IAppOpsCallbackWrapper extends IAppOpsCallback.Stub {
        private final WeakReference<PlayerBase> mWeakPB;

        public IAppOpsCallbackWrapper(PlayerBase pb) {
            mWeakPB = new WeakReference<PlayerBase>(pb);
        }

        @Override
        public void opChanged(int op, int uid, String packageName) {
            if (op == AppOpsManager.OP_PLAY_AUDIO) {
                if (DEBUG_APP_OPS) { Log.v(TAG, "opChanged: op=PLAY_AUDIO pack=" + packageName); }
                final PlayerBase pb = mWeakPB.get();
                if (pb != null) {
                    pb.updateAppOpsPlayAudio();
                }
            }
        }
    }

    //=====================================================================
    /**
     * Wrapper around an implementation of IPlayer for all subclasses of PlayerBase
     * that doesn't keep a strong reference on PlayerBase
     */
    private static class IPlayerWrapper extends IPlayer.Stub {
        private final WeakReference<PlayerBase> mWeakPB;

        public IPlayerWrapper(PlayerBase pb) {
            mWeakPB = new WeakReference<PlayerBase>(pb);
        }

        @Override
        public void start() {
            final PlayerBase pb = mWeakPB.get();
            if (pb != null) {
                pb.playerStart();
            }
        }

        @Override
        public void pause() {
            final PlayerBase pb = mWeakPB.get();
            if (pb != null) {
                pb.playerPause();
            }
        }

        @Override
        public void stop() {
            final PlayerBase pb = mWeakPB.get();
            if (pb != null) {
                pb.playerStop();
            }
        }

        @Override
        public void setVolume(float vol) {
            final PlayerBase pb = mWeakPB.get();
            if (pb != null) {
                pb.baseSetVolume(vol, vol);
            }
        }

        @Override
        public void setPan(float pan) {
            final PlayerBase pb = mWeakPB.get();
            if (pb != null) {
                pb.baseSetPan(pan);
            }
        }

        @Override
        public void setStartDelayMs(int delayMs) {
            final PlayerBase pb = mWeakPB.get();
            if (pb != null) {
                pb.baseSetStartDelayMs(delayMs);
            }
        }

        @Override
        public void applyVolumeShaper(
                @NonNull VolumeShaper.Configuration configuration,
                @NonNull VolumeShaper.Operation operation) {
            final PlayerBase pb = mWeakPB.get();
            if (pb != null) {
                pb.playerApplyVolumeShaper(configuration, operation);
            }
        }
    }

    //=====================================================================
    /**
     * Class holding all the information about a player that needs to be known at registration time
     */
    public static class PlayerIdCard implements Parcelable {
        public final int mPlayerType;

        public static final int AUDIO_ATTRIBUTES_NONE = 0;
        public static final int AUDIO_ATTRIBUTES_DEFINED = 1;
        public final AudioAttributes mAttributes;
        public final IPlayer mIPlayer;

        PlayerIdCard(int type, @NonNull AudioAttributes attr, @NonNull IPlayer iplayer) {
            mPlayerType = type;
            mAttributes = attr;
            mIPlayer = iplayer;
        }

        @Override
        public int hashCode() {
            // intentionally based on mPlayerType only, consistent with equals()
            return Objects.hash(mPlayerType);
        }

        @Override
        public int describeContents() {
            return 0;
        }

        @Override
        public void writeToParcel(Parcel dest, int flags) {
            // field order must mirror PlayerIdCard(Parcel)
            dest.writeInt(mPlayerType);
            mAttributes.writeToParcel(dest, 0);
            dest.writeStrongBinder(mIPlayer == null ? null : mIPlayer.asBinder());
        }

        public static final Parcelable.Creator<PlayerIdCard> CREATOR
        = new Parcelable.Creator<PlayerIdCard>() {
            /**
             * Rebuilds an PlayerIdCard previously stored with writeToParcel().
             * @param p Parcel object to read the PlayerIdCard from
             * @return a new PlayerIdCard created from the data in the parcel
             */
            public PlayerIdCard createFromParcel(Parcel p) {
                return new PlayerIdCard(p);
            }
            public PlayerIdCard[] newArray(int size) {
                return new PlayerIdCard[size];
            }
        };

        private PlayerIdCard(Parcel in) {
            mPlayerType = in.readInt();
            mAttributes = AudioAttributes.CREATOR.createFromParcel(in);
            // IPlayer can be null if unmarshalling a Parcel coming from who knows where
            final IBinder b = in.readStrongBinder();
            mIPlayer = (b == null ? null : IPlayer.Stub.asInterface(b));
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || !(o instanceof PlayerIdCard)) return false;

            PlayerIdCard that = (PlayerIdCard) o;

            // FIXME change to the binder player interface once supported as a member
            return ((mPlayerType == that.mPlayerType) && mAttributes.equals(that.mAttributes));
        }
    }

    //=====================================================================
    // Utilities

    /**
     * Use to generate warning or exception in legacy code paths that allowed passing stream types
     * to qualify audio playback.
     * @param streamType the stream type to check
     * @param className used as the log tag for the deprecation warnings
     * @param opName name of the calling operation, included in the warning text
     * @throws IllegalArgumentException when streamType is STREAM_ACCESSIBILITY
     */
    public static void deprecateStreamTypeForPlayback(int streamType, String className,
            String opName) throws IllegalArgumentException {
        // STREAM_ACCESSIBILITY was introduced at the same time the use of stream types
        // for audio playback was deprecated, so it is not allowed at all to qualify a playback
        // use case
        if (streamType == AudioManager.STREAM_ACCESSIBILITY) {
            throw new IllegalArgumentException("Use of STREAM_ACCESSIBILITY is reserved for "
                    + "volume control");
        }
        Log.w(className, "Use of stream types is deprecated for operations other than " +
                "volume control");
        Log.w(className, "See the documentation of " + opName + " for what to use instead with " +
                "android.media.AudioAttributes to qualify your playback use case");
    }
}
diff --git a/android/media/PlayerProxy.java b/android/media/PlayerProxy.java
new file mode 100644
index 00000000..5f3997a5
--- /dev/null
+++ b/android/media/PlayerProxy.java
@@ -0,0 +1,153 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.NonNull;
+import android.annotation.SystemApi;
+import android.media.VolumeShaper;
+import android.os.RemoteException;
+import android.util.Log;
+
+import java.lang.IllegalArgumentException;
+import java.util.Objects;
+
+/**
+ * Class to remotely control a player.
+ * @hide
+ */
+@SystemApi
+public class PlayerProxy {
+
+ private final static String TAG = "PlayerProxy";
+ private final static boolean DEBUG = false;
+
+ private final AudioPlaybackConfiguration mConf; // never null
+
+ /**
+ * @hide
+ * Constructor. Proxy for this player associated with this AudioPlaybackConfiguration
+ * @param conf the configuration being proxied.
+ */
+ PlayerProxy(@NonNull AudioPlaybackConfiguration apc) {
+ if (apc == null) {
+ throw new IllegalArgumentException("Illegal null AudioPlaybackConfiguration");
+ }
+ mConf = apc;
+ };
+
+ //=====================================================================
+ // Methods matching the IPlayer interface
+ /**
+ * @hide
+ */
+ @SystemApi
+ public void start() {
+ try {
+ mConf.getIPlayer().start();
+ } catch (NullPointerException|RemoteException e) {
+ throw new IllegalStateException(
+ "No player to proxy for start operation, player already released?", e);
+ }
+ }
+
+ /**
+ * @hide
+ */
+ @SystemApi
+ public void pause() {
+ try {
+ mConf.getIPlayer().pause();
+ } catch (NullPointerException|RemoteException e) {
+ throw new IllegalStateException(
+ "No player to proxy for pause operation, player already released?", e);
+ }
+ }
+
+ /**
+ * @hide
+ */
+ @SystemApi
+ public void stop() {
+ try {
+ mConf.getIPlayer().stop();
+ } catch (NullPointerException|RemoteException e) {
+ throw new IllegalStateException(
+ "No player to proxy for stop operation, player already released?", e);
+ }
+ }
+
+ /**
+ * @hide
+ * @param vol
+ */
+ @SystemApi
+ public void setVolume(float vol) {
+ try {
+ mConf.getIPlayer().setVolume(vol);
+ } catch (NullPointerException|RemoteException e) {
+ throw new IllegalStateException(
+ "No player to proxy for setVolume operation, player already released?", e);
+ }
+ }
+
+ /**
+ * @hide
+ * @param pan
+ */
+ @SystemApi
+ public void setPan(float pan) {
+ try {
+ mConf.getIPlayer().setPan(pan);
+ } catch (NullPointerException|RemoteException e) {
+ throw new IllegalStateException(
+ "No player to proxy for setPan operation, player already released?", e);
+ }
+ }
+
+ /**
+ * @hide
+ * @param delayMs
+ */
+ @SystemApi
+ public void setStartDelayMs(int delayMs) {
+ try {
+ mConf.getIPlayer().setStartDelayMs(delayMs);
+ } catch (NullPointerException|RemoteException e) {
+ throw new IllegalStateException(
+ "No player to proxy for setStartDelayMs operation, player already released?",
+ e);
+ }
+ }
+
+ /**
+ * @hide
+ * @param configuration
+ * @param operation
+ * @return volume shaper id or error
+ */
+ public void applyVolumeShaper(
+ @NonNull VolumeShaper.Configuration configuration,
+ @NonNull VolumeShaper.Operation operation) {
+ try {
+ mConf.getIPlayer().applyVolumeShaper(configuration, operation);
+ } catch (NullPointerException|RemoteException e) {
+ throw new IllegalStateException(
+ "No player to proxy for applyVolumeShaper operation,"
+ + " player already released?", e);
+ }
+ }
+}
diff --git a/android/media/Rating.java b/android/media/Rating.java
new file mode 100644
index 00000000..04d5364f
--- /dev/null
+++ b/android/media/Rating.java
@@ -0,0 +1,308 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.IntDef;
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.util.Log;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+/**
+ * A class to encapsulate rating information used as content metadata.
+ * A rating is defined by its rating style (see {@link #RATING_HEART},
+ * {@link #RATING_THUMB_UP_DOWN}, {@link #RATING_3_STARS}, {@link #RATING_4_STARS},
+ * {@link #RATING_5_STARS} or {@link #RATING_PERCENTAGE}) and the actual rating value (which may
+ * be defined as "unrated"), both of which are defined when the rating instance is constructed
+ * through one of the factory methods.
+ */
+public final class Rating implements Parcelable {
+ private final static String TAG = "Rating";
+
+ /**
+ * @hide
+ */
+ @IntDef({RATING_NONE, RATING_HEART, RATING_THUMB_UP_DOWN, RATING_3_STARS, RATING_4_STARS,
+ RATING_5_STARS, RATING_PERCENTAGE})
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface Style {}
+
+ /**
+ * @hide
+ */
+ @IntDef({RATING_3_STARS, RATING_4_STARS, RATING_5_STARS})
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface StarStyle {}
+
+ /**
+ * Indicates a rating style is not supported. A Rating will never have this
+ * type, but can be used by other classes to indicate they do not support
+ * Rating.
+ */
+ public final static int RATING_NONE = 0;
+
+ /**
+ * A rating style with a single degree of rating, "heart" vs "no heart". Can be used to
+ * indicate the content referred to is a favorite (or not).
+ */
+ public final static int RATING_HEART = 1;
+
+ /**
+ * A rating style for "thumb up" vs "thumb down".
+ */
+ public final static int RATING_THUMB_UP_DOWN = 2;
+
+ /**
+ * A rating style with 0 to 3 stars.
+ */
+ public final static int RATING_3_STARS = 3;
+
+ /**
+ * A rating style with 0 to 4 stars.
+ */
+ public final static int RATING_4_STARS = 4;
+
+ /**
+ * A rating style with 0 to 5 stars.
+ */
+ public final static int RATING_5_STARS = 5;
+
+ /**
+ * A rating style expressed as a percentage.
+ */
+ public final static int RATING_PERCENTAGE = 6;
+
+ private final static float RATING_NOT_RATED = -1.0f;
+
+ private final int mRatingStyle;
+
+ private final float mRatingValue;
+
+ private Rating(@Style int ratingStyle, float rating) {
+ mRatingStyle = ratingStyle;
+ mRatingValue = rating;
+ }
+
+ @Override
+ public String toString() {
+ return "Rating:style=" + mRatingStyle + " rating="
+ + (mRatingValue < 0.0f ? "unrated" : String.valueOf(mRatingValue));
+ }
+
+ @Override
+ public int describeContents() {
+ return mRatingStyle;
+ }
+
+ @Override
+ public void writeToParcel(Parcel dest, int flags) {
+ dest.writeInt(mRatingStyle);
+ dest.writeFloat(mRatingValue);
+ }
+
+ public static final Parcelable.Creator<Rating> CREATOR
+ = new Parcelable.Creator<Rating>() {
+ /**
+ * Rebuilds a Rating previously stored with writeToParcel().
+ * @param p Parcel object to read the Rating from
+ * @return a new Rating created from the data in the parcel
+ */
+ @Override
+ public Rating createFromParcel(Parcel p) {
+ return new Rating(p.readInt(), p.readFloat());
+ }
+
+ @Override
+ public Rating[] newArray(int size) {
+ return new Rating[size];
+ }
+ };
+
+ /**
+ * Return a Rating instance with no rating.
+ * Create and return a new Rating instance with no rating known for the given
+ * rating style.
+ * @param ratingStyle one of {@link #RATING_HEART}, {@link #RATING_THUMB_UP_DOWN},
+ * {@link #RATING_3_STARS}, {@link #RATING_4_STARS}, {@link #RATING_5_STARS},
+ * or {@link #RATING_PERCENTAGE}.
+ * @return null if an invalid rating style is passed, a new Rating instance otherwise.
+ */
+ public static Rating newUnratedRating(@Style int ratingStyle) {
+ switch(ratingStyle) {
+ case RATING_HEART:
+ case RATING_THUMB_UP_DOWN:
+ case RATING_3_STARS:
+ case RATING_4_STARS:
+ case RATING_5_STARS:
+ case RATING_PERCENTAGE:
+ return new Rating(ratingStyle, RATING_NOT_RATED);
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Return a Rating instance with a heart-based rating.
+ * Create and return a new Rating instance with a rating style of {@link #RATING_HEART},
+ * and a heart-based rating.
+ * @param hasHeart true for a "heart selected" rating, false for "heart unselected".
+ * @return a new Rating instance.
+ */
+ public static Rating newHeartRating(boolean hasHeart) {
+ return new Rating(RATING_HEART, hasHeart ? 1.0f : 0.0f);
+ }
+
+ /**
+ * Return a Rating instance with a thumb-based rating.
+ * Create and return a new Rating instance with a {@link #RATING_THUMB_UP_DOWN}
+ * rating style, and a "thumb up" or "thumb down" rating.
+ * @param thumbIsUp true for a "thumb up" rating, false for "thumb down".
+ * @return a new Rating instance.
+ */
+ public static Rating newThumbRating(boolean thumbIsUp) {
+ return new Rating(RATING_THUMB_UP_DOWN, thumbIsUp ? 1.0f : 0.0f);
+ }
+
+ /**
+ * Return a Rating instance with a star-based rating.
+ * Create and return a new Rating instance with one of the star-base rating styles
+ * and the given integer or fractional number of stars. Non integer values can for instance
+ * be used to represent an average rating value, which might not be an integer number of stars.
+ * @param starRatingStyle one of {@link #RATING_3_STARS}, {@link #RATING_4_STARS},
+ * {@link #RATING_5_STARS}.
+ * @param starRating a number ranging from 0.0f to 3.0f, 4.0f or 5.0f according to
+ * the rating style.
+ * @return null if the rating style is invalid, or the rating is out of range,
+ * a new Rating instance otherwise.
+ */
+ public static Rating newStarRating(@StarStyle int starRatingStyle, float starRating) {
+ float maxRating = -1.0f;
+ switch(starRatingStyle) {
+ case RATING_3_STARS:
+ maxRating = 3.0f;
+ break;
+ case RATING_4_STARS:
+ maxRating = 4.0f;
+ break;
+ case RATING_5_STARS:
+ maxRating = 5.0f;
+ break;
+ default:
+ Log.e(TAG, "Invalid rating style (" + starRatingStyle + ") for a star rating");
+ return null;
+ }
+ if ((starRating < 0.0f) || (starRating > maxRating)) {
+ Log.e(TAG, "Trying to set out of range star-based rating");
+ return null;
+ }
+ return new Rating(starRatingStyle, starRating);
+ }
+
+ /**
+ * Return a Rating instance with a percentage-based rating.
+ * Create and return a new Rating instance with a {@link #RATING_PERCENTAGE}
+ * rating style, and a rating of the given percentage.
+ * @param percent the value of the rating
+ * @return null if the rating is out of range, a new Rating instance otherwise.
+ */
+ public static Rating newPercentageRating(float percent) {
+ if ((percent < 0.0f) || (percent > 100.0f)) {
+ Log.e(TAG, "Invalid percentage-based rating value");
+ return null;
+ } else {
+ return new Rating(RATING_PERCENTAGE, percent);
+ }
+ }
+
+ /**
+ * Return whether there is a rating value available.
+ * @return true if the instance was not created with {@link #newUnratedRating(int)}.
+ */
+ public boolean isRated() {
+ return mRatingValue >= 0.0f;
+ }
+
+ /**
+ * Return the rating style.
+ * @return one of {@link #RATING_HEART}, {@link #RATING_THUMB_UP_DOWN},
+ * {@link #RATING_3_STARS}, {@link #RATING_4_STARS}, {@link #RATING_5_STARS},
+ * or {@link #RATING_PERCENTAGE}.
+ */
+ @Style
+ public int getRatingStyle() {
+ return mRatingStyle;
+ }
+
+ /**
+ * Return whether the rating is "heart selected".
+ * @return true if the rating is "heart selected", false if the rating is "heart unselected",
+ * if the rating style is not {@link #RATING_HEART} or if it is unrated.
+ */
+ public boolean hasHeart() {
+ if (mRatingStyle != RATING_HEART) {
+ return false;
+ } else {
+ return (mRatingValue == 1.0f);
+ }
+ }
+
+ /**
+ * Return whether the rating is "thumb up".
+ * @return true if the rating is "thumb up", false if the rating is "thumb down",
+ * if the rating style is not {@link #RATING_THUMB_UP_DOWN} or if it is unrated.
+ */
+ public boolean isThumbUp() {
+ if (mRatingStyle != RATING_THUMB_UP_DOWN) {
+ return false;
+ } else {
+ return (mRatingValue == 1.0f);
+ }
+ }
+
+ /**
+ * Return the star-based rating value.
+ * @return a rating value greater or equal to 0.0f, or a negative value if the rating style is
+ * not star-based, or if it is unrated.
+ */
+ public float getStarRating() {
+ switch (mRatingStyle) {
+ case RATING_3_STARS:
+ case RATING_4_STARS:
+ case RATING_5_STARS:
+ if (isRated()) {
+ return mRatingValue;
+ }
+ default:
+ return -1.0f;
+ }
+ }
+
+ /**
+ * Return the percentage-based rating value.
+ * @return a rating value greater or equal to 0.0f, or a negative value if the rating style is
+ * not percentage-based, or if it is unrated.
+ */
+ public float getPercentRating() {
+ if ((mRatingStyle != RATING_PERCENTAGE) || !isRated()) {
+ return -1.0f;
+ } else {
+ return mRatingValue;
+ }
+ }
+}
diff --git a/android/media/RemoteControlClient.java b/android/media/RemoteControlClient.java
new file mode 100644
index 00000000..6d32eff9
--- /dev/null
+++ b/android/media/RemoteControlClient.java
@@ -0,0 +1,1025 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.app.PendingIntent;
+import android.content.ComponentName;
+import android.content.Intent;
+import android.graphics.Bitmap;
+import android.media.session.MediaSessionLegacyHelper;
+import android.media.session.PlaybackState;
+import android.media.session.MediaSession;
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Message;
+import android.os.SystemClock;
+import android.util.Log;
+
+import java.lang.IllegalArgumentException;
+
+/**
+ * RemoteControlClient enables exposing information meant to be consumed by remote controls
+ * capable of displaying metadata, artwork and media transport control buttons.
+ *
+ * <p>A remote control client object is associated with a media button event receiver. This
+ * event receiver must have been previously registered with
+ * {@link AudioManager#registerMediaButtonEventReceiver(ComponentName)} before the
+ * RemoteControlClient can be registered through
+ * {@link AudioManager#registerRemoteControlClient(RemoteControlClient)}.
+ *
+ * <p>Here is an example of creating a RemoteControlClient instance after registering a media
+ * button event receiver:
+ * <pre>ComponentName myEventReceiver = new ComponentName(getPackageName(), MyRemoteControlEventReceiver.class.getName());
+ * AudioManager myAudioManager = (AudioManager) getSystemService(Context.AUDIO_SERVICE);
+ * myAudioManager.registerMediaButtonEventReceiver(myEventReceiver);
+ * // build the PendingIntent for the remote control client
+ * Intent mediaButtonIntent = new Intent(Intent.ACTION_MEDIA_BUTTON);
+ * mediaButtonIntent.setComponent(myEventReceiver);
+ * PendingIntent mediaPendingIntent = PendingIntent.getBroadcast(getApplicationContext(), 0, mediaButtonIntent, 0);
+ * // create and register the remote control client
+ * RemoteControlClient myRemoteControlClient = new RemoteControlClient(mediaPendingIntent);
+ * myAudioManager.registerRemoteControlClient(myRemoteControlClient);</pre>
+ *
+ * @deprecated Use {@link MediaSession} instead.
+ */
+@Deprecated public class RemoteControlClient
+{
+ private final static String TAG = "RemoteControlClient";
+ private final static boolean DEBUG = false;
+
+ /**
+ * Playback state of a RemoteControlClient which is stopped.
+ *
+ * @see #setPlaybackState(int)
+ */
+ public final static int PLAYSTATE_STOPPED = 1;
+ /**
+ * Playback state of a RemoteControlClient which is paused.
+ *
+ * @see #setPlaybackState(int)
+ */
+ public final static int PLAYSTATE_PAUSED = 2;
+ /**
+ * Playback state of a RemoteControlClient which is playing media.
+ *
+ * @see #setPlaybackState(int)
+ */
+ public final static int PLAYSTATE_PLAYING = 3;
+ /**
+ * Playback state of a RemoteControlClient which is fast forwarding in the media
+ * it is currently playing.
+ *
+ * @see #setPlaybackState(int)
+ */
+ public final static int PLAYSTATE_FAST_FORWARDING = 4;
+ /**
+ * Playback state of a RemoteControlClient which is fast rewinding in the media
+ * it is currently playing.
+ *
+ * @see #setPlaybackState(int)
+ */
+ public final static int PLAYSTATE_REWINDING = 5;
+ /**
+ * Playback state of a RemoteControlClient which is skipping to the next
+ * logical chapter (such as a song in a playlist) in the media it is currently playing.
+ *
+ * @see #setPlaybackState(int)
+ */
+ public final static int PLAYSTATE_SKIPPING_FORWARDS = 6;
+ /**
+ * Playback state of a RemoteControlClient which is skipping back to the previous
+ * logical chapter (such as a song in a playlist) in the media it is currently playing.
+ *
+ * @see #setPlaybackState(int)
+ */
+ public final static int PLAYSTATE_SKIPPING_BACKWARDS = 7;
+ /**
+ * Playback state of a RemoteControlClient which is buffering data to play before it can
+ * start or resume playback.
+ *
+ * @see #setPlaybackState(int)
+ */
+ public final static int PLAYSTATE_BUFFERING = 8;
+ /**
+ * Playback state of a RemoteControlClient which cannot perform any playback related
+ * operation because of an internal error. Examples of such situations are no network
+ * connectivity when attempting to stream data from a server, or expired user credentials
+ * when trying to play subscription-based content.
+ *
+ * @see #setPlaybackState(int)
+ */
+ public final static int PLAYSTATE_ERROR = 9;
+ /**
+ * @hide
+ * The value of a playback state when none has been declared.
+ * Intentionally hidden as an application shouldn't set such a playback state value.
+ */
+ public final static int PLAYSTATE_NONE = 0;
+
+ /**
+ * @hide
+ * The default playback type, "local", indicating the presentation of the media is happening on
+ * the same device (e.g. a phone, a tablet) as where it is controlled from.
+ */
+ public final static int PLAYBACK_TYPE_LOCAL = 0;
+ /**
+ * @hide
+ * A playback type indicating the presentation of the media is happening on
+ * a different device (i.e. the remote device) than where it is controlled from.
+ */
+ public final static int PLAYBACK_TYPE_REMOTE = 1;
+ private final static int PLAYBACK_TYPE_MIN = PLAYBACK_TYPE_LOCAL;
+ private final static int PLAYBACK_TYPE_MAX = PLAYBACK_TYPE_REMOTE;
+ /**
+ * @hide
+ * Playback information indicating the playback volume is fixed, i.e. it cannot be controlled
+ * from this object. An example of fixed playback volume is a remote player, playing over HDMI
+ * where the user prefers to control the volume on the HDMI sink, rather than attenuate at the
+ * source.
+ * @see #PLAYBACKINFO_VOLUME_HANDLING.
+ */
+ public final static int PLAYBACK_VOLUME_FIXED = 0;
+ /**
+ * @hide
+ * Playback information indicating the playback volume is variable and can be controlled from
+ * this object.
+ * @see #PLAYBACKINFO_VOLUME_HANDLING.
+ */
+ public final static int PLAYBACK_VOLUME_VARIABLE = 1;
+ /**
+ * @hide (to be un-hidden)
+ * The playback information value indicating the value of a given information type is invalid.
+ * @see #PLAYBACKINFO_VOLUME_HANDLING.
+ */
+ public final static int PLAYBACKINFO_INVALID_VALUE = Integer.MIN_VALUE;
+
+ /**
+ * @hide
+ * An unknown or invalid playback position value.
+ */
+ public final static long PLAYBACK_POSITION_INVALID = -1;
+ /**
+ * @hide
+ * An invalid playback position value associated with the use of {@link #setPlaybackState(int)}
+ * used to indicate that playback position will remain unknown.
+ */
+ public final static long PLAYBACK_POSITION_ALWAYS_UNKNOWN = 0x8019771980198300L;
+ /**
+ * @hide
+ * The default playback speed, 1x.
+ */
+ public final static float PLAYBACK_SPEED_1X = 1.0f;
+
+ //==========================================
+ // Public keys for playback information
+ /**
+ * @hide
+ * Playback information that defines the type of playback associated with this
+ * RemoteControlClient. See {@link #PLAYBACK_TYPE_LOCAL} and {@link #PLAYBACK_TYPE_REMOTE}.
+ */
+ public final static int PLAYBACKINFO_PLAYBACK_TYPE = 1;
+ /**
+ * @hide
+ * Playback information that defines at what volume the playback associated with this
+ * RemoteControlClient is performed. This information is only used when the playback type is not
+ * local (see {@link #PLAYBACKINFO_PLAYBACK_TYPE}).
+ */
+ public final static int PLAYBACKINFO_VOLUME = 2;
+ /**
+ * @hide
+ * Playback information that defines the maximum volume value that is supported
+ * by the playback associated with this RemoteControlClient. This information is only used
+ * when the playback type is not local (see {@link #PLAYBACKINFO_PLAYBACK_TYPE}).
+ */
+ public final static int PLAYBACKINFO_VOLUME_MAX = 3;
+ /**
+ * @hide
+ * Playback information that defines how volume is handled for the presentation of the media.
+ * @see #PLAYBACK_VOLUME_FIXED
+ * @see #PLAYBACK_VOLUME_VARIABLE
+ */
+ public final static int PLAYBACKINFO_VOLUME_HANDLING = 4;
+ /**
+ * @hide
+ * Playback information that defines over what stream type the media is presented.
+ */
+ public final static int PLAYBACKINFO_USES_STREAM = 5;
+
+ //==========================================
+ // Public flags for the supported transport control capabilities
+ /**
+ * Flag indicating a RemoteControlClient makes use of the "previous" media key.
+ *
+ * @see #setTransportControlFlags(int)
+ * @see android.view.KeyEvent#KEYCODE_MEDIA_PREVIOUS
+ */
+ public final static int FLAG_KEY_MEDIA_PREVIOUS = 1 << 0;
+ /**
+ * Flag indicating a RemoteControlClient makes use of the "rewind" media key.
+ *
+ * @see #setTransportControlFlags(int)
+ * @see android.view.KeyEvent#KEYCODE_MEDIA_REWIND
+ */
+ public final static int FLAG_KEY_MEDIA_REWIND = 1 << 1;
+ /**
+ * Flag indicating a RemoteControlClient makes use of the "play" media key.
+ *
+ * @see #setTransportControlFlags(int)
+ * @see android.view.KeyEvent#KEYCODE_MEDIA_PLAY
+ */
+ public final static int FLAG_KEY_MEDIA_PLAY = 1 << 2;
+ /**
+ * Flag indicating a RemoteControlClient makes use of the "play/pause" media key.
+ *
+ * @see #setTransportControlFlags(int)
+ * @see android.view.KeyEvent#KEYCODE_MEDIA_PLAY_PAUSE
+ */
+ public final static int FLAG_KEY_MEDIA_PLAY_PAUSE = 1 << 3;
+ /**
+ * Flag indicating a RemoteControlClient makes use of the "pause" media key.
+ *
+ * @see #setTransportControlFlags(int)
+ * @see android.view.KeyEvent#KEYCODE_MEDIA_PAUSE
+ */
+ public final static int FLAG_KEY_MEDIA_PAUSE = 1 << 4;
+ /**
+ * Flag indicating a RemoteControlClient makes use of the "stop" media key.
+ *
+ * @see #setTransportControlFlags(int)
+ * @see android.view.KeyEvent#KEYCODE_MEDIA_STOP
+ */
+ public final static int FLAG_KEY_MEDIA_STOP = 1 << 5;
+ /**
+ * Flag indicating a RemoteControlClient makes use of the "fast forward" media key.
+ *
+ * @see #setTransportControlFlags(int)
+ * @see android.view.KeyEvent#KEYCODE_MEDIA_FAST_FORWARD
+ */
+ public final static int FLAG_KEY_MEDIA_FAST_FORWARD = 1 << 6;
+ /**
+ * Flag indicating a RemoteControlClient makes use of the "next" media key.
+ *
+ * @see #setTransportControlFlags(int)
+ * @see android.view.KeyEvent#KEYCODE_MEDIA_NEXT
+ */
+ public final static int FLAG_KEY_MEDIA_NEXT = 1 << 7;
+ /**
+ * Flag indicating a RemoteControlClient can receive changes in the media playback position
+ * through the {@link OnPlaybackPositionUpdateListener} interface. This flag must be set
+ * in order for components that display the RemoteControlClient information, to display and
+ * let the user control media playback position.
+ * @see #setTransportControlFlags(int)
+ * @see #setOnGetPlaybackPositionListener(OnGetPlaybackPositionListener)
+ * @see #setPlaybackPositionUpdateListener(OnPlaybackPositionUpdateListener)
+ */
+ public final static int FLAG_KEY_MEDIA_POSITION_UPDATE = 1 << 8;
+ /**
+ * Flag indicating a RemoteControlClient supports ratings.
+ * This flag must be set in order for components that display the RemoteControlClient
+ * information, to display ratings information, and, if ratings are declared editable
+ * (by calling {@link MediaMetadataEditor#addEditableKey(int)} with the
+ * {@link MediaMetadataEditor#RATING_KEY_BY_USER} key), it will enable the user to rate
+ * the media, with values being received through the interface set with
+ * {@link #setMetadataUpdateListener(OnMetadataUpdateListener)}.
+ * @see #setTransportControlFlags(int)
+ */
+ public final static int FLAG_KEY_MEDIA_RATING = 1 << 9;
+
+ /**
+ * @hide
+ * The flags for when no media keys are declared supported.
+ * Intentionally hidden as an application shouldn't set the transport control flags
+ * to this value.
+ */
+ public final static int FLAGS_KEY_MEDIA_NONE = 0;
+
+ /**
+ * @hide
+ * Flag used to signal some type of metadata exposed by the RemoteControlClient is requested.
+ */
+ public final static int FLAG_INFORMATION_REQUEST_METADATA = 1 << 0;
+ /**
+ * @hide
+ * Flag used to signal that the transport control buttons supported by the
+ * RemoteControlClient are requested.
+ * This can for instance happen when playback is at the end of a playlist, and the "next"
+ * operation is not supported anymore.
+ */
+ public final static int FLAG_INFORMATION_REQUEST_KEY_MEDIA = 1 << 1;
+ /**
+ * @hide
+ * Flag used to signal that the playback state of the RemoteControlClient is requested.
+ */
+ public final static int FLAG_INFORMATION_REQUEST_PLAYSTATE = 1 << 2;
+ /**
+ * @hide
+ * Flag used to signal that the album art for the RemoteControlClient is requested.
+ */
+ public final static int FLAG_INFORMATION_REQUEST_ALBUM_ART = 1 << 3;
+
+ private MediaSession mSession;
+
    /**
     * Class constructor.
     * @param mediaButtonIntent The intent that will be sent for the media button events sent
     *     by remote controls.
     *     This intent needs to have been constructed with the {@link Intent#ACTION_MEDIA_BUTTON}
     *     action, and have a component that will handle the intent (set with
     *     {@link Intent#setComponent(ComponentName)}) registered with
     *     {@link AudioManager#registerMediaButtonEventReceiver(ComponentName)}
     *     before this new RemoteControlClient can itself be registered with
     *     {@link AudioManager#registerRemoteControlClient(RemoteControlClient)}.
     * @see AudioManager#registerMediaButtonEventReceiver(ComponentName)
     * @see AudioManager#registerRemoteControlClient(RemoteControlClient)
     */
    public RemoteControlClient(PendingIntent mediaButtonIntent) {
        // Only stores the intent; session wiring happens later in registerWithSession().
        mRcMediaIntent = mediaButtonIntent;
    }
+
    /**
     * Class constructor for a remote control client whose internal event handling
     * happens on a user-provided Looper.
     * @param mediaButtonIntent The intent that will be sent for the media button events sent
     *     by remote controls.
     *     This intent needs to have been constructed with the {@link Intent#ACTION_MEDIA_BUTTON}
     *     action, and have a component that will handle the intent (set with
     *     {@link Intent#setComponent(ComponentName)}) registered with
     *     {@link AudioManager#registerMediaButtonEventReceiver(ComponentName)}
     *     before this new RemoteControlClient can itself be registered with
     *     {@link AudioManager#registerRemoteControlClient(RemoteControlClient)}.
     * @param looper The Looper running the event loop.
     * @see AudioManager#registerMediaButtonEventReceiver(ComponentName)
     * @see AudioManager#registerRemoteControlClient(RemoteControlClient)
     */
    public RemoteControlClient(PendingIntent mediaButtonIntent, Looper looper) {
        // NOTE(review): the looper parameter is ignored in this body; event dispatch
        // presumably moved to the MediaSession-backed path — confirm this is intentional.
        mRcMediaIntent = mediaButtonIntent;
    }
+
    /**
     * Attaches this client to the helper-managed media session: registers the transport
     * listener for this client's media-button intent, caches the resulting session, and
     * re-applies the previously set transport control flags to it.
     * @hide
     * @param helper the legacy helper that owns the session for mRcMediaIntent
     */
    public void registerWithSession(MediaSessionLegacyHelper helper) {
        helper.addRccListener(mRcMediaIntent, mTransportListener);
        mSession = helper.getSession(mRcMediaIntent);
        // Push the cached flags so the freshly obtained session reflects current state.
        setTransportControlFlags(mTransportControlFlags);
    }
+
    /**
     * Detaches this client from the helper-managed media session: removes the transport
     * listener and clears the cached session (getMediaSession() returns null afterwards).
     * @hide
     * @param helper the legacy helper that owns the session for mRcMediaIntent
     */
    public void unregisterWithSession(MediaSessionLegacyHelper helper) {
        helper.removeRccListener(mRcMediaIntent);
        mSession = null;
    }
+
    /**
     * Get a {@link MediaSession} associated with this RCC. It will only have a
     * session while it is registered with
     * {@link AudioManager#registerRemoteControlClient}. The session returned
     * should not be modified directly by the application but may be used with
     * other APIs that require a session.
     *
     * @return A media session object, or null when not registered (the cached session
     *     is cleared by unregisterWithSession()).
     */
    public MediaSession getMediaSession() {
        return mSession;
    }
+
+ /**
+ * Class used to modify metadata in a {@link RemoteControlClient} object.
+ * Use {@link RemoteControlClient#editMetadata(boolean)} to create an instance of an editor,
+ * on which you set the metadata for the RemoteControlClient instance. Once all the information
+ * has been set, use {@link #apply()} to make it the new metadata that should be displayed
+ * for the associated client. Once the metadata has been "applied", you cannot reuse this
+ * instance of the MetadataEditor.
+ *
+ * @deprecated Use {@link MediaMetadata} and {@link MediaSession} instead.
+ */
+ @Deprecated public class MetadataEditor extends MediaMetadataEditor {
+
+ // only use RemoteControlClient.editMetadata() to get a MetadataEditor instance
+ private MetadataEditor() { }
+ /**
+ * @hide
+ */
+ public Object clone() throws CloneNotSupportedException {
+ throw new CloneNotSupportedException();
+ }
+
+ /**
+ * The metadata key for the content artwork / album art.
+ */
+ public final static int BITMAP_KEY_ARTWORK = 100;
+
+ /**
+ * @hide
+ * TODO(jmtrivi) have lockscreen move to the new key name and remove
+ */
+ public final static int METADATA_KEY_ARTWORK = BITMAP_KEY_ARTWORK;
+
+ /**
+ * Adds textual information to be displayed.
+ * Note that none of the information added after {@link #apply()} has been called,
+ * will be displayed.
+ * @param key The identifier of a the metadata field to set. Valid values are
+ * {@link android.media.MediaMetadataRetriever#METADATA_KEY_ALBUM},
+ * {@link android.media.MediaMetadataRetriever#METADATA_KEY_ALBUMARTIST},
+ * {@link android.media.MediaMetadataRetriever#METADATA_KEY_TITLE},
+ * {@link android.media.MediaMetadataRetriever#METADATA_KEY_ARTIST},
+ * {@link android.media.MediaMetadataRetriever#METADATA_KEY_AUTHOR},
+ * {@link android.media.MediaMetadataRetriever#METADATA_KEY_COMPILATION},
+ * {@link android.media.MediaMetadataRetriever#METADATA_KEY_COMPOSER},
+ * {@link android.media.MediaMetadataRetriever#METADATA_KEY_DATE},
+ * {@link android.media.MediaMetadataRetriever#METADATA_KEY_GENRE},
+ * {@link android.media.MediaMetadataRetriever#METADATA_KEY_TITLE},
+ * {@link android.media.MediaMetadataRetriever#METADATA_KEY_WRITER}.
+ * @param value The text for the given key, or {@code null} to signify there is no valid
+ * information for the field.
+ * @return Returns a reference to the same MetadataEditor object, so you can chain put
+ * calls together.
+ */
+ public synchronized MetadataEditor putString(int key, String value)
+ throws IllegalArgumentException {
+ super.putString(key, value);
+ if (mMetadataBuilder != null) {
+ // MediaMetadata supports all the same fields as MetadataEditor
+ String metadataKey = MediaMetadata.getKeyFromMetadataEditorKey(key);
+ // But just in case, don't add things we don't understand
+ if (metadataKey != null) {
+ mMetadataBuilder.putText(metadataKey, value);
+ }
+ }
+
+ return this;
+ }
+
+ /**
+ * Adds numerical information to be displayed.
+ * Note that none of the information added after {@link #apply()} has been called,
+ * will be displayed.
+ * @param key the identifier of a the metadata field to set. Valid values are
+ * {@link android.media.MediaMetadataRetriever#METADATA_KEY_CD_TRACK_NUMBER},
+ * {@link android.media.MediaMetadataRetriever#METADATA_KEY_DISC_NUMBER},
+ * {@link android.media.MediaMetadataRetriever#METADATA_KEY_DURATION} (with a value
+ * expressed in milliseconds),
+ * {@link android.media.MediaMetadataRetriever#METADATA_KEY_YEAR}.
+ * @param value The long value for the given key
+ * @return Returns a reference to the same MetadataEditor object, so you can chain put
+ * calls together.
+ * @throws IllegalArgumentException
+ */
+ public synchronized MetadataEditor putLong(int key, long value)
+ throws IllegalArgumentException {
+ super.putLong(key, value);
+ if (mMetadataBuilder != null) {
+ // MediaMetadata supports all the same fields as MetadataEditor
+ String metadataKey = MediaMetadata.getKeyFromMetadataEditorKey(key);
+ // But just in case, don't add things we don't understand
+ if (metadataKey != null) {
+ mMetadataBuilder.putLong(metadataKey, value);
+ }
+ }
+ return this;
+ }
+
+ /**
+ * Sets the album / artwork picture to be displayed on the remote control.
+ * @param key the identifier of the bitmap to set. The only valid value is
+ * {@link #BITMAP_KEY_ARTWORK}
+ * @param bitmap The bitmap for the artwork, or null if there isn't any.
+ * @return Returns a reference to the same MetadataEditor object, so you can chain put
+ * calls together.
+ * @throws IllegalArgumentException
+ * @see android.graphics.Bitmap
+ */
+ @Override
+ public synchronized MetadataEditor putBitmap(int key, Bitmap bitmap)
+ throws IllegalArgumentException {
+ super.putBitmap(key, bitmap);
+ if (mMetadataBuilder != null) {
+ // MediaMetadata supports all the same fields as MetadataEditor
+ String metadataKey = MediaMetadata.getKeyFromMetadataEditorKey(key);
+ // But just in case, don't add things we don't understand
+ if (metadataKey != null) {
+ mMetadataBuilder.putBitmap(metadataKey, bitmap);
+ }
+ }
+ return this;
+ }
+
+ @Override
+ public synchronized MetadataEditor putObject(int key, Object object)
+ throws IllegalArgumentException {
+ super.putObject(key, object);
+ if (mMetadataBuilder != null &&
+ (key == MediaMetadataEditor.RATING_KEY_BY_USER ||
+ key == MediaMetadataEditor.RATING_KEY_BY_OTHERS)) {
+ String metadataKey = MediaMetadata.getKeyFromMetadataEditorKey(key);
+ if (metadataKey != null) {
+ mMetadataBuilder.putRating(metadataKey, (Rating) object);
+ }
+ }
+ return this;
+ }
+
+ /**
+ * Clears all the metadata that has been set since the MetadataEditor instance was created
+ * (with {@link RemoteControlClient#editMetadata(boolean)}).
+ * Note that clearing the metadata doesn't reset the editable keys
+ * (use {@link MediaMetadataEditor#removeEditableKeys()} instead).
+ */
+ @Override
+ public synchronized void clear() {
+ super.clear();
+ }
+
+ /**
+ * Associates all the metadata that has been set since the MetadataEditor instance was
+ * created with {@link RemoteControlClient#editMetadata(boolean)}, or since
+ * {@link #clear()} was called, with the RemoteControlClient. Once "applied",
+ * this MetadataEditor cannot be reused to edit the RemoteControlClient's metadata.
+ */
+ public synchronized void apply() {
+ // Guard against double-apply; the method is synchronized, so concurrent
+ // calls on the same editor serialize here.
+ if (mApplied) {
+ Log.e(TAG, "Can't apply a previously applied MetadataEditor");
+ return;
+ }
+ synchronized (mCacheLock) {
+ // Still build the old metadata so when creating a new editor
+ // you get the expected values.
+ // assign the edited data
+ mMetadata = new Bundle(mEditorMetadata);
+ // add the information about editable keys
+ mMetadata.putLong(String.valueOf(KEY_EDITABLE_MASK), mEditableKeys);
+ // Recycle the previous artwork only when it was replaced by a
+ // different bitmap.
+ if ((mOriginalArtwork != null) && (!mOriginalArtwork.equals(mEditorArtwork))) {
+ mOriginalArtwork.recycle();
+ }
+ mOriginalArtwork = mEditorArtwork;
+ mEditorArtwork = null;
+
+ // USE_SESSIONS
+ // Publish the freshly built metadata through the MediaSession, if any.
+ if (mSession != null && mMetadataBuilder != null) {
+ mMediaMetadata = mMetadataBuilder.build();
+ mSession.setMetadata(mMediaMetadata);
+ }
+ mApplied = true;
+ }
+ }
+ }
+
+ /**
+  * Creates a {@link MetadataEditor} for this client.
+  * @param startEmpty true to create the editor empty, false to pre-fill it with the
+  *     metadata that was previously applied to this RemoteControlClient.
+  * @return a new MetadataEditor instance.
+  */
+ public MetadataEditor editMetadata(boolean startEmpty) {
+     final MetadataEditor editor = new MetadataEditor();
+     if (startEmpty) {
+         editor.mEditorMetadata = new Bundle();
+         editor.mEditorArtwork = null;
+         editor.mEditableKeys = 0;
+     } else {
+         editor.mEditorMetadata = new Bundle(mMetadata);
+         editor.mEditorArtwork = mOriginalArtwork;
+     }
+     // When starting empty the editor is flagged as changed up front.
+     editor.mMetadataChanged = startEmpty;
+     editor.mArtworkChanged = startEmpty;
+     // USE_SESSIONS: seed the session builder from the last applied metadata,
+     // unless starting empty or nothing has been applied yet.
+     editor.mMetadataBuilder = (startEmpty || mMediaMetadata == null)
+             ? new MediaMetadata.Builder()
+             : new MediaMetadata.Builder(mMediaMetadata);
+     return editor;
+ }
+
+ /**
+ * Sets the current playback state.
+ * @param state The current playback state, one of the following values:
+ * {@link #PLAYSTATE_STOPPED},
+ * {@link #PLAYSTATE_PAUSED},
+ * {@link #PLAYSTATE_PLAYING},
+ * {@link #PLAYSTATE_FAST_FORWARDING},
+ * {@link #PLAYSTATE_REWINDING},
+ * {@link #PLAYSTATE_SKIPPING_FORWARDS},
+ * {@link #PLAYSTATE_SKIPPING_BACKWARDS},
+ * {@link #PLAYSTATE_BUFFERING},
+ * {@link #PLAYSTATE_ERROR}.
+ */
+ public void setPlaybackState(int state) {
+ // Position is reported as permanently unknown for this legacy overload.
+ setPlaybackStateInt(state, PLAYBACK_POSITION_ALWAYS_UNKNOWN, PLAYBACK_SPEED_1X,
+ false /* legacy API, converting to method with position and speed */);
+ }
+
+ /**
+ * Sets the current playback state and the matching media position for the current playback
+ * speed.
+ * @param state The current playback state, one of the following values:
+ * {@link #PLAYSTATE_STOPPED},
+ * {@link #PLAYSTATE_PAUSED},
+ * {@link #PLAYSTATE_PLAYING},
+ * {@link #PLAYSTATE_FAST_FORWARDING},
+ * {@link #PLAYSTATE_REWINDING},
+ * {@link #PLAYSTATE_SKIPPING_FORWARDS},
+ * {@link #PLAYSTATE_SKIPPING_BACKWARDS},
+ * {@link #PLAYSTATE_BUFFERING},
+ * {@link #PLAYSTATE_ERROR}.
+ * @param timeInMs a 0 or positive value for the current media position expressed in ms
+ * (same unit as for when sending the media duration, if applicable, with
+ * {@link android.media.MediaMetadataRetriever#METADATA_KEY_DURATION} in the
+ * {@link RemoteControlClient.MetadataEditor}). Negative values imply that position is not
+ * known (e.g. listening to a live stream of a radio) or not applicable (e.g. when state
+ * is {@link #PLAYSTATE_BUFFERING} and nothing had played yet).
+ * @param playbackSpeed a value expressed as a ratio of 1x playback: 1.0f is normal playback,
+ * 2.0f is 2x, 0.5f is half-speed, -2.0f is rewind at 2x speed. 0.0f means nothing is
+ * playing (e.g. when state is {@link #PLAYSTATE_ERROR}).
+ */
+ public void setPlaybackState(int state, long timeInMs, float playbackSpeed) {
+ // hasPosition=true: negative positions are mapped to PLAYBACK_POSITION_INVALID.
+ setPlaybackStateInt(state, timeInMs, playbackSpeed, true);
+ }
+
+ // Common implementation behind both setPlaybackState() overloads: updates the
+ // locally cached state and, when a MediaSession is attached, publishes an
+ // equivalent PlaybackState.
+ private void setPlaybackStateInt(int state, long timeInMs, float playbackSpeed,
+ boolean hasPosition) {
+ synchronized(mCacheLock) {
+ // Skip the update entirely when nothing changed.
+ if ((mPlaybackState != state) || (mPlaybackPositionMs != timeInMs)
+ || (mPlaybackSpeed != playbackSpeed)) {
+ // store locally
+ mPlaybackState = state;
+ // distinguish between an application not knowing the current playback position
+ // at the moment and an application using the API where only the playback state
+ // is passed, not the playback position.
+ if (hasPosition) {
+ if (timeInMs < 0) {
+ mPlaybackPositionMs = PLAYBACK_POSITION_INVALID;
+ } else {
+ mPlaybackPositionMs = timeInMs;
+ }
+ } else {
+ mPlaybackPositionMs = PLAYBACK_POSITION_ALWAYS_UNKNOWN;
+ }
+ mPlaybackSpeed = playbackSpeed;
+ // keep track of when the state change occurred
+ mPlaybackStateChangeTimeMs = SystemClock.elapsedRealtime();
+
+ // USE_SESSIONS
+ // Mirror the legacy state into the session's PlaybackState.
+ if (mSession != null) {
+ int pbState = PlaybackState.getStateFromRccState(state);
+ long position = hasPosition ? mPlaybackPositionMs
+ : PlaybackState.PLAYBACK_POSITION_UNKNOWN;
+
+ PlaybackState.Builder bob = new PlaybackState.Builder(mSessionPlaybackState);
+ bob.setState(pbState, position, playbackSpeed, SystemClock.elapsedRealtime());
+ bob.setErrorMessage(null);
+ mSessionPlaybackState = bob.build();
+ mSession.setPlaybackState(mSessionPlaybackState);
+ }
+ }
+ }
+ }
+
+ /**
+  * Sets the flags for the media transport control buttons that this client supports.
+  * @param transportControlFlags A combination of the following flags:
+  * {@link #FLAG_KEY_MEDIA_PREVIOUS}, {@link #FLAG_KEY_MEDIA_REWIND},
+  * {@link #FLAG_KEY_MEDIA_PLAY}, {@link #FLAG_KEY_MEDIA_PLAY_PAUSE},
+  * {@link #FLAG_KEY_MEDIA_PAUSE}, {@link #FLAG_KEY_MEDIA_STOP},
+  * {@link #FLAG_KEY_MEDIA_FAST_FORWARD}, {@link #FLAG_KEY_MEDIA_NEXT},
+  * {@link #FLAG_KEY_MEDIA_POSITION_UPDATE}, {@link #FLAG_KEY_MEDIA_RATING}.
+  */
+ public void setTransportControlFlags(int transportControlFlags) {
+     synchronized (mCacheLock) {
+         // Cache locally.
+         mTransportControlFlags = transportControlFlags;
+         if (mSession == null) {
+             return;
+         }
+         // USE_SESSIONS: translate the legacy flags into MediaSession actions
+         // and republish the playback state.
+         final PlaybackState.Builder builder =
+                 new PlaybackState.Builder(mSessionPlaybackState);
+         builder.setActions(
+                 PlaybackState.getActionsFromRccControlFlags(transportControlFlags));
+         mSessionPlaybackState = builder.build();
+         mSession.setPlaybackState(mSessionPlaybackState);
+     }
+ }
+
+ /**
+ * Interface definition for a callback to be invoked when one of the metadata values has
+ * been updated.
+ * Implement this interface to receive metadata updates after registering your listener
+ * through {@link RemoteControlClient#setMetadataUpdateListener(OnMetadataUpdateListener)}.
+ * Note: the callback is invoked while the client holds its internal cache lock
+ * (see onUpdateMetadata()), so implementations should return quickly.
+ */
+ public interface OnMetadataUpdateListener {
+ /**
+ * Called on the implementer to notify that the metadata field for the given key has
+ * been updated to the new value.
+ * @param key the identifier of the updated metadata field.
+ * @param newValue the Object storing the new value for the key.
+ */
+ public abstract void onMetadataUpdate(int key, Object newValue);
+ }
+
+ /**
+ * Sets the listener to be called whenever the metadata is updated.
+ * New metadata values will be received in the same thread as the one in which
+ * RemoteControlClient was created.
+ * @param l the metadata update listener; passing null stops metadata update callbacks
+ *   (onUpdateMetadata() checks for a null listener).
+ */
+ public void setMetadataUpdateListener(OnMetadataUpdateListener l) {
+ synchronized(mCacheLock) {
+ mMetadataUpdateListener = l;
+ }
+ }
+
+
+ /**
+ * Interface definition for a callback to be invoked when the media playback position is
+ * requested to be updated.
+ * Implementations are invoked while the client holds its internal cache lock
+ * (see onSeekTo()), so they should return quickly.
+ * @see RemoteControlClient#FLAG_KEY_MEDIA_POSITION_UPDATE
+ */
+ public interface OnPlaybackPositionUpdateListener {
+ /**
+ * Called on the implementer to notify it that the playback head should be set at the given
+ * position. If the position can be changed from its current value, the implementor of
+ * the interface must also update the playback position using
+ * {@link #setPlaybackState(int, long, float)} to reflect the actual new
+ * position being used, regardless of whether it differs from the requested position.
+ * Failure to do so would cause the system to not know the new actual playback position,
+ * and user interface components would fail to show the user where playback resumed after
+ * the position was updated.
+ * @param newPositionMs the new requested position in the current media, expressed in ms.
+ */
+ void onPlaybackPositionUpdate(long newPositionMs);
+ }
+
+ /**
+ * Interface definition for a callback to be invoked when the media playback position is
+ * queried.
+ * NOTE(review): no invocation of this provider is visible in this part of the file —
+ * confirm the calling thread and locking before relying on it.
+ * @see RemoteControlClient#FLAG_KEY_MEDIA_POSITION_UPDATE
+ */
+ public interface OnGetPlaybackPositionListener {
+ /**
+ * Called on the implementer of the interface to query the current playback position.
+ * @return a negative value if the current playback position (or the last valid playback
+ * position) is not known, or a zero or positive value expressed in ms indicating the
+ * current position, or the last valid known position.
+ */
+ long onGetPlaybackPosition();
+ }
+
+ /**
+ * Sets the listener to be called whenever the media playback position is requested
+ * to be updated.
+ * Notifications will be received in the same thread as the one in which RemoteControlClient
+ * was created.
+ * @param l the position update listener to be called; null stops position update callbacks
+ *   (onSeekTo() checks for a null listener).
+ */
+ public void setPlaybackPositionUpdateListener(OnPlaybackPositionUpdateListener l) {
+ synchronized(mCacheLock) {
+ mPositionUpdateListener = l;
+ }
+ }
+
+ /**
+ * Sets the listener to be called whenever the media current playback position is needed.
+ * Queries will be received in the same thread as the one in which RemoteControlClient
+ * was created.
+ * @param l the listener to be called to retrieve the playback position
+ */
+ public void setOnGetPlaybackPositionListener(OnGetPlaybackPositionListener l) {
+ // Stored under mCacheLock like the other registered callbacks.
+ synchronized(mCacheLock) {
+ mPositionProvider = l;
+ }
+ }
+
+ /**
+ * @hide
+ * Flag to reflect that the application controlling this RemoteControlClient sends playback
+ * position updates. The playback position being "readable" is considered from the application's
+ * point of view.
+ */
+ // Made final: these are bit-flag constants and must never be reassigned at runtime;
+ // leaving them mutable lets any caller corrupt the flag values process-wide.
+ public static final int MEDIA_POSITION_READABLE = 1 << 0;
+ /**
+ * @hide
+ * Flag to reflect that the application controlling this RemoteControlClient can receive
+ * playback position updates. The playback position being "writable"
+ * is considered from the application's point of view.
+ */
+ public static final int MEDIA_POSITION_WRITABLE = 1 << 1;
+
+ /** @hide */
+ public final static int DEFAULT_PLAYBACK_VOLUME_HANDLING = PLAYBACK_VOLUME_VARIABLE;
+ /** @hide */
+ // hard-coded to the same number of steps as AudioService.MAX_STREAM_VOLUME[STREAM_MUSIC]
+ public final static int DEFAULT_PLAYBACK_VOLUME = 15;
+
+ /**
+ * Lock for all cached data
+ */
+ private final Object mCacheLock = new Object();
+ /**
+ * Cache for the playback state.
+ * Access synchronized on mCacheLock
+ */
+ private int mPlaybackState = PLAYSTATE_NONE;
+ /**
+ * Time of last play state change
+ * Access synchronized on mCacheLock
+ */
+ private long mPlaybackStateChangeTimeMs = 0;
+ /**
+ * Last playback position in ms reported by the user
+ * Updated under mCacheLock in setPlaybackStateInt().
+ */
+ private long mPlaybackPositionMs = PLAYBACK_POSITION_INVALID;
+ /**
+ * Last playback speed reported by the user
+ * Updated under mCacheLock in setPlaybackStateInt().
+ */
+ private float mPlaybackSpeed = PLAYBACK_SPEED_1X;
+ /**
+ * Cache for the artwork bitmap.
+ * Access synchronized on mCacheLock
+ * Artwork and metadata are not kept in one Bundle because the bitmap sometimes needs to be
+ * accessed to be resized, in which case a copy will be made. This would add overhead in
+ * Bundle operations.
+ */
+ private Bitmap mOriginalArtwork;
+ /**
+ * Cache for the transport control mask.
+ * Access synchronized on mCacheLock
+ */
+ private int mTransportControlFlags = FLAGS_KEY_MEDIA_NONE;
+ /**
+ * Cache for the metadata strings.
+ * Access synchronized on mCacheLock
+ * This is re-initialized in apply() and so cannot be final.
+ */
+ private Bundle mMetadata = new Bundle();
+ /**
+ * Listener registered by user of RemoteControlClient to receive requests for playback position
+ * update requests.
+ */
+ private OnPlaybackPositionUpdateListener mPositionUpdateListener;
+ /**
+ * Provider registered by user of RemoteControlClient to provide the current playback position.
+ */
+ private OnGetPlaybackPositionListener mPositionProvider;
+ /**
+ * Listener registered by user of RemoteControlClient to receive edit changes to metadata
+ * it exposes.
+ */
+ private OnMetadataUpdateListener mMetadataUpdateListener;
+ /**
+ * The current remote control client generation ID across the system, as known by this object
+ * Read under mCacheLock by the onSeekTo()/onUpdateMetadata() handlers.
+ */
+ private int mCurrentClientGenId = -1;
+
+ /**
+ * The media button intent description associated with this remote control client
+ * (can / should include target component for intent handling, used when persisting media
+ * button event receiver across reboots).
+ */
+ private final PendingIntent mRcMediaIntent;
+
+ /**
+ * Reflects whether any "plugged in" IRemoteControlDisplay has mWantsPositonSync set to true.
+ */
+ // TODO consider using a ref count for IRemoteControlDisplay requiring sync instead
+ private boolean mNeedsPositionSync = false;
+
+ /**
+ * Cache for the current playback state using Session APIs.
+ */
+ private PlaybackState mSessionPlaybackState = null;
+
+ /**
+ * Cache for metadata using Session APIs. This is re-initialized in apply().
+ */
+ private MediaMetadata mMediaMetadata;
+
+ /**
+ * @hide
+ * Accessor to media button intent description (includes target component)
+ * @return the {@link PendingIntent} this client was constructed with (the field is final).
+ */
+ public PendingIntent getRcMediaIntent() {
+ return mRcMediaIntent;
+ }
+
+ /**
+ * @hide
+ * Default value for the unique identifier
+ */
+ public final static int RCSE_ID_UNREGISTERED = -1;
+
+ // USE_SESSIONS
+ // Bridges MediaSession callbacks back into the legacy listener callbacks.
+ private MediaSession.Callback mTransportListener = new MediaSession.Callback() {
+
+ @Override
+ public void onSeekTo(long pos) {
+ // Forward to the app's OnPlaybackPositionUpdateListener, if registered.
+ RemoteControlClient.this.onSeekTo(mCurrentClientGenId, pos);
+ }
+
+ @Override
+ public void onSetRating(Rating rating) {
+ // Only forward ratings when the app declared FLAG_KEY_MEDIA_RATING.
+ // NOTE(review): mTransportControlFlags and mCurrentClientGenId are read
+ // here without holding mCacheLock — confirm this race is acceptable.
+ if ((mTransportControlFlags & FLAG_KEY_MEDIA_RATING) != 0) {
+ onUpdateMetadata(mCurrentClientGenId, MetadataEditor.RATING_KEY_BY_USER, rating);
+ }
+ }
+ };
+
+ //===========================================================
+ // Message handlers
+
+ private void onSeekTo(int generationId, long timeMs) {
+ synchronized (mCacheLock) {
+ if ((mCurrentClientGenId == generationId) && (mPositionUpdateListener != null)) {
+ mPositionUpdateListener.onPlaybackPositionUpdate(timeMs);
+ }
+ }
+ }
+
+ private void onUpdateMetadata(int generationId, int key, Object value) {
+ synchronized (mCacheLock) {
+ if ((mCurrentClientGenId == generationId) && (mMetadataUpdateListener != null)) {
+ mMetadataUpdateListener.onMetadataUpdate(key, value);
+ }
+ }
+ }
+
+ //===========================================================
+ // Internal utilities
+
+ /**
+  * Tells whether the playback position is expected to advance while in the given state.
+  * @param playstate the playback state to evaluate
+  * @return true during any form of active playback, false otherwise
+  */
+ static boolean playbackPositionShouldMove(int playstate) {
+     switch (playstate) {
+         // States during which the position advances; unknown states also fall
+         // through to true, matching the historical behavior.
+         case PLAYSTATE_PLAYING:
+         case PLAYSTATE_FAST_FORWARDING:
+         case PLAYSTATE_REWINDING:
+             return true;
+         case PLAYSTATE_STOPPED:
+         case PLAYSTATE_PAUSED:
+         case PLAYSTATE_BUFFERING:
+         case PLAYSTATE_ERROR:
+         case PLAYSTATE_SKIPPING_FORWARDS:
+         case PLAYSTATE_SKIPPING_BACKWARDS:
+             return false;
+         default:
+             return true;
+     }
+ }
+
+ /**
+ * Period for playback position drift checks, 15s when playing at 1x or slower.
+ */
+ private final static long POSITION_REFRESH_PERIOD_PLAYING_MS = 15000;
+ /**
+ * Minimum period for playback position drift checks, never more often than every 2s, when
+ * fast forwarding or rewinding.
+ */
+ private final static long POSITION_REFRESH_PERIOD_MIN_MS = 2000;
+ /**
+ * The value above which the difference between client-reported playback position and
+ * estimated position is considered a drift.
+ */
+ private final static long POSITION_DRIFT_MAX_MS = 500;
+ /**
+ * Compute the period at which the estimated playback position should be compared against the
+ * actual playback position. Is a function of playback speed.
+ * @param speed 1.0f is normal playback speed
+ * @return the period in ms
+ */
+ private static long getCheckPeriodFromSpeed(float speed) {
+ if (Math.abs(speed) <= 1.0f) {
+ return POSITION_REFRESH_PERIOD_PLAYING_MS;
+ } else {
+ // Faster playback drifts faster: shrink the period proportionally, but
+ // never poll more often than POSITION_REFRESH_PERIOD_MIN_MS.
+ return Math.max((long)(POSITION_REFRESH_PERIOD_PLAYING_MS / Math.abs(speed)),
+ POSITION_REFRESH_PERIOD_MIN_MS);
+ }
+ }
+}
diff --git a/android/media/RemoteController.java b/android/media/RemoteController.java
new file mode 100644
index 00000000..90f2163f
--- /dev/null
+++ b/android/media/RemoteController.java
@@ -0,0 +1,695 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.app.ActivityManager;
+import android.content.ComponentName;
+import android.content.Context;
+import android.content.Intent;
+import android.graphics.Bitmap;
+import android.media.session.MediaController;
+import android.media.session.MediaSession;
+import android.media.session.MediaSessionLegacyHelper;
+import android.media.session.MediaSessionManager;
+import android.media.session.PlaybackState;
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Message;
+import android.os.UserHandle;
+import android.util.DisplayMetrics;
+import android.util.Log;
+import android.view.KeyEvent;
+
+import java.lang.ref.WeakReference;
+import java.util.List;
+
+/**
+ * The RemoteController class is used to control media playback, display and update media metadata
+ * and playback status, published by applications using the {@link RemoteControlClient} class.
+ * <p>
+ * A RemoteController shall be registered through
+ * {@link AudioManager#registerRemoteController(RemoteController)} in order for the system to send
+ * media event updates to the {@link OnClientUpdateListener} listener set in the class constructor.
+ * Implement the methods of the interface to receive the information published by the active
+ * {@link RemoteControlClient} instances.
+ * <br>By default an {@link OnClientUpdateListener} implementation will not receive bitmaps for
+ * album art. Use {@link #setArtworkConfiguration(int, int)} to receive images as well.
+ * <p>
+ * Registration requires the {@link OnClientUpdateListener} listener to be one of the enabled
+ * notification listeners (see {@link android.service.notification.NotificationListenerService}).
+ *
+ * @deprecated Use {@link MediaController} instead.
+ */
+@Deprecated public final class RemoteController
+{
+ private final static int MAX_BITMAP_DIMENSION = 512;
+ private final static String TAG = "RemoteController";
+ private final static boolean DEBUG = false;
+ // NOTE(review): this lock is static, so it (and everything guarded by it) is
+ // shared across ALL RemoteController instances in the process — confirm this
+ // is intentional for this legacy class.
+ private final static Object mInfoLock = new Object();
+ private final Context mContext;
+ private final int mMaxBitmapDimension;
+ private MetadataEditor mMetadataEditor;
+
+ private MediaSessionManager mSessionManager;
+ private MediaSessionManager.OnActiveSessionsChangedListener mSessionListener;
+ private MediaController.Callback mSessionCb = new MediaControllerCallback();
+
+ /**
+ * Synchronized on mInfoLock
+ */
+ private boolean mIsRegistered = false;
+ private OnClientUpdateListener mOnClientUpdateListener;
+ private PlaybackInfo mLastPlaybackInfo;
+ private int mArtworkWidth = -1;
+ private int mArtworkHeight = -1;
+ private boolean mEnabled = true;
+ // synchronized on mInfoLock, for USE_SESSION apis.
+ private MediaController mCurrentSession;
+
+ /**
+ * Class constructor.
+ * @param context the {@link Context}, must be non-null.
+ * @param updateListener the listener to be called whenever new client information is available,
+ * must be non-null.
+ * @throws IllegalArgumentException
+ */
+ public RemoteController(Context context, OnClientUpdateListener updateListener)
+ throws IllegalArgumentException {
+ // Null looper: events will be delivered on the calling thread's looper.
+ this(context, updateListener, null);
+ }
+
+ /**
+  * Class constructor.
+  * @param context the {@link Context}, must be non-null.
+  * @param updateListener the listener to be called whenever new client information is
+  *     available, must be non-null.
+  * @param looper the {@link Looper} on which to run the event loop, or null to use the
+  *     current thread's looper.
+  * @throws IllegalArgumentException if a required argument is null or no looper is available.
+  */
+ public RemoteController(Context context, OnClientUpdateListener updateListener, Looper looper)
+         throws IllegalArgumentException {
+     if (context == null) {
+         throw new IllegalArgumentException("Invalid null Context");
+     }
+     if (updateListener == null) {
+         throw new IllegalArgumentException("Invalid null OnClientUpdateListener");
+     }
+     // Deliver events on the supplied looper, or fall back to the caller's.
+     final Looper eventLooper = (looper != null) ? looper : Looper.myLooper();
+     if (eventLooper == null) {
+         throw new IllegalArgumentException("Calling thread not associated with a looper");
+     }
+     mEventHandler = new EventHandler(this, eventLooper);
+     mOnClientUpdateListener = updateListener;
+     mContext = context;
+     mSessionManager = (MediaSessionManager) context
+             .getSystemService(Context.MEDIA_SESSION_SERVICE);
+     mSessionListener = new TopTransportSessionListener();
+     if (ActivityManager.isLowRamDeviceStatic()) {
+         // Cap artwork dimensions on low-RAM devices.
+         mMaxBitmapDimension = MAX_BITMAP_DIMENSION;
+     } else {
+         final DisplayMetrics dm = context.getResources().getDisplayMetrics();
+         mMaxBitmapDimension = Math.max(dm.widthPixels, dm.heightPixels);
+     }
+ }
+
+
+ /**
+ * Interface definition for the callbacks to be invoked whenever media events, metadata
+ * and playback status are available.
+ * NOTE(review): callbacks appear to be dispatched via the Looper supplied to the
+ * RemoteController constructor — confirm against the event handler implementation.
+ */
+ public interface OnClientUpdateListener {
+ /**
+ * Called whenever all information, previously received through the other
+ * methods of the listener, is no longer valid and is about to be refreshed.
+ * This is typically called whenever a new {@link RemoteControlClient} has been selected
+ * by the system to have its media information published.
+ * @param clearing true if there is no selected RemoteControlClient and no information
+ * is available.
+ */
+ public void onClientChange(boolean clearing);
+
+ /**
+ * Called whenever the playback state has changed.
+ * It is called when no information is known about the playback progress in the media and
+ * the playback speed.
+ * @param state one of the playback states authorized
+ * in {@link RemoteControlClient#setPlaybackState(int)}.
+ */
+ public void onClientPlaybackStateUpdate(int state);
+ /**
+ * Called whenever the playback state has changed, and playback position
+ * and speed are known.
+ * @param state one of the playback states authorized
+ * in {@link RemoteControlClient#setPlaybackState(int)}.
+ * @param stateChangeTimeMs the system time at which the state change was reported,
+ * expressed in ms. Based on {@link android.os.SystemClock#elapsedRealtime()}.
+ * @param currentPosMs a positive value for the current media playback position expressed
+ * in ms, a negative value if the position is temporarily unknown.
+ * @param speed a value expressed as a ratio of 1x playback: 1.0f is normal playback,
+ * 2.0f is 2x, 0.5f is half-speed, -2.0f is rewind at 2x speed. 0.0f means nothing is
+ * playing (e.g. when state is {@link RemoteControlClient#PLAYSTATE_ERROR}).
+ */
+ public void onClientPlaybackStateUpdate(int state, long stateChangeTimeMs,
+ long currentPosMs, float speed);
+ /**
+ * Called whenever the transport control flags have changed.
+ * @param transportControlFlags one of the flags authorized
+ * in {@link RemoteControlClient#setTransportControlFlags(int)}.
+ */
+ public void onClientTransportControlUpdate(int transportControlFlags);
+ /**
+ * Called whenever new metadata is available.
+ * See the {@link MediaMetadataEditor#putLong(int, long)},
+ * {@link MediaMetadataEditor#putString(int, String)},
+ * {@link MediaMetadataEditor#putBitmap(int, Bitmap)}, and
+ * {@link MediaMetadataEditor#putObject(int, Object)} methods for the various keys that
+ * can be queried.
+ * @param metadataEditor the container of the new metadata.
+ */
+ public void onClientMetadataUpdate(MetadataEditor metadataEditor);
+ };
+
+ /**
+ * Return the estimated playback position of the current media track or a negative value
+ * if not available.
+ *
+ * <p>The value returned is estimated by the current process and may not be perfect.
+ * The time returned by this method is calculated from the last state change time based
+ * on the current play position at that time and the last known playback speed.
+ * An application may call {@link #setSynchronizationMode(int)} to apply
+ * a synchronization policy that will periodically re-sync the estimated position
+ * with the RemoteControlClient.</p>
+ *
+ * @return the current estimated playback position in milliseconds or a negative value
+ * if not available
+ *
+ * @see OnClientUpdateListener#onClientPlaybackStateUpdate(int, long, long, float)
+ */
+ public long getEstimatedMediaPosition() {
+ synchronized (mInfoLock) {
+ if (mCurrentSession != null) {
+ PlaybackState state = mCurrentSession.getPlaybackState();
+ if (state != null) {
+ return state.getPosition();
+ }
+ }
+ }
+ return -1;
+ }
+
+
+ /**
+  * Sends a simulated media-button key event to the current client.
+  * To simulate a key press, first send a KeyEvent built with a
+  * {@link KeyEvent#ACTION_DOWN} action, then another event with the
+  * {@link KeyEvent#ACTION_UP} action.
+  * <p>The event is dispatched to the currently published media session, if any.</p>
+  * @param keyEvent a {@link KeyEvent} instance whose key code is one of the media key
+  *     codes ({@code KEYCODE_MEDIA_*}, {@link KeyEvent#KEYCODE_MUTE} or
+  *     {@link KeyEvent#KEYCODE_HEADSETHOOK}).
+  * @return true if the event was successfully sent, false otherwise.
+  * @throws IllegalArgumentException if the key code is not a media key.
+  */
+ public boolean sendMediaKeyEvent(KeyEvent keyEvent) throws IllegalArgumentException {
+     if (!KeyEvent.isMediaKey(keyEvent.getKeyCode())) {
+         throw new IllegalArgumentException("not a media key event");
+     }
+     synchronized (mInfoLock) {
+         // With no current session there is nobody to deliver the event to.
+         return (mCurrentSession != null)
+                 && mCurrentSession.dispatchMediaButtonEvent(keyEvent);
+     }
+ }
+
+
+ /**
+ * Sets the new playback position.
+ * This method can only be called on a registered RemoteController.
+ * @param timeMs a 0 or positive value for the new playback position, expressed in ms.
+ * @return true if the command to set the playback position was successfully sent.
+ * @throws IllegalArgumentException
+ */
+ public boolean seekTo(long timeMs) throws IllegalArgumentException {
+ if (!mEnabled) {
+ Log.e(TAG, "Cannot use seekTo() from a disabled RemoteController");
+ return false;
+ }
+ if (timeMs < 0) {
+ throw new IllegalArgumentException("illegal negative time value");
+ }
+ synchronized (mInfoLock) {
+ if (mCurrentSession != null) {
+ mCurrentSession.getTransportControls().seekTo(timeMs);
+ }
+ }
+ // NOTE(review): true is returned even when there is no current session and the
+ // seek was therefore never dispatched — the @return doc overstates this.
+ return true;
+ }
+
+
+ /**
+  * @hide
+  * Configures, or disables, artwork delivery for this controller.
+  * @param wantBitmap true to receive artwork bitmaps, false to disable them.
+  * @param width maximum artwork width in pixels (ignored when wantBitmap is false).
+  * @param height maximum artwork height in pixels (ignored when wantBitmap is false).
+  * @return true if successful
+  * @throws IllegalArgumentException if artwork is requested with non-positive dimensions.
+  */
+ public boolean setArtworkConfiguration(boolean wantBitmap, int width, int height)
+         throws IllegalArgumentException {
+     synchronized (mInfoLock) {
+         if (!wantBitmap) {
+             // Disable artwork delivery.
+             mArtworkWidth = -1;
+             mArtworkHeight = -1;
+             return true;
+         }
+         if ((width <= 0) || (height <= 0)) {
+             throw new IllegalArgumentException("Invalid dimensions");
+         }
+         // Clamp the request to the device-dependent maximum.
+         mArtworkWidth = Math.min(width, mMaxBitmapDimension);
+         mArtworkHeight = Math.min(height, mMaxBitmapDimension);
+     }
+     return true;
+ }
+
+ /**
+ * Set the maximum artwork image dimensions to be received in the metadata.
+ * No bitmaps will be received unless this has been specified.
+ * @param width the maximum width in pixels
+ * @param height the maximum height in pixels
+ * @return true if the artwork dimension was successfully set.
+ * @throws IllegalArgumentException
+ */
+ public boolean setArtworkConfiguration(int width, int height) throws IllegalArgumentException {
+ // Convenience wrapper: enables artwork and clamps the dimensions to the
+ // device maximum (see the boolean overload).
+ return setArtworkConfiguration(true, width, height);
+ }
+
+ /**
+ * Prevents this RemoteController from receiving artwork images.
+ * @return true if receiving artwork images was successfully disabled.
+ */
+ public boolean clearArtworkConfiguration() {
+ // Width/height of -1 mean "no artwork" (see the boolean overload).
+ return setArtworkConfiguration(false, -1, -1);
+ }
+
+
+ /**
+ * Default playback position synchronization mode where the RemoteControlClient is not
+ * asked regularly for its playback position to see if it has drifted from the estimated
+ * position.
+ */
+ public static final int POSITION_SYNCHRONIZATION_NONE = 0;
+
+ /**
+ * The playback position synchronization mode where the RemoteControlClient instances which
+ * expose their playback position to the framework, will be regularly polled to check
+ * whether any drift has been noticed between their estimated position and the one they report.
+ * Note that this mode should only ever be used when needing to display very accurate playback
+ * position, as regularly polling a RemoteControlClient for its position may have an impact
+ * on battery life (if applicable) when this query will trigger network transactions in the
+ * case of remote playback.
+ * Note: in this implementation {@link #setSynchronizationMode(int)} validates the mode
+ * but performs no polling (it is documented there as "deprecated, no-op").
+ */
+ public static final int POSITION_SYNCHRONIZATION_CHECK = 1;
+
+ /**
+ * Set the playback position synchronization mode.
+ * Must be called on a registered RemoteController.
+ * @param sync {@link #POSITION_SYNCHRONIZATION_NONE} or {@link #POSITION_SYNCHRONIZATION_CHECK}
+ * @return true if the synchronization mode was successfully set.
+ * @throws IllegalArgumentException
+ */
+ public boolean setSynchronizationMode(int sync) throws IllegalArgumentException {
+ if ((sync != POSITION_SYNCHRONIZATION_NONE) && (sync != POSITION_SYNCHRONIZATION_CHECK)) {
+ throw new IllegalArgumentException("Unknown synchronization mode " + sync);
+ }
+ if (!mIsRegistered) {
+ Log.e(TAG, "Cannot set synchronization mode on an unregistered RemoteController");
+ return false;
+ }
+ // deprecated, no-op
+ return true;
+ }
+
+
+ /**
+  * Creates a {@link MetadataEditor} for updating metadata values of the editable keys of
+  * the current {@link RemoteControlClient}.
+  * This method can only be called on a registered RemoteController.
+  * @return a new, empty MetadataEditor instance.
+  */
+ public MetadataEditor editMetadata() {
+     final MetadataEditor editor = new MetadataEditor();
+     // Start from a blank slate: no metadata, no artwork, nothing editable yet.
+     editor.mEditorMetadata = new Bundle();
+     editor.mEditorArtwork = null;
+     editor.mEditableKeys = 0;
+     editor.mMetadataChanged = true;
+     editor.mArtworkChanged = true;
+     return editor;
+ }
+
+ /**
+ * A class to read the metadata published by a {@link RemoteControlClient}, or send a
+ * {@link RemoteControlClient} new values for keys that can be edited.
+ */
+ public class MetadataEditor extends MediaMetadataEditor {
+ /**
+ * @hide
+ */
+ protected MetadataEditor() { }
+
+ /**
+ * @hide
+ * Builds an editor over an existing metadata Bundle and editable-key mask.
+ */
+ protected MetadataEditor(Bundle metadata, long editableKeys) {
+ mEditorMetadata = metadata;
+ mEditableKeys = editableKeys;
+
+ // Extract the artwork into its own Bitmap field, then drop the bundled copy.
+ mEditorArtwork = (Bitmap) metadata.getParcelable(
+ String.valueOf(MediaMetadataEditor.BITMAP_KEY_ARTWORK));
+ if (mEditorArtwork != null) {
+ cleanupBitmapFromBundle(MediaMetadataEditor.BITMAP_KEY_ARTWORK);
+ }
+
+ mMetadataChanged = true;
+ mArtworkChanged = true;
+ mApplied = false;
+ }
+
+ // Removes the Bundle entry for the given key when it is a bitmap-typed key.
+ private void cleanupBitmapFromBundle(int key) {
+ if (METADATA_KEYS_TYPE.get(key, METADATA_TYPE_INVALID) == METADATA_TYPE_BITMAP) {
+ mEditorMetadata.remove(String.valueOf(key));
+ }
+ }
+
+ /**
+ * Applies all of the metadata changes that have been set since the MediaMetadataEditor
+ * instance was created with {@link RemoteController#editMetadata()}
+ * or since {@link #clear()} was called.
+ */
+ public synchronized void apply() {
+ // "applying" a metadata bundle in RemoteController is only for sending edited
+ // key values back to the RemoteControlClient, so here we only care about the only
+ // editable key we support: RATING_KEY_BY_USER
+ if (!mMetadataChanged) {
+ return;
+ }
+ synchronized (mInfoLock) {
+ if (mCurrentSession != null) {
+ if (mEditorMetadata.containsKey(
+ String.valueOf(MediaMetadataEditor.RATING_KEY_BY_USER))) {
+ Rating rating = (Rating) getObject(
+ MediaMetadataEditor.RATING_KEY_BY_USER, null);
+ if (rating != null) {
+ mCurrentSession.getTransportControls().setRating(rating);
+ }
+ }
+ }
+ }
+ // NOT setting mApplied to true as this type of MetadataEditor will be applied
+ // multiple times, whenever the user of a RemoteController needs to change the
+ // metadata (e.g. user changes the rating of a song more than once during playback)
+ mApplied = false;
+ }
+
+ }
+
    /**
     * This receives updates when the current session changes. This is
     * registered to receive the updates on the handler thread so it can call
     * directly into the appropriate methods.
     */
    private class MediaControllerCallback extends MediaController.Callback {
        /** Forwards playback-state changes from the session to this RemoteController. */
        @Override
        public void onPlaybackStateChanged(PlaybackState state) {
            onNewPlaybackState(state);
        }

        /** Forwards metadata changes from the session to this RemoteController. */
        @Override
        public void onMetadataChanged(MediaMetadata metadata) {
            onNewMediaMetadata(metadata);
        }
    }
+
+ /**
+ * Listens for changes to the active session stack and replaces the
+ * currently tracked session if it has changed.
+ */
+ private class TopTransportSessionListener implements
+ MediaSessionManager.OnActiveSessionsChangedListener {
+
+ @Override
+ public void onActiveSessionsChanged(List<MediaController> controllers) {
+ int size = controllers.size();
+ for (int i = 0; i < size; i++) {
+ MediaController controller = controllers.get(i);
+ long flags = controller.getFlags();
+ // We only care about sessions that handle transport controls,
+ // which will be true for apps using RCC
+ if ((flags & MediaSession.FLAG_HANDLES_TRANSPORT_CONTROLS) != 0) {
+ updateController(controller);
+ return;
+ }
+ }
+ updateController(null);
+ }
+
+ }
+
    //==================================================
    // Event handling
    private final EventHandler mEventHandler;
    // Message codes dispatched through mEventHandler.
    private final static int MSG_CLIENT_CHANGE = 0;
    private final static int MSG_NEW_PLAYBACK_STATE = 1;
    private final static int MSG_NEW_MEDIA_METADATA = 2;

    /** Delivers queued client/session events on the looper this handler was built with. */
    private class EventHandler extends Handler {

        public EventHandler(RemoteController rc, Looper looper) {
            super(looper);
        }

        @Override
        public void handleMessage(Message msg) {
            switch(msg.what) {
                case MSG_CLIENT_CHANGE:
                    // arg2 == 1 signals the client is being cleared (no replacement session).
                    onClientChange(msg.arg2 == 1);
                    break;
                case MSG_NEW_PLAYBACK_STATE:
                    onNewPlaybackState((PlaybackState) msg.obj);
                    break;
                case MSG_NEW_MEDIA_METADATA:
                    onNewMediaMetadata((MediaMetadata) msg.obj);
                    break;
                default:
                    Log.e(TAG, "unknown event " + msg.what);
            }
        }
    }
+
    /**
     * @hide
     * Registers for active-session changes and synchronously seeds the current state.
     */
    void startListeningToSessions() {
        final ComponentName listenerComponent = new ComponentName(mContext,
                mOnClientUpdateListener.getClass());
        Handler handler = null;
        if (Looper.myLooper() == null) {
            // Caller thread has no looper: deliver session callbacks on the main looper.
            handler = new Handler(Looper.getMainLooper());
        }
        mSessionManager.addOnActiveSessionsChangedListener(mSessionListener, listenerComponent,
                UserHandle.myUserId(), handler);
        // Seed with the sessions already active so state is correct before the first callback.
        mSessionListener.onActiveSessionsChanged(mSessionManager
                .getActiveSessions(listenerComponent));
        if (DEBUG) {
            Log.d(TAG, "Registered session listener with component " + listenerComponent
                    + " for user " + UserHandle.myUserId());
        }
    }
+
+ /**
+ * @hide
+ */
+ void stopListeningToSessions() {
+ mSessionManager.removeOnActiveSessionsChangedListener(mSessionListener);
+ if (DEBUG) {
+ Log.d(TAG, "Unregistered session listener for user "
+ + UserHandle.myUserId());
+ }
+ }
+
+ /** If the msg is already queued, replace it with this one. */
+ private static final int SENDMSG_REPLACE = 0;
+ /** If the msg is already queued, ignore this one and leave the old. */
+ private static final int SENDMSG_NOOP = 1;
+ /** If the msg is already queued, queue this one and leave the old. */
+ private static final int SENDMSG_QUEUE = 2;
+
+ private static void sendMsg(Handler handler, int msg, int existingMsgPolicy,
+ int arg1, int arg2, Object obj, int delayMs) {
+ if (handler == null) {
+ Log.e(TAG, "null event handler, will not deliver message " + msg);
+ return;
+ }
+ if (existingMsgPolicy == SENDMSG_REPLACE) {
+ handler.removeMessages(msg);
+ } else if (existingMsgPolicy == SENDMSG_NOOP && handler.hasMessages(msg)) {
+ return;
+ }
+ handler.sendMessageDelayed(handler.obtainMessage(msg, arg1, arg2, obj), delayMs);
+ }
+
+ private void onClientChange(boolean clearing) {
+ final OnClientUpdateListener l;
+ synchronized(mInfoLock) {
+ l = mOnClientUpdateListener;
+ mMetadataEditor = null;
+ }
+ if (l != null) {
+ l.onClientChange(clearing);
+ }
+ }
+
    /**
     * Switches the tracked session to {@code controller}, unregistering from the previous
     * session and (re)publishing client-change, playback-state and metadata events.
     * @param controller the new session to track, or null if none qualifies.
     */
    private void updateController(MediaController controller) {
        if (DEBUG) {
            Log.d(TAG, "Updating controller to " + controller + " previous controller is "
                    + mCurrentSession);
        }
        synchronized (mInfoLock) {
            if (controller == null) {
                // Tracked session went away: unregister and tell listeners we are clearing.
                if (mCurrentSession != null) {
                    mCurrentSession.unregisterCallback(mSessionCb);
                    mCurrentSession = null;
                    sendMsg(mEventHandler, MSG_CLIENT_CHANGE, SENDMSG_REPLACE,
                            0 /* arg1 ignored */, 1 /* clearing */, null /* obj */, 0 /* delay */);
                }
            } else if (mCurrentSession == null
                    || !controller.getSessionToken()
                            .equals(mCurrentSession.getSessionToken())) {
                // Different session on top: move our callback over, then replay its
                // current playback state and metadata so listeners catch up.
                if (mCurrentSession != null) {
                    mCurrentSession.unregisterCallback(mSessionCb);
                }
                sendMsg(mEventHandler, MSG_CLIENT_CHANGE, SENDMSG_REPLACE,
                        0 /* arg1 ignored */, 0 /* clearing */, null /* obj */, 0 /* delay */);
                mCurrentSession = controller;
                mCurrentSession.registerCallback(mSessionCb, mEventHandler);

                PlaybackState state = controller.getPlaybackState();
                sendMsg(mEventHandler, MSG_NEW_PLAYBACK_STATE, SENDMSG_REPLACE,
                        0 /* arg1 ignored */, 0 /* arg2 ignored */, state /* obj */, 0 /* delay */);

                MediaMetadata metadata = controller.getMetadata();
                sendMsg(mEventHandler, MSG_NEW_MEDIA_METADATA, SENDMSG_REPLACE,
                        0 /* arg1 ignored */, 0 /* arg2 ignored*/, metadata /* obj */, 0 /*delay*/);
            }
            // else same controller, no need to update
        }
    }
+
    /**
     * Translates a new-style {@link PlaybackState} into the legacy RemoteControlClient
     * callbacks expected by the user's OnClientUpdateListener.
     */
    private void onNewPlaybackState(PlaybackState state) {
        final OnClientUpdateListener l;
        synchronized (mInfoLock) {
            l = this.mOnClientUpdateListener;
        }
        if (l != null) {
            // Map to the legacy PLAYSTATE_* value; null state means "none".
            int playstate = state == null ? RemoteControlClient.PLAYSTATE_NONE : PlaybackState
                    .getRccStateFromState(state.getState());
            if (state == null || state.getPosition() == PlaybackState.PLAYBACK_POSITION_UNKNOWN) {
                // No usable position: deliver the state-only variant of the callback.
                l.onClientPlaybackStateUpdate(playstate);
            } else {
                l.onClientPlaybackStateUpdate(playstate, state.getLastPositionUpdateTime(),
                        state.getPosition(), state.getPlaybackSpeed());
            }
            if (state != null) {
                // Also surface which transport controls the client supports.
                l.onClientTransportControlUpdate(
                        PlaybackState.getRccControlFlagsFromActions(state.getActions()));
            }
        }
    }
+
    /**
     * Builds a legacy-format {@link MetadataEditor} from the session's new metadata and
     * delivers it to the user listener. Null metadata is ignored.
     */
    private void onNewMediaMetadata(MediaMetadata metadata) {
        if (metadata == null) {
            // RemoteController only handles non-null metadata
            return;
        }
        final OnClientUpdateListener l;
        final MetadataEditor metadataEditor;
        // prepare the received Bundle to be used inside a MetadataEditor
        synchronized(mInfoLock) {
            l = mOnClientUpdateListener;
            // The rating key is only editable when the current session accepts ratings.
            boolean canRate = mCurrentSession != null
                    && mCurrentSession.getRatingType() != Rating.RATING_NONE;
            long editableKeys = canRate ? MediaMetadataEditor.RATING_KEY_BY_USER : 0;
            Bundle legacyMetadata = MediaSessionLegacyHelper.getOldMetadata(metadata,
                    mArtworkWidth, mArtworkHeight);
            mMetadataEditor = new MetadataEditor(legacyMetadata, editableKeys);
            metadataEditor = mMetadataEditor;
        }
        if (l != null) {
            l.onClientMetadataUpdate(metadataEditor);
        }
    }
+
    //==================================================
    /**
     * Mutable value holder for a playback snapshot: state code, the time the state
     * changed, the position at that time, and the playback speed.
     */
    private static class PlaybackInfo {
        int mState;
        long mStateChangeTimeMs;
        long mCurrentPosMs;
        float mSpeed;

        PlaybackInfo(int state, long stateChangeTimeMs, long currentPosMs, float speed) {
            mState = state;
            mStateChangeTimeMs = stateChangeTimeMs;
            mCurrentPosMs = currentPosMs;
            mSpeed = speed;
        }
    }
+
    /**
     * @hide
     * Used by AudioManager to access user listener receiving the client update notifications
     * @return the listener this RemoteController delivers client updates to
     */
    OnClientUpdateListener getUpdateListener() {
        return mOnClientUpdateListener;
    }
+}
diff --git a/android/media/RemoteDisplay.java b/android/media/RemoteDisplay.java
new file mode 100644
index 00000000..5add65a9
--- /dev/null
+++ b/android/media/RemoteDisplay.java
@@ -0,0 +1,167 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import dalvik.system.CloseGuard;
+
+import android.os.Handler;
+import android.view.Surface;
+
+/**
+ * Listens for Wifi remote display connections managed by the media server.
+ *
+ * @hide
+ */
public final class RemoteDisplay {
    /* these constants must be kept in sync with IRemoteDisplayClient.h */

    public static final int DISPLAY_FLAG_SECURE = 1 << 0;

    // NOTE(review): "UNKOWN" is misspelled, but the name must stay in sync with the
    // native header above — do not rename.
    public static final int DISPLAY_ERROR_UNKOWN = 1;
    public static final int DISPLAY_ERROR_CONNECTION_DROPPED = 2;

    // Warns at finalization if the native listener was never disposed.
    private final CloseGuard mGuard = CloseGuard.get();
    private final Listener mListener;
    private final Handler mHandler;
    private final String mOpPackageName;

    // Opaque native listener handle; 0 when not listening or already disposed.
    private long mPtr;

    private native long nativeListen(String iface, String opPackageName);
    private native void nativeDispose(long ptr);
    private native void nativePause(long ptr);
    private native void nativeResume(long ptr);

    private RemoteDisplay(Listener listener, Handler handler, String opPackageName) {
        mListener = listener;
        mHandler = handler;
        mOpPackageName = opPackageName;
    }

    @Override
    protected void finalize() throws Throwable {
        try {
            // finalized=true: only warn about a missed dispose(), don't close the guard.
            dispose(true);
        } finally {
            super.finalize();
        }
    }

    /**
     * Starts listening for displays to be connected on the specified interface.
     *
     * @param iface The interface address and port in the form "x.x.x.x:y".
     * @param listener The listener to invoke when displays are connected or disconnected.
     * @param handler The handler on which to invoke the listener.
     * @param opPackageName package name forwarded to the native layer.
     * @throws IllegalArgumentException if iface, listener or handler is null.
     * @throws IllegalStateException if native listening could not be started.
     */
    public static RemoteDisplay listen(String iface, Listener listener, Handler handler,
            String opPackageName) {
        if (iface == null) {
            throw new IllegalArgumentException("iface must not be null");
        }
        if (listener == null) {
            throw new IllegalArgumentException("listener must not be null");
        }
        if (handler == null) {
            throw new IllegalArgumentException("handler must not be null");
        }

        RemoteDisplay display = new RemoteDisplay(listener, handler, opPackageName);
        display.startListening(iface);
        return display;
    }

    /**
     * Disconnects the remote display and stops listening for new connections.
     */
    public void dispose() {
        dispose(false);
    }

    // NOTE(review): pause()/resume() pass mPtr through even after dispose() has zeroed it;
    // presumably the native side tolerates a 0 handle — confirm before relying on it.
    public void pause() {
        nativePause(mPtr);
    }

    public void resume() {
        nativeResume(mPtr);
    }

    private void dispose(boolean finalized) {
        if (mPtr != 0) {
            if (mGuard != null) {
                if (finalized) {
                    mGuard.warnIfOpen();
                } else {
                    mGuard.close();
                }
            }

            nativeDispose(mPtr);
            mPtr = 0;
        }
    }

    private void startListening(String iface) {
        mPtr = nativeListen(iface, mOpPackageName);
        if (mPtr == 0) {
            throw new IllegalStateException("Could not start listening for "
                    + "remote display connection on \"" + iface + "\"");
        }
        // Arm the leak guard; dispose() is the expected close call.
        mGuard.open("dispose");
    }

    // Called from native. Marshals the callback onto mHandler.
    private void notifyDisplayConnected(final Surface surface,
            final int width, final int height, final int flags, final int session) {
        mHandler.post(new Runnable() {
            @Override
            public void run() {
                mListener.onDisplayConnected(surface, width, height, flags, session);
            }
        });
    }

    // Called from native. Marshals the callback onto mHandler.
    private void notifyDisplayDisconnected() {
        mHandler.post(new Runnable() {
            @Override
            public void run() {
                mListener.onDisplayDisconnected();
            }
        });
    }

    // Called from native. Marshals the callback onto mHandler.
    private void notifyDisplayError(final int error) {
        mHandler.post(new Runnable() {
            @Override
            public void run() {
                mListener.onDisplayError(error);
            }
        });
    }

    /**
     * Listener invoked when the remote display connection changes state.
     */
    public interface Listener {
        void onDisplayConnected(Surface surface,
                int width, int height, int flags, int session);
        void onDisplayDisconnected();
        void onDisplayError(int error);
    }
}
diff --git a/android/media/RemoteDisplayState.java b/android/media/RemoteDisplayState.java
new file mode 100644
index 00000000..1197f659
--- /dev/null
+++ b/android/media/RemoteDisplayState.java
@@ -0,0 +1,189 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.text.TextUtils;
+
+import java.util.ArrayList;
+
+/**
+ * Information available from IRemoteDisplayProvider about available remote displays.
+ *
+ * Clients must not modify the contents of this object.
+ * @hide
+ */
public final class RemoteDisplayState implements Parcelable {
    // Note: These constants are used by the remote display provider API.
    // Do not change them!
    public static final String SERVICE_INTERFACE =
            "com.android.media.remotedisplay.RemoteDisplayProvider";
    public static final int DISCOVERY_MODE_NONE = 0;
    public static final int DISCOVERY_MODE_PASSIVE = 1;
    public static final int DISCOVERY_MODE_ACTIVE = 2;

    /**
     * A list of all remote displays.
     */
    public final ArrayList<RemoteDisplayInfo> displays;

    public RemoteDisplayState() {
        displays = new ArrayList<RemoteDisplayInfo>();
    }

    RemoteDisplayState(Parcel src) {
        displays = src.createTypedArrayList(RemoteDisplayInfo.CREATOR);
    }

    /** Returns true when the display list exists and every entry is itself valid. */
    public boolean isValid() {
        if (displays == null) {
            return false;
        }
        final int count = displays.size();
        for (int i = 0; i < count; i++) {
            if (!displays.get(i).isValid()) {
                return false;
            }
        }
        return true;
    }

    @Override
    public int describeContents() {
        return 0;
    }

    // Wire format: a single typed list; must mirror the Parcel constructor above.
    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeTypedList(displays);
    }

    public static final Parcelable.Creator<RemoteDisplayState> CREATOR =
            new Parcelable.Creator<RemoteDisplayState>() {
        @Override
        public RemoteDisplayState createFromParcel(Parcel in) {
            return new RemoteDisplayState(in);
        }

        @Override
        public RemoteDisplayState[] newArray(int size) {
            return new RemoteDisplayState[size];
        }
    };

    /** Description of a single remote display as reported by a provider. */
    public static final class RemoteDisplayInfo implements Parcelable {
        // Note: These constants are used by the remote display provider API.
        // Do not change them!
        public static final int STATUS_NOT_AVAILABLE = 0;
        public static final int STATUS_IN_USE = 1;
        public static final int STATUS_AVAILABLE = 2;
        public static final int STATUS_CONNECTING = 3;
        public static final int STATUS_CONNECTED = 4;

        public static final int PLAYBACK_VOLUME_VARIABLE =
                MediaRouter.RouteInfo.PLAYBACK_VOLUME_VARIABLE;
        public static final int PLAYBACK_VOLUME_FIXED =
                MediaRouter.RouteInfo.PLAYBACK_VOLUME_FIXED;

        public String id;
        public String name;
        public String description;
        public int status;
        public int volume;
        public int volumeMax;
        public int volumeHandling;
        public int presentationDisplayId;

        public RemoteDisplayInfo(String id) {
            this.id = id;
            // Defaults: not available, fixed volume, no presentation display (-1).
            status = STATUS_NOT_AVAILABLE;
            volumeHandling = MediaRouter.RouteInfo.PLAYBACK_VOLUME_FIXED;
            presentationDisplayId = -1;
        }

        // Copy constructor: field-by-field shallow copy.
        public RemoteDisplayInfo(RemoteDisplayInfo other) {
            id = other.id;
            name = other.name;
            description = other.description;
            status = other.status;
            volume = other.volume;
            volumeMax = other.volumeMax;
            volumeHandling = other.volumeHandling;
            presentationDisplayId = other.presentationDisplayId;
        }

        // Order must match writeToParcel below.
        RemoteDisplayInfo(Parcel in) {
            id = in.readString();
            name = in.readString();
            description = in.readString();
            status = in.readInt();
            volume = in.readInt();
            volumeMax = in.readInt();
            volumeHandling = in.readInt();
            presentationDisplayId = in.readInt();
        }

        /** A display is valid when it has both a non-empty id and a non-empty name. */
        public boolean isValid() {
            return !TextUtils.isEmpty(id) && !TextUtils.isEmpty(name);
        }

        @Override
        public int describeContents() {
            return 0;
        }

        @Override
        public void writeToParcel(Parcel dest, int flags) {
            dest.writeString(id);
            dest.writeString(name);
            dest.writeString(description);
            dest.writeInt(status);
            dest.writeInt(volume);
            dest.writeInt(volumeMax);
            dest.writeInt(volumeHandling);
            dest.writeInt(presentationDisplayId);
        }

        @Override
        public String toString() {
            return "RemoteDisplayInfo{ id=" + id
                    + ", name=" + name
                    + ", description=" + description
                    + ", status=" + status
                    + ", volume=" + volume
                    + ", volumeMax=" + volumeMax
                    + ", volumeHandling=" + volumeHandling
                    + ", presentationDisplayId=" + presentationDisplayId
                    + " }";
        }

        @SuppressWarnings("hiding")
        public static final Parcelable.Creator<RemoteDisplayInfo> CREATOR =
                new Parcelable.Creator<RemoteDisplayInfo>() {
            @Override
            public RemoteDisplayInfo createFromParcel(Parcel in) {
                return new RemoteDisplayInfo(in);
            }

            @Override
            public RemoteDisplayInfo[] newArray(int size) {
                return new RemoteDisplayInfo[size];
            }
        };
    }
}
diff --git a/android/media/ResampleInputStream.java b/android/media/ResampleInputStream.java
new file mode 100644
index 00000000..80919f7f
--- /dev/null
+++ b/android/media/ResampleInputStream.java
@@ -0,0 +1,150 @@
+/*
+ * Copyright (C) 2008 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import java.io.InputStream;
+import java.io.IOException;
+
+
+/**
+ * ResampleInputStream
+ * @hide
+ */
public final class ResampleInputStream extends InputStream
{
    static {
        System.loadLibrary("media_jni");
    }

    private final static String TAG = "ResampleInputStream";

    // pcm input stream; null once close() has been called
    private InputStream mInputStream;

    // sample rates, assumed to be normalized
    private final int mRateIn;
    private final int mRateOut;

    // input pcm data
    private byte[] mBuf;
    private int mBufCount;

    // length of 2:1 fir
    private static final int mFirLength = 29;

    // helper for bytewise read()
    private final byte[] mOneByte = new byte[1];

    /**
     * Create a new ResampleInputStream, which converts the sample rate
     * @param inputStream InputStream containing 16 bit PCM.
     * @param rateIn the input sample rate.
     * @param rateOut the output sample rate.
     * This only handles rateIn == 2 * rateOut for the moment.
     */
    public ResampleInputStream(InputStream inputStream, int rateIn, int rateOut) {
        // only support 2:1 at the moment
        if (rateIn != 2 * rateOut) throw new IllegalArgumentException("only support 2:1 at the moment");
        // Normalize the ratio: only 2:1 matters from here on, not the absolute rates.
        rateIn = 2;
        rateOut = 1;

        mInputStream = inputStream;
        mRateIn = rateIn;
        mRateOut = rateOut;
    }

    @Override
    public int read() throws IOException {
        // Byte-wise read implemented on top of the array read via a shared 1-byte buffer.
        int rtn = read(mOneByte, 0, 1);
        return rtn == 1 ? (0xff & mOneByte[0]) : -1;
    }

    @Override
    public int read(byte[] b) throws IOException {
        return read(b, 0, b.length);
    }

    @Override
    public int read(byte[] b, int offset, int length) throws IOException {
        if (mInputStream == null) throw new IllegalStateException("not open");

        // ensure that mBuf is big enough to cover requested 'length'
        // (input bytes needed = output samples scaled by the rate ratio, plus FIR history,
        // times 2 bytes per 16-bit sample)
        int nIn = ((length / 2) * mRateIn / mRateOut + mFirLength) * 2;
        if (mBuf == null) {
            mBuf = new byte[nIn];
        } else if (nIn > mBuf.length) {
            byte[] bf = new byte[nIn];
            System.arraycopy(mBuf, 0, bf, 0, mBufCount);
            mBuf = bf;
        }

        // read until we have enough data for at least one output sample
        while (true) {
            int len = ((mBufCount / 2 - mFirLength) * mRateOut / mRateIn) * 2;
            if (len > 0) {
                // Clamp to the caller's request, rounded down to a whole 16-bit sample.
                length = len < length ? len : (length / 2) * 2;
                break;
            }
            // TODO: should mBuf.length below be nIn instead?
            int n = mInputStream.read(mBuf, mBufCount, mBuf.length - mBufCount);
            if (n == -1) return -1;
            mBufCount += n;
        }

        // resample input data
        fir21(mBuf, 0, b, offset, length / 2);

        // move any unused bytes to front of mBuf
        int nFwd = length * mRateIn / mRateOut;
        mBufCount -= nFwd;
        if (mBufCount > 0) System.arraycopy(mBuf, nFwd, mBuf, 0, mBufCount);

        return length;
    }

/*
    @Override
    public int available() throws IOException {
        int nsamples = (mIn - mOut + mInputStream.available()) / 2;
        return ((nsamples - mFirLength) * mRateOut / mRateIn) * 2;
    }
*/

    @Override
    public void close() throws IOException {
        try {
            if (mInputStream != null) mInputStream.close();
        } finally {
            // Mark closed even if the underlying close threw.
            mInputStream = null;
        }
    }

    @Override
    protected void finalize() throws Throwable {
        if (mInputStream != null) {
            // Close the leaked stream, then flag the missing close() to developers.
            close();
            throw new IllegalStateException("someone forgot to close ResampleInputStream");
        }
    }

    //
    // fir filter code JNI interface
    //
    private static native void fir21(byte[] in, int inOffset,
            byte[] out, int outOffset, int npoints);

}
diff --git a/android/media/ResourceBusyException.java b/android/media/ResourceBusyException.java
new file mode 100644
index 00000000..a5abe211
--- /dev/null
+++ b/android/media/ResourceBusyException.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
/**
 * Exception thrown when an operation on a MediaDrm object is attempted
 * and hardware resources are not available, due to being in use.
 */
public final class ResourceBusyException extends MediaDrmException {
    /** @param detailMessage human-readable description of the busy resource condition. */
    public ResourceBusyException(String detailMessage) {
        super(detailMessage);
    }
}
diff --git a/android/media/Ringtone.java b/android/media/Ringtone.java
new file mode 100644
index 00000000..209ec42d
--- /dev/null
+++ b/android/media/Ringtone.java
@@ -0,0 +1,480 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.content.ContentProvider;
+import android.content.ContentResolver;
+import android.content.Context;
+import android.content.res.AssetFileDescriptor;
+import android.content.res.Resources.NotFoundException;
+import android.database.Cursor;
+import android.media.MediaPlayer.OnCompletionListener;
+import android.net.Uri;
+import android.os.Binder;
+import android.os.RemoteException;
+import android.provider.MediaStore;
+import android.provider.Settings;
+import android.provider.MediaStore.MediaColumns;
+import android.util.Log;
+
+import java.io.IOException;
+import java.util.ArrayList;
+
+/**
+ * Ringtone provides a quick method for playing a ringtone, notification, or
+ * other similar types of sounds.
+ * <p>
+ * For ways of retrieving {@link Ringtone} objects or to show a ringtone
+ * picker, see {@link RingtoneManager}.
+ *
+ * @see RingtoneManager
+ */
+public class Ringtone {
    private static final String TAG = "Ringtone";
    private static final boolean LOGD = true;

    // Columns fetched when resolving a ringtone title from the media provider;
    // index 2 (TITLE) is the one read back in getTitle().
    private static final String[] MEDIA_COLUMNS = new String[] {
        MediaStore.Audio.Media._ID,
        MediaStore.Audio.Media.DATA,
        MediaStore.Audio.Media.TITLE
    };
    /** Selection that limits query results to just audio files */
    private static final String MEDIA_SELECTION = MediaColumns.MIME_TYPE + " LIKE 'audio/%' OR "
            + MediaColumns.MIME_TYPE + " IN ('application/ogg', 'application/x-flac')";

    // keep references on active Ringtones until stopped or completion listener called.
    private static final ArrayList<Ringtone> sActiveRingtones = new ArrayList<Ringtone>();

    private final Context mContext;
    private final AudioManager mAudioManager;

    /**
     * Flag indicating if we're allowed to fall back to remote playback using
     * {@link #mRemotePlayer}. Typically this is false when we're the remote
     * player and there is nobody else to delegate to.
     */
    private final boolean mAllowRemote;
    private final IRingtonePlayer mRemotePlayer;
    private final Binder mRemoteToken;

    private MediaPlayer mLocalPlayer;
    private final MyOnCompletionListener mCompletionListener = new MyOnCompletionListener();

    private Uri mUri;
    private String mTitle;

    // Default attributes tag playback as a notification ringtone.
    private AudioAttributes mAudioAttributes = new AudioAttributes.Builder()
            .setUsage(AudioAttributes.USAGE_NOTIFICATION_RINGTONE)
            .setContentType(AudioAttributes.CONTENT_TYPE_SONIFICATION)
            .build();
    // playback properties, use synchronized with mPlaybackSettingsLock
    private boolean mIsLooping = false;
    private float mVolume = 1.0f;
    private final Object mPlaybackSettingsLock = new Object();

    /**
     * {@hide}
     * @param context context used to resolve the audio service.
     * @param allowRemote whether playback may fall back to the remote IRingtonePlayer.
     */
    public Ringtone(Context context, boolean allowRemote) {
        mContext = context;
        mAudioManager = (AudioManager) mContext.getSystemService(Context.AUDIO_SERVICE);
        mAllowRemote = allowRemote;
        // Remote player and token are only obtained when remote fallback is permitted.
        mRemotePlayer = allowRemote ? mAudioManager.getRingtonePlayer() : null;
        mRemoteToken = allowRemote ? new Binder() : null;
    }
+
+ /**
+ * Sets the stream type where this ringtone will be played.
+ *
+ * @param streamType The stream, see {@link AudioManager}.
+ * @deprecated use {@link #setAudioAttributes(AudioAttributes)}
+ */
+ @Deprecated
+ public void setStreamType(int streamType) {
+ PlayerBase.deprecateStreamTypeForPlayback(streamType, "Ringtone", "setStreamType()");
+ setAudioAttributes(new AudioAttributes.Builder()
+ .setInternalLegacyStreamType(streamType)
+ .build());
+ }
+
    /**
     * Gets the stream type where this ringtone will be played.
     *
     * @return The stream type, see {@link AudioManager}.
     * @deprecated use of stream types is deprecated, see
     *     {@link #setAudioAttributes(AudioAttributes)}
     */
    @Deprecated
    public int getStreamType() {
        // Derived from the current audio attributes rather than stored separately.
        return AudioAttributes.toLegacyStreamType(mAudioAttributes);
    }
+
    /**
     * Sets the {@link AudioAttributes} for this ringtone.
     * @param attributes the non-null attributes characterizing this ringtone.
     * @throws IllegalArgumentException if {@code attributes} is null.
     */
    public void setAudioAttributes(AudioAttributes attributes)
            throws IllegalArgumentException {
        if (attributes == null) {
            throw new IllegalArgumentException("Invalid null AudioAttributes for Ringtone");
        }
        mAudioAttributes = attributes;
        // The audio attributes have to be set before the media player is prepared.
        // Re-initialize it.
        setUri(mUri);
    }
+
    /**
     * Returns the {@link AudioAttributes} used by this object.
     * @return the {@link AudioAttributes} that were set with
     * {@link #setAudioAttributes(AudioAttributes)} or the default attributes if none were set.
     */
    public AudioAttributes getAudioAttributes() {
        return mAudioAttributes;
    }
+
    /**
     * @hide
     * Sets the player to be looping or non-looping.
     * @param looping whether to loop or not
     */
    public void setLooping(boolean looping) {
        synchronized (mPlaybackSettingsLock) {
            mIsLooping = looping;
            // Push the new setting to whichever player (local or remote) is in use.
            applyPlaybackProperties_sync();
        }
    }
+
+ /**
+ * @hide
+ * Sets the volume on this player.
+ * @param volume a raw scalar in range 0.0 to 1.0, where 0.0 mutes this player, and 1.0
+ * corresponds to no attenuation being applied.
+ */
+ public void setVolume(float volume) {
+ synchronized (mPlaybackSettingsLock) {
+ if (volume < 0.0f) { volume = 0.0f; }
+ if (volume > 1.0f) { volume = 1.0f; }
+ mVolume = volume;
+ applyPlaybackProperties_sync();
+ }
+ }
+
    /**
     * Must be called synchronized on mPlaybackSettingsLock.
     * Pushes mVolume/mIsLooping to the local player if one exists, otherwise to the
     * remote player when remote fallback is allowed.
     */
    private void applyPlaybackProperties_sync() {
        if (mLocalPlayer != null) {
            mLocalPlayer.setVolume(mVolume);
            mLocalPlayer.setLooping(mIsLooping);
        } else if (mAllowRemote && (mRemotePlayer != null)) {
            try {
                mRemotePlayer.setPlaybackProperties(mRemoteToken, mVolume, mIsLooping);
            } catch (RemoteException e) {
                // Best effort: the remote player may have died; just log.
                Log.w(TAG, "Problem setting playback properties: ", e);
            }
        } else {
            Log.w(TAG,
                    "Neither local nor remote player available when applying playback properties");
        }
    }
+
+ /**
+ * Returns a human-presentable title for ringtone. Looks in media
+ * content provider. If not in either, uses the filename
+ *
+ * @param context A context used for querying.
+ */
+ public String getTitle(Context context) {
+ if (mTitle != null) return mTitle;
+ return mTitle = getTitle(context, mUri, true /*followSettingsUri*/, mAllowRemote);
+ }
+
    /**
     * @hide
     * Resolves a human-readable title for the given ringtone Uri.
     * Settings Uris are optionally dereferenced to the actual default ringtone; media-store
     * Uris are queried directly, falling back to the remote ringtone player on
     * SecurityException, then to the Uri's last path segment, then to fixed strings.
     * @param context context used for provider queries and string resources.
     * @param uri ringtone Uri, or null for "silent".
     * @param followSettingsUri whether to resolve a Settings default-ringtone Uri.
     * @param allowRemote whether the remote IRingtonePlayer may be consulted.
     */
    public static String getTitle(
            Context context, Uri uri, boolean followSettingsUri, boolean allowRemote) {
        ContentResolver res = context.getContentResolver();

        String title = null;

        if (uri != null) {
            String authority = ContentProvider.getAuthorityWithoutUserId(uri.getAuthority());

            if (Settings.AUTHORITY.equals(authority)) {
                if (followSettingsUri) {
                    // Resolve the settings Uri to the actual default ringtone, then recurse
                    // once (followSettingsUri=false prevents further indirection).
                    Uri actualUri = RingtoneManager.getActualDefaultRingtoneUri(context,
                            RingtoneManager.getDefaultType(uri));
                    String actualTitle = getTitle(
                            context, actualUri, false /*followSettingsUri*/, allowRemote);
                    title = context
                            .getString(com.android.internal.R.string.ringtone_default_with_actual,
                                    actualTitle);
                }
            } else {
                Cursor cursor = null;
                try {
                    if (MediaStore.AUTHORITY.equals(authority)) {
                        // Without remote fallback, restrict the query to audio MIME types.
                        final String mediaSelection = allowRemote ? null : MEDIA_SELECTION;
                        cursor = res.query(uri, MEDIA_COLUMNS, mediaSelection, null, null);
                        if (cursor != null && cursor.getCount() == 1) {
                            cursor.moveToFirst();
                            // Column 2 is MediaStore.Audio.Media.TITLE (see MEDIA_COLUMNS).
                            return cursor.getString(2);
                        }
                        // missing cursor is handled below
                    }
                } catch (SecurityException e) {
                    // No direct read access: ask the remote ringtone player, if allowed.
                    IRingtonePlayer mRemotePlayer = null;
                    if (allowRemote) {
                        AudioManager audioManager =
                                (AudioManager) context.getSystemService(Context.AUDIO_SERVICE);
                        mRemotePlayer = audioManager.getRingtonePlayer();
                    }
                    if (mRemotePlayer != null) {
                        try {
                            title = mRemotePlayer.getTitle(uri);
                        } catch (RemoteException re) {
                            // Best effort: fall through to the path-segment fallback below.
                        }
                    }
                } finally {
                    if (cursor != null) {
                        cursor.close();
                    }
                    cursor = null;
                }
                if (title == null) {
                    title = uri.getLastPathSegment();
                }
            }
        } else {
            title = context.getString(com.android.internal.R.string.ringtone_silent);
        }

        if (title == null) {
            title = context.getString(com.android.internal.R.string.ringtone_unknown);

            if (title == null) {
                title = "";
            }
        }

        return title;
    }
+
+    /**
+     * Set {@link Uri} to be used for ringtone playback. Attempts to open
+     * locally, otherwise will delegate playback to remote
+     * {@link IRingtonePlayer}.
+     *
+     * @hide
+     */
+    public void setUri(Uri uri) {
+        // Release any player created for a previous URI before switching sources.
+        destroyLocalPlayer();
+
+        mUri = uri;
+        if (mUri == null) {
+            return;
+        }
+
+        // TODO: detect READ_EXTERNAL and specific content provider case, instead of relying on throwing
+
+        // try opening uri locally before delegating to remote player
+        mLocalPlayer = new MediaPlayer();
+        try {
+            mLocalPlayer.setDataSource(mContext, mUri);
+            mLocalPlayer.setAudioAttributes(mAudioAttributes);
+            synchronized (mPlaybackSettingsLock) {
+                // Re-apply cached volume/looping so the fresh player matches prior settings.
+                applyPlaybackProperties_sync();
+            }
+            mLocalPlayer.prepare();
+
+        } catch (SecurityException | IOException e) {
+            // Local open failed (no permission or unreadable source). A null mLocalPlayer
+            // signals play() to delegate to the remote player.
+            destroyLocalPlayer();
+            if (!mAllowRemote) {
+                Log.w(TAG, "Remote playback not allowed: " + e);
+            }
+        }
+
+        if (LOGD) {
+            if (mLocalPlayer != null) {
+                Log.d(TAG, "Successfully created local player");
+            } else {
+                Log.d(TAG, "Problem opening; delegating to remote player");
+            }
+        }
+    }
+
+    /** Returns the {@link Uri} this ringtone was set up for, or null if none. {@hide} */
+    public Uri getUri() {
+        return mUri;
+    }
+
+    /**
+     * Plays the ringtone, preferring the local player and otherwise delegating to the
+     * remote {@link IRingtonePlayer}; on failure attempts the built-in fallback ringtone.
+     */
+    public void play() {
+        if (mLocalPlayer != null) {
+            // do not play ringtones if stream volume is 0
+            // (typically because ringer mode is silent).
+            if (mAudioManager.getStreamVolume(
+                    AudioAttributes.toLegacyStreamType(mAudioAttributes)) != 0) {
+                startLocalPlayer();
+            }
+        } else if (mAllowRemote && (mRemotePlayer != null)) {
+            final Uri canonicalUri = mUri.getCanonicalUri();
+            final boolean looping;
+            final float volume;
+            // Snapshot the settings under the lock so the remote call sees a
+            // consistent volume/looping pair.
+            synchronized (mPlaybackSettingsLock) {
+                looping = mIsLooping;
+                volume = mVolume;
+            }
+            try {
+                mRemotePlayer.play(mRemoteToken, canonicalUri, mAudioAttributes, volume, looping);
+            } catch (RemoteException e) {
+                if (!playFallbackRingtone()) {
+                    Log.w(TAG, "Problem playing ringtone: " + e);
+                }
+            }
+        } else {
+            if (!playFallbackRingtone()) {
+                Log.w(TAG, "Neither local nor remote playback available");
+            }
+        }
+    }
+
+    /**
+     * Stops a playing ringtone. Local playback tears down the player entirely;
+     * remote playback forwards the stop request over binder.
+     */
+    public void stop() {
+        if (mLocalPlayer != null) {
+            destroyLocalPlayer();
+            return;
+        }
+        if (mAllowRemote && (mRemotePlayer != null)) {
+            try {
+                mRemotePlayer.stop(mRemoteToken);
+            } catch (RemoteException e) {
+                Log.w(TAG, "Problem stopping ringtone: " + e);
+            }
+        }
+    }
+
+    /**
+     * Tears down the local MediaPlayer (if any): releases its resources and removes
+     * this ringtone from the active set so it may be garbage collected.
+     */
+    private void destroyLocalPlayer() {
+        if (mLocalPlayer != null) {
+            // Detach the listener first so release() cannot trigger a completion callback.
+            mLocalPlayer.setOnCompletionListener(null);
+            mLocalPlayer.reset();
+            mLocalPlayer.release();
+            mLocalPlayer = null;
+            synchronized (sActiveRingtones) {
+                // Drop the strong reference held during playback.
+                sActiveRingtones.remove(this);
+            }
+        }
+    }
+
+    /**
+     * Starts playback on the local MediaPlayer. No-op when there is no local player.
+     */
+    private void startLocalPlayer() {
+        if (mLocalPlayer == null) {
+            return;
+        }
+        synchronized (sActiveRingtones) {
+            // Hold a strong reference while playing; released by the completion
+            // listener or destroyLocalPlayer().
+            sActiveRingtones.add(this);
+        }
+        mLocalPlayer.setOnCompletionListener(mCompletionListener);
+        mLocalPlayer.start();
+    }
+
+    /**
+     * Whether this ringtone is currently playing.
+     *
+     * @return True if playing, false otherwise.
+     */
+    public boolean isPlaying() {
+        if (mLocalPlayer != null) {
+            return mLocalPlayer.isPlaying();
+        }
+        if (!mAllowRemote || (mRemotePlayer == null)) {
+            Log.w(TAG, "Neither local nor remote playback available");
+            return false;
+        }
+        try {
+            return mRemotePlayer.isPlaying(mRemoteToken);
+        } catch (RemoteException e) {
+            Log.w(TAG, "Problem checking ringtone: " + e);
+            return false;
+        }
+    }
+
+    /**
+     * Attempts to play the built-in fallback ringtone when normal playback is not
+     * available. Only used for default ringtone URIs, and only when the target
+     * stream is not muted.
+     *
+     * @return true if fallback playback was started, false otherwise
+     */
+    private boolean playFallbackRingtone() {
+        if (mAudioManager.getStreamVolume(AudioAttributes.toLegacyStreamType(mAudioAttributes))
+                != 0) {
+            int ringtoneType = RingtoneManager.getDefaultType(mUri);
+            if (ringtoneType == -1 ||
+                    RingtoneManager.getActualDefaultRingtoneUri(mContext, ringtoneType) != null) {
+                // Default ringtone, try fallback ringtone.
+                // try-with-resources: the descriptor is now closed on every path; the
+                // previous code leaked it when setDataSource()/prepare() threw.
+                try (AssetFileDescriptor afd = mContext.getResources().openRawResourceFd(
+                        com.android.internal.R.raw.fallbackring)) {
+                    if (afd == null) {
+                        Log.e(TAG, "Could not load fallback ringtone");
+                        return false;
+                    }
+                    mLocalPlayer = new MediaPlayer();
+                    if (afd.getDeclaredLength() < 0) {
+                        mLocalPlayer.setDataSource(afd.getFileDescriptor());
+                    } else {
+                        mLocalPlayer.setDataSource(afd.getFileDescriptor(),
+                                afd.getStartOffset(),
+                                afd.getDeclaredLength());
+                    }
+                    mLocalPlayer.setAudioAttributes(mAudioAttributes);
+                    synchronized (mPlaybackSettingsLock) {
+                        applyPlaybackProperties_sync();
+                    }
+                    mLocalPlayer.prepare();
+                    startLocalPlayer();
+                    return true;
+                } catch (IOException ioe) {
+                    destroyLocalPlayer();
+                    Log.e(TAG, "Failed to open fallback ringtone");
+                } catch (NotFoundException nfe) {
+                    Log.e(TAG, "Fallback ringtone does not exist");
+                }
+            } else {
+                Log.w(TAG, "not playing fallback for " + mUri);
+            }
+        }
+        return false;
+    }
+
+    /** Overrides the cached title returned by {@link #getTitle(Context)}. */
+    void setTitle(String title) {
+        mTitle = title;
+    }
+
+    /**
+     * Releases the local player when this ringtone is garbage collected — a safety
+     * net for callers that never called {@link #stop()}.
+     * NOTE(review): super.finalize() is not invoked; presumably fine if the direct
+     * superclass is Object — confirm the class hierarchy.
+     */
+    @Override
+    protected void finalize() {
+        if (mLocalPlayer != null) {
+            mLocalPlayer.release();
+        }
+    }
+
+    /**
+     * Completion listener that removes this ringtone from the active set once
+     * playback finishes, allowing it to be garbage collected again.
+     */
+    class MyOnCompletionListener implements MediaPlayer.OnCompletionListener {
+        @Override
+        public void onCompletion(MediaPlayer mp) {
+            synchronized (sActiveRingtones) {
+                sActiveRingtones.remove(Ringtone.this);
+            }
+            mp.setOnCompletionListener(null); // Help the Java GC: break the refcount cycle.
+        }
+    }
+}
diff --git a/android/media/RingtoneManager.java b/android/media/RingtoneManager.java
new file mode 100644
index 00000000..3eb9d529
--- /dev/null
+++ b/android/media/RingtoneManager.java
@@ -0,0 +1,1189 @@
+/*
+ * Copyright (C) 2007 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.Manifest;
+import android.annotation.NonNull;
+import android.annotation.RequiresPermission;
+import android.annotation.SdkConstant;
+import android.annotation.SdkConstant.SdkConstantType;
+import android.annotation.WorkerThread;
+import android.app.Activity;
+import android.content.ContentProvider;
+import android.content.ContentResolver;
+import android.content.ContentUris;
+import android.content.Context;
+import android.content.pm.PackageManager;
+import android.content.pm.UserInfo;
+import android.database.Cursor;
+import android.media.MediaScannerConnection.MediaScannerConnectionClient;
+import android.net.Uri;
+import android.os.Environment;
+import android.os.IBinder;
+import android.os.ParcelFileDescriptor;
+import android.os.Process;
+import android.os.RemoteException;
+import android.os.ServiceManager;
+import android.os.UserHandle;
+import android.os.UserManager;
+import android.provider.MediaStore;
+import android.provider.Settings;
+import android.provider.Settings.System;
+import android.util.Log;
+
+import com.android.internal.database.SortCursor;
+
+import libcore.io.Streams;
+
+import java.io.Closeable;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.LinkedBlockingQueue;
+
+import static android.content.ContentProvider.maybeAddUserId;
+import static android.content.pm.PackageManager.NameNotFoundException;
+
+/**
+ * RingtoneManager provides access to ringtones, notification, and other types
+ * of sounds. It manages querying the different media providers and combines the
+ * results into a single cursor. It also provides a {@link Ringtone} for each
+ * ringtone. We generically call these sounds ringtones, however the
+ * {@link #TYPE_RINGTONE} refers to the type of sounds that are suitable for the
+ * phone ringer.
+ * <p>
+ * To show a ringtone picker to the user, use the
+ * {@link #ACTION_RINGTONE_PICKER} intent to launch the picker as a subactivity.
+ *
+ * @see Ringtone
+ */
+public class RingtoneManager {
+
+ private static final String TAG = "RingtoneManager";
+
+ // Make sure these are in sync with attrs.xml:
+ // <attr name="ringtoneType">
+
+ /**
+ * Type that refers to sounds that are used for the phone ringer.
+ */
+ public static final int TYPE_RINGTONE = 1;
+
+ /**
+ * Type that refers to sounds that are used for notifications.
+ */
+ public static final int TYPE_NOTIFICATION = 2;
+
+ /**
+ * Type that refers to sounds that are used for the alarm.
+ */
+ public static final int TYPE_ALARM = 4;
+
+ /**
+ * All types of sounds.
+ */
+ public static final int TYPE_ALL = TYPE_RINGTONE | TYPE_NOTIFICATION | TYPE_ALARM;
+
+ // </attr>
+
+ /**
+ * Activity Action: Shows a ringtone picker.
+ * <p>
+ * Input: {@link #EXTRA_RINGTONE_EXISTING_URI},
+ * {@link #EXTRA_RINGTONE_SHOW_DEFAULT},
+ * {@link #EXTRA_RINGTONE_SHOW_SILENT}, {@link #EXTRA_RINGTONE_TYPE},
+ * {@link #EXTRA_RINGTONE_DEFAULT_URI}, {@link #EXTRA_RINGTONE_TITLE},
+ * <p>
+ * Output: {@link #EXTRA_RINGTONE_PICKED_URI}.
+ */
+ @SdkConstant(SdkConstantType.ACTIVITY_INTENT_ACTION)
+ public static final String ACTION_RINGTONE_PICKER = "android.intent.action.RINGTONE_PICKER";
+
+ /**
+ * Given to the ringtone picker as a boolean. Whether to show an item for
+ * "Default".
+ *
+ * @see #ACTION_RINGTONE_PICKER
+ */
+ public static final String EXTRA_RINGTONE_SHOW_DEFAULT =
+ "android.intent.extra.ringtone.SHOW_DEFAULT";
+
+ /**
+ * Given to the ringtone picker as a boolean. Whether to show an item for
+ * "Silent". If the "Silent" item is picked,
+ * {@link #EXTRA_RINGTONE_PICKED_URI} will be null.
+ *
+ * @see #ACTION_RINGTONE_PICKER
+ */
+ public static final String EXTRA_RINGTONE_SHOW_SILENT =
+ "android.intent.extra.ringtone.SHOW_SILENT";
+
+ /**
+ * Given to the ringtone picker as a boolean. Whether to include DRM ringtones.
+ * @deprecated DRM ringtones are no longer supported
+ */
+ @Deprecated
+ public static final String EXTRA_RINGTONE_INCLUDE_DRM =
+ "android.intent.extra.ringtone.INCLUDE_DRM";
+
+ /**
+ * Given to the ringtone picker as a {@link Uri}. The {@link Uri} of the
+ * current ringtone, which will be used to show a checkmark next to the item
+ * for this {@link Uri}. If showing an item for "Default" (@see
+ * {@link #EXTRA_RINGTONE_SHOW_DEFAULT}), this can also be one of
+ * {@link System#DEFAULT_RINGTONE_URI},
+ * {@link System#DEFAULT_NOTIFICATION_URI}, or
+ * {@link System#DEFAULT_ALARM_ALERT_URI} to have the "Default" item
+ * checked.
+ *
+ * @see #ACTION_RINGTONE_PICKER
+ */
+ public static final String EXTRA_RINGTONE_EXISTING_URI =
+ "android.intent.extra.ringtone.EXISTING_URI";
+
+ /**
+ * Given to the ringtone picker as a {@link Uri}. The {@link Uri} of the
+ * ringtone to play when the user attempts to preview the "Default"
+ * ringtone. This can be one of {@link System#DEFAULT_RINGTONE_URI},
+ * {@link System#DEFAULT_NOTIFICATION_URI}, or
+ * {@link System#DEFAULT_ALARM_ALERT_URI} to have the "Default" point to
+ * the current sound for the given default sound type. If you are showing a
+ * ringtone picker for some other type of sound, you are free to provide any
+ * {@link Uri} here.
+ */
+ public static final String EXTRA_RINGTONE_DEFAULT_URI =
+ "android.intent.extra.ringtone.DEFAULT_URI";
+
+ /**
+ * Given to the ringtone picker as an int. Specifies which ringtone type(s) should be
+ * shown in the picker. One or more of {@link #TYPE_RINGTONE},
+ * {@link #TYPE_NOTIFICATION}, {@link #TYPE_ALARM}, or {@link #TYPE_ALL}
+ * (bitwise-ored together).
+ */
+ public static final String EXTRA_RINGTONE_TYPE = "android.intent.extra.ringtone.TYPE";
+
+ /**
+ * Given to the ringtone picker as a {@link CharSequence}. The title to
+ * show for the ringtone picker. This has a default value that is suitable
+ * in most cases.
+ */
+ public static final String EXTRA_RINGTONE_TITLE = "android.intent.extra.ringtone.TITLE";
+
+ /**
+ * @hide
+ * Given to the ringtone picker as an int. Additional AudioAttributes flags to use
+ * when playing the ringtone in the picker.
+ * @see #ACTION_RINGTONE_PICKER
+ */
+ public static final String EXTRA_RINGTONE_AUDIO_ATTRIBUTES_FLAGS =
+ "android.intent.extra.ringtone.AUDIO_ATTRIBUTES_FLAGS";
+
+ /**
+ * Returned from the ringtone picker as a {@link Uri}.
+ * <p>
+ * It will be one of:
+ * <li> the picked ringtone,
+ * <li> a {@link Uri} that equals {@link System#DEFAULT_RINGTONE_URI},
+ * {@link System#DEFAULT_NOTIFICATION_URI}, or
+ * {@link System#DEFAULT_ALARM_ALERT_URI} if the default was chosen,
+ * <li> null if the "Silent" item was picked.
+ *
+ * @see #ACTION_RINGTONE_PICKER
+ */
+ public static final String EXTRA_RINGTONE_PICKED_URI =
+ "android.intent.extra.ringtone.PICKED_URI";
+
+ // Make sure the column ordering and then ..._COLUMN_INDEX are in sync
+
+ private static final String[] INTERNAL_COLUMNS = new String[] {
+ MediaStore.Audio.Media._ID, MediaStore.Audio.Media.TITLE,
+ "\"" + MediaStore.Audio.Media.INTERNAL_CONTENT_URI + "\"",
+ MediaStore.Audio.Media.TITLE_KEY
+ };
+
+ private static final String[] MEDIA_COLUMNS = new String[] {
+ MediaStore.Audio.Media._ID, MediaStore.Audio.Media.TITLE,
+ "\"" + MediaStore.Audio.Media.EXTERNAL_CONTENT_URI + "\"",
+ MediaStore.Audio.Media.TITLE_KEY
+ };
+
+ /**
+     * The column index (in the cursor returned by {@link #getCursor()}) for
+     * the row ID.
+ */
+ public static final int ID_COLUMN_INDEX = 0;
+
+ /**
+     * The column index (in the cursor returned by {@link #getCursor()}) for
+     * the title.
+ */
+ public static final int TITLE_COLUMN_INDEX = 1;
+
+ /**
+     * The column index (in the cursor returned by {@link #getCursor()}) for
+     * the media provider's URI.
+ */
+ public static final int URI_COLUMN_INDEX = 2;
+
+ private final Activity mActivity;
+ private final Context mContext;
+
+ private Cursor mCursor;
+
+ private int mType = TYPE_RINGTONE;
+
+ /**
+ * If a column (item from this list) exists in the Cursor, its value must
+ * be true (value of 1) for the row to be returned.
+ */
+ private final List<String> mFilterColumns = new ArrayList<String>();
+
+ private boolean mStopPreviousRingtone = true;
+ private Ringtone mPreviousRingtone;
+
+ private boolean mIncludeParentRingtones;
+
+    /**
+     * Constructs a RingtoneManager. This constructor is recommended as its
+     * constructed instance manages cursor(s).
+     *
+     * @param activity The activity used to get a managed cursor.
+     * @see #RingtoneManager(Context)
+     */
+    public RingtoneManager(Activity activity) {
+        this(activity, /* includeParentRingtones */ false);
+    }
+
+    /**
+     * Constructs a RingtoneManager. This constructor is recommended if there's the need to also
+     * list ringtones from the user's parent profile.
+     *
+     * @param activity The activity used to get a managed cursor.
+     * @param includeParentRingtones if true, this ringtone manager's cursor will also retrieve
+     *            ringtones from the parent of the user specified in the given activity
+     *
+     * @hide
+     */
+    public RingtoneManager(Activity activity, boolean includeParentRingtones) {
+        mActivity = activity;
+        // The activity doubles as the query context; managedQuery() is used when non-null.
+        mContext = activity;
+        setType(mType);
+        mIncludeParentRingtones = includeParentRingtones;
+    }
+
+    /**
+     * Constructs a RingtoneManager. The instance constructed by this
+     * constructor will not manage the cursor(s), so the client should handle
+     * this itself.
+     *
+     * @param context The context used to get a cursor.
+     */
+    public RingtoneManager(Context context) {
+        this(context, /* includeParentRingtones */ false);
+    }
+
+    /**
+     * Constructs a RingtoneManager that does not manage its cursor(s).
+     *
+     * @param context The context used to get a cursor.
+     * @param includeParentRingtones if true, this ringtone manager's cursor will also retrieve
+     *            ringtones from the parent of the user specified in the given context
+     *
+     * @hide
+     */
+    public RingtoneManager(Context context, boolean includeParentRingtones) {
+        // No activity: queries go through the context's ContentResolver, unmanaged.
+        mActivity = null;
+        mContext = context;
+        setType(mType);
+        mIncludeParentRingtones = includeParentRingtones;
+    }
+
+    /**
+     * Sets which type(s) of ringtones will be listed by this.
+     *
+     * @param type The type(s), one or more of {@link #TYPE_RINGTONE},
+     *            {@link #TYPE_NOTIFICATION}, {@link #TYPE_ALARM},
+     *            {@link #TYPE_ALL}.
+     * @throws IllegalStateException if {@link #getCursor()} has already been called.
+     * @see #EXTRA_RINGTONE_TYPE
+     */
+    public void setType(int type) {
+        if (mCursor != null) {
+            // NOTE(review): message talks about "filter columns" because setType() drives
+            // setFilterColumnsList(); the actual constraint is that the type must be set
+            // before the cursor is first created.
+            throw new IllegalStateException(
+                    "Setting filter columns should be done before querying for ringtones.");
+        }
+
+        mType = type;
+        setFilterColumnsList(type);
+    }
+
+    /**
+     * Infers the volume stream type based on what type of ringtones this
+     * manager is returning.
+     *
+     * @return The stream type.
+     */
+    public int inferStreamType() {
+        if (mType == TYPE_ALARM) {
+            return AudioManager.STREAM_ALARM;
+        }
+        if (mType == TYPE_NOTIFICATION) {
+            return AudioManager.STREAM_NOTIFICATION;
+        }
+        // Any other type (including combined bitmasks) maps to the ring stream.
+        return AudioManager.STREAM_RING;
+    }
+
+    /**
+     * Whether retrieving another {@link Ringtone} will stop playing the
+     * previously retrieved {@link Ringtone}.
+     * <p>
+     * If this is false, make sure to {@link Ringtone#stop()} any previous
+     * ringtones to free resources.
+     *
+     * @param stopPreviousRingtone If true, the previously retrieved
+     *            {@link Ringtone} will be stopped.
+     * @see #getStopPreviousRingtone()
+     */
+    public void setStopPreviousRingtone(boolean stopPreviousRingtone) {
+        mStopPreviousRingtone = stopPreviousRingtone;
+    }
+
+    /**
+     * Returns whether retrieving another {@link Ringtone} stops the previous one.
+     *
+     * @see #setStopPreviousRingtone(boolean)
+     */
+    public boolean getStopPreviousRingtone() {
+        return mStopPreviousRingtone;
+    }
+
+    /**
+     * Stops playing the last {@link Ringtone} retrieved from this.
+     * Safe to call when no ringtone has been retrieved yet.
+     */
+    public void stopPreviousRingtone() {
+        if (mPreviousRingtone != null) {
+            mPreviousRingtone.stop();
+        }
+    }
+
+    /**
+     * Returns whether DRM ringtones will be included.
+     *
+     * @return Whether DRM ringtones will be included.
+     * @see #setIncludeDrm(boolean)
+     * Obsolete - always returns false
+     * @deprecated DRM ringtones are no longer supported
+     */
+    @Deprecated
+    public boolean getIncludeDrm() {
+        // DRM support was removed; kept only for binary compatibility.
+        return false;
+    }
+
+    /**
+     * Sets whether to include DRM ringtones.
+     *
+     * @param includeDrm Whether to include DRM ringtones.
+     * Obsolete - no longer has any effect
+     * @deprecated DRM ringtones are no longer supported
+     */
+    @Deprecated
+    public void setIncludeDrm(boolean includeDrm) {
+        // No-op apart from warning callers still asking for DRM inclusion.
+        if (includeDrm) {
+            Log.w(TAG, "setIncludeDrm no longer supported");
+        }
+    }
+
+    /**
+     * Returns a {@link Cursor} of all the ringtones available. The returned
+     * cursor will be the same cursor returned each time this method is called,
+     * so do not {@link Cursor#close()} the cursor. The cursor can be
+     * {@link Cursor#deactivate()} safely.
+     * <p>
+     * If {@link RingtoneManager#RingtoneManager(Activity)} was not used, the
+     * caller should manage the returned cursor through its activity's life
+     * cycle to prevent leaking the cursor.
+     * <p>
+     * Note that the list of ringtones available will differ depending on whether the caller
+     * has the {@link android.Manifest.permission#READ_EXTERNAL_STORAGE} permission.
+     *
+     * @return A {@link Cursor} of all the ringtones available.
+     * @see #ID_COLUMN_INDEX
+     * @see #TITLE_COLUMN_INDEX
+     * @see #URI_COLUMN_INDEX
+     */
+    public Cursor getCursor() {
+        // Reuse the cached cursor when it can be refreshed in place.
+        if (mCursor != null && mCursor.requery()) {
+            return mCursor;
+        }
+
+        ArrayList<Cursor> ringtoneCursors = new ArrayList<Cursor>();
+        ringtoneCursors.add(getInternalRingtones());
+        ringtoneCursors.add(getMediaRingtones());
+
+        if (mIncludeParentRingtones) {
+            Cursor parentRingtonesCursor = getParentProfileRingtones();
+            if (parentRingtonesCursor != null) {
+                ringtoneCursors.add(parentRingtonesCursor);
+            }
+        }
+
+        // Merge all sources into one cursor, sorted by the default media sort order.
+        return mCursor = new SortCursor(ringtoneCursors.toArray(new Cursor[ringtoneCursors.size()]),
+                MediaStore.Audio.Media.DEFAULT_SORT_ORDER);
+    }
+
+    /**
+     * Returns a cursor over the parent profile's external ringtones, or null when the
+     * current user has no distinct parent profile or its context cannot be created.
+     */
+    private Cursor getParentProfileRingtones() {
+        final UserManager um = UserManager.get(mContext);
+        final UserInfo parentInfo = um.getProfileParent(mContext.getUserId());
+        if (parentInfo != null && parentInfo.id != mContext.getUserId()) {
+            final Context parentContext = createPackageContextAsUser(mContext, parentInfo.id);
+            if (parentContext != null) {
+                // We don't need to re-add the internal ringtones for the work profile since
+                // they are the same as the personal profile. We just need the external
+                // ringtones.
+                return new ExternalRingtonesCursorWrapper(getMediaRingtones(parentContext),
+                        parentInfo.id);
+            }
+        }
+        return null;
+    }
+
+    /**
+     * Gets a {@link Ringtone} for the ringtone at the given position in the
+     * {@link Cursor}.
+     * <p>
+     * If {@link #getStopPreviousRingtone()} is true, the previously retrieved
+     * ringtone is stopped first.
+     *
+     * @param position The position (in the {@link Cursor}) of the ringtone.
+     * @return A {@link Ringtone} pointing to the ringtone.
+     */
+    public Ringtone getRingtone(int position) {
+        if (mStopPreviousRingtone && mPreviousRingtone != null) {
+            mPreviousRingtone.stop();
+        }
+
+        mPreviousRingtone = getRingtone(mContext, getRingtoneUri(position), inferStreamType());
+        return mPreviousRingtone;
+    }
+
+    /**
+     * Gets a {@link Uri} for the ringtone at the given position in the {@link Cursor}.
+     *
+     * @param position The position (in the {@link Cursor}) of the ringtone.
+     * @return A {@link Uri} pointing to the ringtone, or null if the cursor has not
+     *         been created yet or the position is out of range.
+     */
+    public Uri getRingtoneUri(int position) {
+        // use cursor directly instead of requerying it, which could easily
+        // cause position to shuffle.
+        if (mCursor == null || !mCursor.moveToPosition(position)) {
+            return null;
+        }
+
+        return getUriFromCursor(mCursor);
+    }
+
+    /**
+     * Queries the database for the Uri to a ringtone in a specific path (the ringtone has to have
+     * been scanned before)
+     *
+     * @param context Context used to query the database
+     * @param path Path to the ringtone file
+     * @return Uri of the ringtone, null if something fails in the query or the ringtone doesn't
+     *         exist
+     *
+     * @hide
+     */
+    private static Uri getExistingRingtoneUriFromPath(Context context, String path) {
+        final String[] proj = {MediaStore.Audio.Media._ID};
+        final String[] selectionArgs = {path};
+        // try-with-resources: the cursor is closed on every path.
+        try (final Cursor cursor = context.getContentResolver().query(
+                MediaStore.Audio.Media.EXTERNAL_CONTENT_URI, proj,
+                MediaStore.Audio.Media.DATA + "=? ", selectionArgs, /* sortOrder */ null)) {
+            if (cursor == null || !cursor.moveToFirst()) {
+                return null;
+            }
+            final int id = cursor.getInt(cursor.getColumnIndex(MediaStore.MediaColumns._ID));
+            if (id == -1) {
+                return null;
+            }
+            return Uri.withAppendedPath(MediaStore.Audio.Media.EXTERNAL_CONTENT_URI, "" + id);
+        }
+    }
+
+ private static Uri getUriFromCursor(Cursor cursor) {
+ return ContentUris.withAppendedId(Uri.parse(cursor.getString(URI_COLUMN_INDEX)), cursor
+ .getLong(ID_COLUMN_INDEX));
+ }
+
+    /**
+     * Gets the position of a {@link Uri} within this {@link RingtoneManager}.
+     *
+     * @param ringtoneUri The {@link Uri} to retrieve the position of.
+     * @return The position of the {@link Uri}, or -1 if it cannot be found.
+     */
+    public int getRingtonePosition(Uri ringtoneUri) {
+
+        if (ringtoneUri == null) return -1;
+
+        final Cursor cursor = getCursor();
+        final int cursorCount = cursor.getCount();
+
+        if (!cursor.moveToFirst()) {
+            return -1;
+        }
+
+        // Only create Uri objects when the actual URI changes
+        Uri currentUri = null;
+        String previousUriString = null;
+        for (int i = 0; i < cursorCount; i++) {
+            String uriString = cursor.getString(URI_COLUMN_INDEX);
+            // Adjacent rows typically share the same base URI string; only re-parse
+            // when it actually changes.
+            if (currentUri == null || !uriString.equals(previousUriString)) {
+                currentUri = Uri.parse(uriString);
+            }
+
+            if (ringtoneUri.equals(ContentUris.withAppendedId(currentUri, cursor
+                    .getLong(ID_COLUMN_INDEX)))) {
+                return i;
+            }
+
+            cursor.move(1);
+
+            previousUriString = uriString;
+        }
+
+        return -1;
+    }
+
+    /**
+     * Returns a valid ringtone URI. No guarantees on which it returns. If it
+     * cannot find one, returns null. If it can only find one on external storage and the caller
+     * doesn't have the {@link android.Manifest.permission#READ_EXTERNAL_STORAGE} permission,
+     * returns null.
+     *
+     * @param context The context to use for querying.
+     * @return A ringtone URI, or null if one cannot be found.
+     */
+    public static Uri getValidRingtoneUri(Context context) {
+        final RingtoneManager rm = new RingtoneManager(context);
+
+        // Prefer internal (always-readable) ringtones; fall back to external media.
+        Uri uri = getValidRingtoneUriFromCursorAndClose(context, rm.getInternalRingtones());
+
+        if (uri == null) {
+            uri = getValidRingtoneUriFromCursorAndClose(context, rm.getMediaRingtones());
+        }
+
+        return uri;
+    }
+
+    /**
+     * Returns the Uri of the first row of the given cursor (or null for a null/empty
+     * cursor), always closing the cursor before returning.
+     */
+    private static Uri getValidRingtoneUriFromCursorAndClose(Context context, Cursor cursor) {
+        if (cursor == null) {
+            return null;
+        }
+        // try-with-resources: close the cursor even if reading it throws; the previous
+        // code skipped close() on an exception, leaking the cursor.
+        try (Cursor c = cursor) {
+            return c.moveToFirst() ? getUriFromCursor(c) : null;
+        }
+    }
+
+    /** Queries the internal media provider for sounds matching the current type filter. */
+    private Cursor getInternalRingtones() {
+        return query(
+                MediaStore.Audio.Media.INTERNAL_CONTENT_URI, INTERNAL_COLUMNS,
+                constructBooleanTrueWhereClause(mFilterColumns),
+                null, MediaStore.Audio.Media.DEFAULT_SORT_ORDER);
+    }
+
+    /** Queries external storage for ringtones using this manager's own context. */
+    private Cursor getMediaRingtones() {
+        return getMediaRingtones(mContext);
+    }
+
+    /**
+     * Queries the external media provider for sounds matching the current type filter,
+     * on behalf of the given context's user. Returns null when the caller lacks
+     * READ_EXTERNAL_STORAGE or external storage is not mounted.
+     */
+    private Cursor getMediaRingtones(Context context) {
+        if (PackageManager.PERMISSION_GRANTED != context.checkPermission(
+                android.Manifest.permission.READ_EXTERNAL_STORAGE,
+                Process.myPid(), Process.myUid())) {
+            Log.w(TAG, "No READ_EXTERNAL_STORAGE permission, ignoring ringtones on ext storage");
+            return null;
+        }
+        // Get the external media cursor. First check to see if it is mounted.
+        final String status = Environment.getExternalStorageState();
+
+        return (status.equals(Environment.MEDIA_MOUNTED) ||
+                    status.equals(Environment.MEDIA_MOUNTED_READ_ONLY))
+                ? query(
+                    MediaStore.Audio.Media.EXTERNAL_CONTENT_URI, MEDIA_COLUMNS,
+                    constructBooleanTrueWhereClause(mFilterColumns), null,
+                    MediaStore.Audio.Media.DEFAULT_SORT_ORDER, context)
+                : null;
+    }
+
+ private void setFilterColumnsList(int type) {
+ List<String> columns = mFilterColumns;
+ columns.clear();
+
+ if ((type & TYPE_RINGTONE) != 0) {
+ columns.add(MediaStore.Audio.AudioColumns.IS_RINGTONE);
+ }
+
+ if ((type & TYPE_NOTIFICATION) != 0) {
+ columns.add(MediaStore.Audio.AudioColumns.IS_NOTIFICATION);
+ }
+
+ if ((type & TYPE_ALARM) != 0) {
+ columns.add(MediaStore.Audio.AudioColumns.IS_ALARM);
+ }
+ }
+
+    /**
+     * Constructs a where clause that consists of at least one column being 1
+     * (true). This is used to find all matching sounds for the given sound
+     * types (ringtone, notifications, etc.)
+     *
+     * @param columns The columns that must be true.
+     * @return The where clause, e.g. {@code "(is_alarm=1 or is_ringtone=1)"}, or null
+     *         when {@code columns} is null.
+     */
+    private static String constructBooleanTrueWhereClause(List<String> columns) {
+
+        if (columns == null) return null;
+
+        StringBuilder clause = new StringBuilder("(");
+
+        // Iterate from the end to keep the clause ordering of the original implementation,
+        // inserting the " or " separator between terms instead of trimming a trailing one.
+        for (int i = columns.size() - 1; i >= 0; i--) {
+            clause.append(columns.get(i)).append("=1");
+            if (i > 0) {
+                clause.append(" or ");
+            }
+        }
+
+        clause.append(")");
+
+        return clause.toString();
+    }
+
+    /** Convenience overload of {@link #query(Uri, String[], String, String[], String, Context)}
+     *  that queries against this manager's own context. */
+    private Cursor query(Uri uri,
+            String[] projection,
+            String selection,
+            String[] selectionArgs,
+            String sortOrder) {
+        return query(uri, projection, selection, selectionArgs, sortOrder, mContext);
+    }
+
+    /**
+     * Runs a query, using the activity's managed cursor when this manager was built
+     * from an {@link Activity}; otherwise queries through the given context's
+     * content resolver (caller manages the cursor).
+     */
+    private Cursor query(Uri uri,
+            String[] projection,
+            String selection,
+            String[] selectionArgs,
+            String sortOrder,
+            Context context) {
+        if (mActivity != null) {
+            return mActivity.managedQuery(uri, projection, selection, selectionArgs, sortOrder);
+        } else {
+            return context.getContentResolver().query(uri, projection, selection, selectionArgs,
+                    sortOrder);
+        }
+    }
+
+    /**
+     * Returns a {@link Ringtone} for a given sound URI.
+     * <p>
+     * If the given URI cannot be opened for any reason, this method will
+     * attempt to fallback on another sound. If it cannot find any, it will
+     * return null.
+     *
+     * @param context A context used to query.
+     * @param ringtoneUri The {@link Uri} of a sound or ringtone.
+     * @return A {@link Ringtone} for the given URI, or null.
+     */
+    public static Ringtone getRingtone(final Context context, Uri ringtoneUri) {
+        // Don't set the stream type
+        return getRingtone(context, ringtoneUri, -1);
+    }
+
+    //FIXME bypass the notion of stream types within the class
+    /**
+     * Returns a {@link Ringtone} for a given sound URI on the given stream
+     * type. Normally, if you change the stream type on the returned
+     * {@link Ringtone}, it will re-create the {@link MediaPlayer}. This is just
+     * an optimized route to avoid that.
+     *
+     * @param streamType The stream type for the ringtone, or -1 if it should
+     *            not be set (and the default used instead).
+     * @return A {@link Ringtone}, or null if construction or opening failed.
+     * @see #getRingtone(Context, Uri)
+     */
+    private static Ringtone getRingtone(final Context context, Uri ringtoneUri, int streamType) {
+        try {
+            final Ringtone r = new Ringtone(context, true);
+            if (streamType >= 0) {
+                //FIXME deprecated call
+                r.setStreamType(streamType);
+            }
+            r.setUri(ringtoneUri);
+            return r;
+        } catch (Exception ex) {
+            // Broad catch: any failure while opening maps to a null ringtone.
+            Log.e(TAG, "Failed to open ringtone " + ringtoneUri + ": " + ex);
+        }
+
+        return null;
+    }
+
+    /**
+     * Look up the path for a given {@link Uri} referring to a ringtone sound (TYPE_RINGTONE,
+     * TYPE_NOTIFICATION, or TYPE_ALARM). This is saved in {@link MediaStore.Audio.Media#DATA}.
+     *
+     * @return a {@link File} pointing at the location of the {@param uri} on disk, or {@code null}
+     *         if there is no such file.
+     */
+    private File getRingtonePathFromUri(Uri uri) {
+        // Query cursor to get ringtone path
+        // NOTE(review): this widens mFilterColumns to all three types and never restores
+        // the previous filter — later queries on this instance see the widened filter.
+        // Confirm whether callers rely on that.
+        final String[] projection = {MediaStore.Audio.Media.DATA};
+        setFilterColumnsList(TYPE_RINGTONE | TYPE_NOTIFICATION | TYPE_ALARM);
+
+        String path = null;
+        // try-with-resources: the cursor is closed on every path.
+        try (Cursor cursor = query(uri, projection, constructBooleanTrueWhereClause(mFilterColumns),
+                        null, null)) {
+            if (cursor != null && cursor.moveToFirst()) {
+                path = cursor.getString(cursor.getColumnIndex(MediaStore.Audio.Media.DATA));
+            }
+        }
+        return path != null ? new File(path) : null;
+    }
+
+    /**
+     * Disables Settings.System.SYNC_PARENT_SOUNDS for the given context's user by
+     * asking the audio service (which holds the required privileges).
+     *
+     * @hide
+     */
+    public static void disableSyncFromParent(Context userContext) {
+        IBinder b = ServiceManager.getService(Context.AUDIO_SERVICE);
+        IAudioService audioService = IAudioService.Stub.asInterface(b);
+        try {
+            audioService.disableRingtoneSync(userContext.getUserId());
+        } catch (RemoteException e) {
+            // Best effort: leave sync enabled if the audio service is unreachable.
+            Log.e(TAG, "Unable to disable ringtone sync.");
+        }
+    }
+
+    /**
+     * Enables Settings.System.SYNC_PARENT_SOUNDS for the context's user by writing
+     * the secure setting directly (hence the permission requirement).
+     *
+     * @hide
+     */
+    @RequiresPermission(Manifest.permission.WRITE_SECURE_SETTINGS)
+    public static void enableSyncFromParent(Context userContext) {
+        Settings.Secure.putIntForUser(userContext.getContentResolver(),
+                Settings.Secure.SYNC_PARENT_SOUNDS, 1 /* true */, userContext.getUserId());
+    }
+
+    /**
+     * Gets the current default sound's {@link Uri}. This will give the actual
+     * sound {@link Uri}, instead of using this, most clients can use
+     * {@link System#DEFAULT_RINGTONE_URI}.
+     *
+     * @param context A context used for querying.
+     * @param type The type whose default sound should be returned. One of
+     *            {@link #TYPE_RINGTONE}, {@link #TYPE_NOTIFICATION}, or
+     *            {@link #TYPE_ALARM}.
+     * @return A {@link Uri} pointing to the default sound for the sound type,
+     *         or null when the type is unknown or no default is set.
+     * @see #setActualDefaultRingtoneUri(Context, int, Uri)
+     */
+    public static Uri getActualDefaultRingtoneUri(Context context, int type) {
+        String setting = getSettingForType(type);
+        if (setting == null) return null;
+        final String uriString = Settings.System.getStringForUser(context.getContentResolver(),
+                setting, context.getUserId());
+        Uri ringtoneUri = uriString != null ? Uri.parse(uriString) : null;
+
+        // If this doesn't verify, the user id must be kept in the uri to ensure it resolves in the
+        // correct user storage
+        if (ringtoneUri != null
+                && ContentProvider.getUserIdFromUri(ringtoneUri) == context.getUserId()) {
+            ringtoneUri = ContentProvider.getUriWithoutUserId(ringtoneUri);
+        }
+
+        return ringtoneUri;
+    }
+
+    /**
+     * Sets the {@link Uri} of the default sound for a given sound type.
+     *
+     * <p>Side effects, in order: disables parent-sound sync if it is active, persists the
+     * (possibly user-id-qualified) uri to the per-user system setting, then streams the sound
+     * into the ringtone cache so it remains playable while CE storage is locked.
+     *
+     * @param context A context used for querying.
+     * @param type The type whose default sound should be set. One of
+     *            {@link #TYPE_RINGTONE}, {@link #TYPE_NOTIFICATION}, or
+     *            {@link #TYPE_ALARM}.
+     * @param ringtoneUri A {@link Uri} pointing to the default sound to set, or {@code null}
+     *            to clear the default.
+     * @see #getActualDefaultRingtoneUri(Context, int)
+     */
+    public static void setActualDefaultRingtoneUri(Context context, int type, Uri ringtoneUri) {
+        String setting = getSettingForType(type);
+        if (setting == null) return;
+
+        final ContentResolver resolver = context.getContentResolver();
+        if (Settings.Secure.getIntForUser(resolver, Settings.Secure.SYNC_PARENT_SOUNDS, 0,
+                context.getUserId()) == 1) {
+            // Parent sound override is enabled. Disable it using the audio service.
+            disableSyncFromParent(context);
+        }
+        // Internal (system) ringtones are shared across users, so only external uris need the
+        // user id encoded for cross-user resolution.
+        if(!isInternalRingtoneUri(ringtoneUri)) {
+            ringtoneUri = ContentProvider.maybeAddUserId(ringtoneUri, context.getUserId());
+        }
+        Settings.System.putStringForUser(resolver, setting,
+                ringtoneUri != null ? ringtoneUri.toString() : null, context.getUserId());
+
+        // Stream selected ringtone into cache so it's available for playback
+        // when CE storage is still locked
+        if (ringtoneUri != null) {
+            final Uri cacheUri = getCacheForType(type, context.getUserId());
+            try (InputStream in = openRingtone(context, ringtoneUri);
+                    OutputStream out = resolver.openOutputStream(cacheUri)) {
+                Streams.copy(in, out);
+            } catch (IOException e) {
+                // Caching is best-effort; the setting itself has already been written.
+                Log.w(TAG, "Failed to cache ringtone: " + e);
+            }
+        }
+    }
+
+    /** Returns whether the uri points into internal (system) media storage. */
+    private static boolean isInternalRingtoneUri(Uri uri) {
+        return isRingtoneUriInStorage(uri, MediaStore.Audio.Media.INTERNAL_CONTENT_URI);
+    }
+
+    /** Returns whether the uri points into external (user-accessible) media storage. */
+    private static boolean isExternalRingtoneUri(Uri uri) {
+        return isRingtoneUriInStorage(uri, MediaStore.Audio.Media.EXTERNAL_CONTENT_URI);
+    }
+
+ private static boolean isRingtoneUriInStorage(Uri ringtone, Uri storage) {
+ Uri uriWithoutUserId = ContentProvider.getUriWithoutUserId(ringtone);
+ return uriWithoutUserId == null ? false
+ : uriWithoutUserId.toString().startsWith(storage.toString());
+ }
+
+    /**
+     * Returns whether the given {@link Uri} refers to a user-installed ("custom") ringtone,
+     * i.e. a media file located directly inside one of the public Ringtones, Notifications,
+     * or Alarms directories on external storage.
+     *
+     * @param uri the ringtone {@link Uri} to check; may be {@code null}
+     * @return {@code true} only for files directly in a well-known ringtone directory
+     * @hide
+     */
+    public boolean isCustomRingtone(Uri uri) {
+        // Reject null up front rather than relying on the storage helpers tolerating it.
+        if (uri == null || !isExternalRingtoneUri(uri)) {
+            // A custom ringtone would be in the external storage
+            return false;
+        }
+
+        final File ringtoneFile = getRingtonePathFromUri(uri);
+        final File parent = (ringtoneFile == null ? null : ringtoneFile.getParentFile());
+        if (parent == null) {
+            return false;
+        }
+
+        // Only immediate children of the public ringtone directories count as custom.
+        final String[] directories = {
+            Environment.DIRECTORY_RINGTONES,
+            Environment.DIRECTORY_NOTIFICATIONS,
+            Environment.DIRECTORY_ALARMS
+        };
+        for (final String directory : directories) {
+            if (parent.equals(Environment.getExternalStoragePublicDirectory(directory))) {
+                return true;
+            }
+        }
+        return false;
+    }
+
+    /**
+     * Adds an audio file to the list of ringtones.
+     *
+     * After making sure the given file is an audio file, copies the file to the ringtone storage,
+     * and asks the {@link android.media.MediaScanner} to scan that file. This call will block until
+     * the scan is completed.
+     *
+     * The directory where the copied file is stored is the directory that matches the ringtone's
+     * type, which is one of: {@link android.os.Environment#DIRECTORY_RINGTONES};
+     * {@link android.os.Environment#DIRECTORY_NOTIFICATIONS};
+     * {@link android.os.Environment#DIRECTORY_ALARMS}.
+     *
+     * This does not allow modifying the type of an existing ringtone file. To change type, use the
+     * APIs in {@link android.content.ContentResolver} to update the corresponding columns.
+     *
+     * @param fileUri Uri of the file to be added as ringtone. Must be a media file.
+     * @param type The type of the ringtone to be added. Must be one of {@link #TYPE_RINGTONE},
+     *            {@link #TYPE_NOTIFICATION}, or {@link #TYPE_ALARM}.
+     *
+     * @return The Uri of the installed ringtone, which may be the Uri of {@param fileUri} if it is
+     *         already in ringtone storage.
+     *
+     * @throws FileNotFoundException if an appropriate unique filename to save the new ringtone file
+     *         as cannot be found, for example if the unique name is too long.
+     * @throws IllegalArgumentException if {@param fileUri} does not point to an existing audio
+     *         file, or if the {@param type} is not one of the accepted ringtone types.
+     * @throws IOException if the audio file failed to copy to ringtone storage; for example, if
+     *         external storage was not available, or if the file was copied but the media scanner
+     *         did not recognize it as a ringtone.
+     *
+     * @hide
+     */
+    @WorkerThread
+    public Uri addCustomExternalRingtone(@NonNull final Uri fileUri, final int type)
+            throws FileNotFoundException, IllegalArgumentException, IOException {
+        if (!Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) {
+            throw new IOException("External storage is not mounted. Unable to install ringtones.");
+        }
+
+        // Sanity-check: are we actually being asked to install an audio file?
+        final String mimeType = mContext.getContentResolver().getType(fileUri);
+        if(mimeType == null ||
+                !(mimeType.startsWith("audio/") || mimeType.equals("application/ogg"))) {
+            throw new IllegalArgumentException("Ringtone file must have MIME type \"audio/*\"."
+                    + " Given file has MIME type \"" + mimeType + "\"");
+        }
+
+        // Choose a directory to save the ringtone. Only one type of installation at a time is
+        // allowed. Throws IllegalArgumentException if anything else is given.
+        final String subdirectory = getExternalDirectoryForType(type);
+
+        // Find a filename. Throws FileNotFoundException if none can be found.
+        final File outFile = Utils.getUniqueExternalFile(mContext, subdirectory,
+                Utils.getFileDisplayNameFromUri(mContext, fileUri), mimeType);
+
+        // Copy contents to external ringtone storage. Throws IOException if the copy fails.
+        try (final InputStream input = mContext.getContentResolver().openInputStream(fileUri);
+                final OutputStream output = new FileOutputStream(outFile)) {
+            Streams.copy(input, output);
+        }
+
+        // Tell MediaScanner about the new file. Wait for it to assign a {@link Uri}.
+        try (NewRingtoneScanner scanner = new NewRingtoneScanner(outFile)) {
+            return scanner.take();
+        } catch (InterruptedException e) {
+            throw new IOException("Audio file failed to scan as a ringtone", e);
+        }
+    }
+
+    /**
+     * Maps a ringtone type to the name of its public external-storage directory.
+     *
+     * @throws IllegalArgumentException for anything other than a single supported type.
+     */
+    private static final String getExternalDirectoryForType(final int type) {
+        if (type == TYPE_RINGTONE) {
+            return Environment.DIRECTORY_RINGTONES;
+        } else if (type == TYPE_NOTIFICATION) {
+            return Environment.DIRECTORY_NOTIFICATIONS;
+        } else if (type == TYPE_ALARM) {
+            return Environment.DIRECTORY_ALARMS;
+        }
+        throw new IllegalArgumentException("Unsupported ringtone type: " + type);
+    }
+
+    /**
+     * Deletes the actual file in the Uri and its ringtone database entry if the Uri's actual path
+     * is in one of the following directories: {@link android.os.Environment#DIRECTORY_RINGTONES},
+     * {@link android.os.Environment#DIRECTORY_NOTIFICATIONS} or
+     * {@link android.os.Environment#DIRECTORY_ALARMS}.
+     *
+     * The given Uri must be a ringtone Content Uri.
+     *
+     * Keep in mind that if the ringtone deleted is a default ringtone, it will still live in the
+     * ringtone cache file so it will be playable from there. However, if an app uses the ringtone
+     * as its own ringtone, it won't be played, which is the same behavior observed for 3rd party
+     * custom ringtones.
+     *
+     * @param uri the content {@link Uri} of the ringtone to delete
+     * @return {@code true} only if both the database row and the backing file were deleted
+     * @hide
+     */
+    public boolean deleteExternalRingtone(Uri uri) {
+        if(!isCustomRingtone(uri)) {
+            // We can only delete custom ringtones in the default ringtone storages
+            return false;
+        }
+
+        // Save the path of the ringtone before deleting from our content resolver.
+        final File ringtoneFile = getRingtonePathFromUri(uri);
+        try {
+            // Delete the database row first; only remove the file once the row is gone.
+            if (ringtoneFile != null && mContext.getContentResolver().delete(uri, null, null) > 0) {
+                return ringtoneFile.delete();
+            }
+        } catch (SecurityException e) {
+            Log.d(TAG, "Unable to delete custom ringtone", e);
+        }
+        return false;
+    }
+
+    /**
+     * Try opening the given ringtone locally first, but failover to
+     * {@link IRingtonePlayer} if we can't access it directly. Typically happens
+     * when process doesn't hold
+     * {@link android.Manifest.permission#READ_EXTERNAL_STORAGE}.
+     *
+     * @param context context used to resolve the uri and reach the system ringtone player
+     * @param uri the ringtone to open
+     * @return an open stream for the ringtone data; never {@code null}
+     * @throws IOException if both the direct open and the failover fail
+     */
+    private static InputStream openRingtone(Context context, Uri uri) throws IOException {
+        final ContentResolver resolver = context.getContentResolver();
+        try {
+            return resolver.openInputStream(uri);
+        } catch (SecurityException | IOException e) {
+            Log.w(TAG, "Failed to open directly; attempting failover: " + e);
+            // The system ringtone player runs with enough privileges to read the file for us.
+            final IRingtonePlayer player = context.getSystemService(AudioManager.class)
+                    .getRingtonePlayer();
+            try {
+                return new ParcelFileDescriptor.AutoCloseInputStream(player.openRingtone(uri));
+            } catch (Exception e2) {
+                // Wrap binder/remote failures so callers only need to handle IOException.
+                throw new IOException(e2);
+            }
+        }
+    }
+
+    /**
+     * Maps a ringtone type bit to its Settings.System key, or {@code null} when no
+     * recognized type bit is set. Precedence when multiple bits are set:
+     * ringtone, then notification, then alarm.
+     */
+    private static String getSettingForType(int type) {
+        String setting = null;
+        if ((type & TYPE_RINGTONE) != 0) {
+            setting = Settings.System.RINGTONE;
+        } else if ((type & TYPE_NOTIFICATION) != 0) {
+            setting = Settings.System.NOTIFICATION_SOUND;
+        } else if ((type & TYPE_ALARM) != 0) {
+            setting = Settings.System.ALARM_ALERT;
+        }
+        return setting;
+    }
+
+    /** Convenience overload that resolves the cache for the calling user. {@hide} */
+    public static Uri getCacheForType(int type) {
+        return getCacheForType(type, UserHandle.getCallingUserId());
+    }
+
+ /** {@hide} */
+ public static Uri getCacheForType(int type, int userId) {
+ if ((type & TYPE_RINGTONE) != 0) {
+ return ContentProvider.maybeAddUserId(Settings.System.RINGTONE_CACHE_URI, userId);
+ } else if ((type & TYPE_NOTIFICATION) != 0) {
+ return ContentProvider.maybeAddUserId(Settings.System.NOTIFICATION_SOUND_CACHE_URI,
+ userId);
+ } else if ((type & TYPE_ALARM) != 0) {
+ return ContentProvider.maybeAddUserId(Settings.System.ALARM_ALERT_CACHE_URI, userId);
+ }
+ return null;
+ }
+
+    /**
+     * Returns whether the given {@link Uri} is one of the default ringtones.
+     *
+     * @param ringtoneUri The ringtone {@link Uri} to be checked.
+     * @return Whether the {@link Uri} is a default; equivalent to
+     *         {@code getDefaultType(ringtoneUri) != -1}.
+     */
+    public static boolean isDefault(Uri ringtoneUri) {
+        return getDefaultType(ringtoneUri) != -1;
+    }
+
+    /**
+     * Returns the type of a default {@link Uri}.
+     *
+     * @param defaultRingtoneUri The default {@link Uri}. For example,
+     *            {@link System#DEFAULT_RINGTONE_URI},
+     *            {@link System#DEFAULT_NOTIFICATION_URI}, or
+     *            {@link System#DEFAULT_ALARM_ALERT_URI}.
+     * @return The matching TYPE_* constant, or -1 when the uri is {@code null} or is not
+     *         one of the symbolic defaults.
+     */
+    public static int getDefaultType(Uri defaultRingtoneUri) {
+        // Strip any embedded user id before comparing against the symbolic defaults.
+        final Uri uri = ContentProvider.getUriWithoutUserId(defaultRingtoneUri);
+        if (uri == null) {
+            return -1;
+        }
+        if (uri.equals(Settings.System.DEFAULT_RINGTONE_URI)) {
+            return TYPE_RINGTONE;
+        }
+        if (uri.equals(Settings.System.DEFAULT_NOTIFICATION_URI)) {
+            return TYPE_NOTIFICATION;
+        }
+        if (uri.equals(Settings.System.DEFAULT_ALARM_ALERT_URI)) {
+            return TYPE_ALARM;
+        }
+        return -1;
+    }
+
+ /**
+ * Returns the {@link Uri} for the default ringtone of a particular type.
+ * Rather than returning the actual ringtone's sound {@link Uri}, this will
+ * return the symbolic {@link Uri} which will resolved to the actual sound
+ * when played.
+ *
+ * @param type The ringtone type whose default should be returned.
+ * @return The {@link Uri} of the default ringtone for the given type.
+ */
+ public static Uri getDefaultUri(int type) {
+ if ((type & TYPE_RINGTONE) != 0) {
+ return Settings.System.DEFAULT_RINGTONE_URI;
+ } else if ((type & TYPE_NOTIFICATION) != 0) {
+ return Settings.System.DEFAULT_NOTIFICATION_URI;
+ } else if ((type & TYPE_ALARM) != 0) {
+ return Settings.System.DEFAULT_ALARM_ALERT_URI;
+ } else {
+ return null;
+ }
+ }
+
+    /**
+     * Creates a {@link android.media.MediaScannerConnection} to scan a ringtone file and add its
+     * information to the internal database.
+     *
+     * It uses a {@link java.util.concurrent.LinkedBlockingQueue} so that the caller can block until
+     * the scan is completed.
+     */
+    private class NewRingtoneScanner implements Closeable, MediaScannerConnectionClient {
+        private MediaScannerConnection mMediaScannerConnection;
+        private File mFile;
+        // Capacity 1: exactly one scan result is ever produced per scanner instance.
+        private LinkedBlockingQueue<Uri> mQueue = new LinkedBlockingQueue<>(1);
+
+        public NewRingtoneScanner(File file) {
+            mFile = file;
+            mMediaScannerConnection = new MediaScannerConnection(mContext, this);
+            // Scanning starts asynchronously once onMediaScannerConnected() fires.
+            mMediaScannerConnection.connect();
+        }
+
+        @Override
+        public void close() {
+            mMediaScannerConnection.disconnect();
+        }
+
+        @Override
+        public void onMediaScannerConnected() {
+            mMediaScannerConnection.scanFile(mFile.getAbsolutePath(), null);
+        }
+
+        @Override
+        public void onScanCompleted(String path, Uri uri) {
+            if (uri == null) {
+                // There was some issue with scanning. Delete the copied file so it is not orphaned.
+                mFile.delete();
+                return;
+            }
+            try {
+                mQueue.put(uri);
+            } catch (InterruptedException e) {
+                // NOTE(review): the interrupt flag is not restored here -- the waiting caller in
+                // take() would then block forever; confirm this path is unreachable in practice.
+                Log.e(TAG, "Unable to put new ringtone Uri in queue", e);
+            }
+        }
+
+        // Blocks until the scanner delivers the new ringtone's Uri.
+        public Uri take() throws InterruptedException {
+            return mQueue.take();
+        }
+    }
+
+    /**
+     * Attempts to create a context for the given user.
+     *
+     * @param context the base context whose package name is reused for the new context
+     * @param userId the user to create the context for
+     * @return created context, or null if package does not exist
+     * @hide
+     */
+    private static Context createPackageContextAsUser(Context context, int userId) {
+        try {
+            return context.createPackageContextAsUser(context.getPackageName(), 0 /* flags */,
+                    UserHandle.of(userId));
+        } catch (NameNotFoundException e) {
+            Log.e(TAG, "Unable to create package context", e);
+            return null;
+        }
+    }
+}
diff --git a/android/media/SRTRenderer.java b/android/media/SRTRenderer.java
new file mode 100644
index 00000000..a3e2abda
--- /dev/null
+++ b/android/media/SRTRenderer.java
@@ -0,0 +1,202 @@
+package android.media;
+
+import android.content.Context;
+import android.media.SubtitleController.Renderer;
+import android.os.Handler;
+import android.os.Message;
+import android.os.Parcel;
+import android.util.Log;
+
+import java.io.BufferedReader;
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.io.UnsupportedEncodingException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Vector;
+
+/**
+ * Subtitle renderer for SubRip (.srt) tracks. Renders cues locally through a
+ * {@link WebVttRenderingWidget}, or forwards them as timed-text messages when an
+ * event handler is supplied.
+ *
+ * @hide
+ */
+public class SRTRenderer extends Renderer {
+    private final Context mContext;
+    private final boolean mRender;
+    private final Handler mEventHandler;
+
+    private WebVttRenderingWidget mRenderingWidget;
+
+    public SRTRenderer(Context context) {
+        this(context, null);
+    }
+
+    SRTRenderer(Context context, Handler eventHandler) {
+        // A null handler means we render cues ourselves; otherwise cues are forwarded
+        // to the handler as MEDIA_TIMED_TEXT messages.
+        mContext = context;
+        mRender = (eventHandler == null);
+        mEventHandler = eventHandler;
+    }
+
+    @Override
+    public boolean supports(MediaFormat format) {
+        if (!format.containsKey(MediaFormat.KEY_MIME)) {
+            return false;
+        }
+        if (!format.getString(MediaFormat.KEY_MIME)
+                .equals(MediaPlayer.MEDIA_MIMETYPE_TEXT_SUBRIP)) {
+            return false;
+        }
+        // Local rendering handles non-timed-text tracks; handler mode handles timed text.
+        return mRender == (format.getInteger(MediaFormat.KEY_IS_TIMED_TEXT, 0) == 0);
+    }
+
+    @Override
+    public SubtitleTrack createTrack(MediaFormat format) {
+        // Lazily create the shared rendering widget on first locally-rendered track.
+        if (mRender && mRenderingWidget == null) {
+            mRenderingWidget = new WebVttRenderingWidget(mContext);
+        }
+
+        return mRender
+                ? new SRTTrack(mRenderingWidget, format)
+                : new SRTTrack(mEventHandler, format);
+    }
+}
+
+/**
+ * Subtitle track for SubRip data. Parses cues from raw SRT text (or per-sample subtitle
+ * data) and either renders them through the inherited WebVTT widget or forwards them as
+ * timed-text parcels to an event handler.
+ */
+class SRTTrack extends WebVttTrack {
+    private static final int MEDIA_TIMED_TEXT = 99; // MediaPlayer.MEDIA_TIMED_TEXT
+    private static final int KEY_STRUCT_TEXT = 16; // TimedText.KEY_STRUCT_TEXT
+    private static final int KEY_START_TIME = 7; // TimedText.KEY_START_TIME
+    private static final int KEY_LOCAL_SETTING = 102; // TimedText.KEY_LOCAL_SETTING
+
+    private static final String TAG = "SRTTrack";
+    // Non-null only in handler (timed-text forwarding) mode.
+    private final Handler mEventHandler;
+
+    SRTTrack(WebVttRenderingWidget renderingWidget, MediaFormat format) {
+        super(renderingWidget, format);
+        mEventHandler = null;
+    }
+
+    SRTTrack(Handler eventHandler, MediaFormat format) {
+        super(null, format);
+        mEventHandler = eventHandler;
+    }
+
+    @Override
+    protected void onData(SubtitleData data) {
+        try {
+            // One SubtitleData sample maps to one cue spanning [start, start + duration).
+            TextTrackCue cue = new TextTrackCue();
+            cue.mStartTimeMs = data.getStartTimeUs() / 1000;
+            cue.mEndTimeMs = (data.getStartTimeUs() + data.getDurationUs()) / 1000;
+
+            String paragraph;
+            paragraph = new String(data.getData(), "UTF-8");
+            String[] lines = paragraph.split("\\r?\\n");
+            cue.mLines = new TextTrackCueSpan[lines.length][];
+
+            int i = 0;
+            for (String line : lines) {
+                // -1 timestamp: the whole line becomes visible at cue start.
+                TextTrackCueSpan[] span = new TextTrackCueSpan[] {
+                        new TextTrackCueSpan(line, -1)
+                };
+                cue.mLines[i++] = span;
+            }
+
+            addCue(cue);
+        } catch (UnsupportedEncodingException e) {
+            Log.w(TAG, "subtitle data is not UTF-8 encoded: " + e);
+        }
+    }
+
+    @Override
+    public void onData(byte[] data, boolean eos, long runID) {
+        // TODO make reentrant
+        try {
+            Reader r = new InputStreamReader(new ByteArrayInputStream(data), "UTF-8");
+            BufferedReader br = new BufferedReader(r);
+
+            String header;
+            while ((header = br.readLine()) != null) {
+                // discard subtitle number
+                header = br.readLine();
+                if (header == null) {
+                    break;
+                }
+
+                // Header line format: "HH:MM:SS,mmm --> HH:MM:SS,mmm".
+                // NOTE: a malformed header (no "-->") will throw and abort the whole parse.
+                TextTrackCue cue = new TextTrackCue();
+                String[] startEnd = header.split("-->");
+                cue.mStartTimeMs = parseMs(startEnd[0]);
+                cue.mEndTimeMs = parseMs(startEnd[1]);
+
+                // Cue text runs until a blank line (or end of input).
+                String s;
+                List<String> paragraph = new ArrayList<String>();
+                while (!((s = br.readLine()) == null || s.trim().equals(""))) {
+                    paragraph.add(s);
+                }
+
+                int i = 0;
+                cue.mLines = new TextTrackCueSpan[paragraph.size()][];
+                // toArray already fills mStrings with the paragraph lines.
+                cue.mStrings = paragraph.toArray(new String[0]);
+                for (String line : paragraph) {
+                    TextTrackCueSpan[] span = new TextTrackCueSpan[] {
+                            new TextTrackCueSpan(line, -1)
+                    };
+                    cue.mLines[i++] = span;
+                }
+
+                addCue(cue);
+            }
+
+        } catch (UnsupportedEncodingException e) {
+            Log.w(TAG, "subtitle data is not UTF-8 encoded: " + e);
+        } catch (IOException ioe) {
+            // shouldn't happen: the source is an in-memory byte array
+            Log.e(TAG, ioe.getMessage(), ioe);
+        }
+    }
+
+    @Override
+    public void updateView(Vector<Cue> activeCues) {
+        if (getRenderingWidget() != null) {
+            // Local rendering mode: let WebVttTrack draw the cues.
+            super.updateView(activeCues);
+            return;
+        }
+
+        if (mEventHandler == null) {
+            return;
+        }
+
+        // Handler mode: marshal each active cue into a TimedText-style parcel.
+        for (Cue cue : activeCues) {
+            TextTrackCue ttc = (TextTrackCue) cue;
+
+            Parcel parcel = Parcel.obtain();
+            parcel.writeInt(KEY_LOCAL_SETTING);
+            parcel.writeInt(KEY_START_TIME);
+            parcel.writeInt((int) cue.mStartTimeMs);
+
+            parcel.writeInt(KEY_STRUCT_TEXT);
+            StringBuilder sb = new StringBuilder();
+            for (String line : ttc.mStrings) {
+                sb.append(line).append('\n');
+            }
+
+            // NOTE(review): uses the platform default charset; on Android this is UTF-8.
+            byte[] buf = sb.toString().getBytes();
+            parcel.writeInt(buf.length);
+            parcel.writeByteArray(buf);
+
+            Message msg = mEventHandler.obtainMessage(MEDIA_TIMED_TEXT, 0 /* arg1 */, 0 /* arg2 */,
+                    parcel);
+            mEventHandler.sendMessage(msg);
+        }
+        activeCues.clear();
+    }
+
+    /**
+     * Parses an SRT timestamp ("HH:MM:SS,mmm") into milliseconds.
+     * Splits the input once instead of re-splitting for every field.
+     */
+    private static long parseMs(String in) {
+        final String[] parts = in.split(":");
+        final long hours = Long.parseLong(parts[0].trim());
+        final long minutes = Long.parseLong(parts[1].trim());
+        final String[] secondsAndMillis = parts[2].split(",");
+        final long seconds = Long.parseLong(secondsAndMillis[0].trim());
+        final long millies = Long.parseLong(secondsAndMillis[1].trim());
+
+        return hours * 60 * 60 * 1000 + minutes * 60 * 1000 + seconds * 1000 + millies;
+    }
+}
diff --git a/android/media/SoundPool.java b/android/media/SoundPool.java
new file mode 100644
index 00000000..26e65dda
--- /dev/null
+++ b/android/media/SoundPool.java
@@ -0,0 +1,615 @@
+/*
+ * Copyright (C) 2007 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import java.io.File;
+import java.io.FileDescriptor;
+import java.lang.ref.WeakReference;
+
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.app.ActivityThread;
+import android.app.AppOpsManager;
+import android.content.Context;
+import android.content.res.AssetFileDescriptor;
+import android.media.PlayerBase;
+import android.os.Handler;
+import android.os.IBinder;
+import android.os.Looper;
+import android.os.Message;
+import android.os.ParcelFileDescriptor;
+import android.os.Process;
+import android.os.RemoteException;
+import android.os.ServiceManager;
+import android.util.AndroidRuntimeException;
+import android.util.Log;
+
+
+/**
+ * The SoundPool class manages and plays audio resources for applications.
+ *
+ * <p>A SoundPool is a collection of samples that can be loaded into memory
+ * from a resource inside the APK or from a file in the file system. The
+ * SoundPool library uses the MediaPlayer service to decode the audio
+ * into a raw 16-bit PCM mono or stereo stream. This allows applications
+ * to ship with compressed streams without having to suffer the CPU load
+ * and latency of decompressing during playback.</p>
+ *
+ * <p>In addition to low-latency playback, SoundPool can also manage the number
+ * of audio streams being rendered at once. When the SoundPool object is
+ * constructed, the maxStreams parameter sets the maximum number of streams
+ * that can be played at a time from this single SoundPool. SoundPool tracks
+ * the number of active streams. If the maximum number of streams is exceeded,
+ * SoundPool will automatically stop a previously playing stream based first
+ * on priority and then by age within that priority. Limiting the maximum
+ * number of streams helps to cap CPU loading and reduce the likelihood that
+ * audio mixing will impact visuals or UI performance.</p>
+ *
+ * <p>Sounds can be looped by setting a non-zero loop value. A value of -1
+ * causes the sound to loop forever. In this case, the application must
+ * explicitly call the stop() function to stop the sound. Any other non-zero
+ * value will cause the sound to repeat the specified number of times, e.g.
+ * a value of 3 causes the sound to play a total of 4 times.</p>
+ *
+ * <p>The playback rate can also be changed. A playback rate of 1.0 causes
+ * the sound to play at its original frequency (resampled, if necessary,
+ * to the hardware output frequency). A playback rate of 2.0 causes the
+ * sound to play at twice its original frequency, and a playback rate of
+ * 0.5 causes it to play at half its original frequency. The playback
+ * rate range is 0.5 to 2.0.</p>
+ *
+ * <p>Priority runs low to high, i.e. higher numbers are higher priority.
+ * Priority is used when a call to play() would cause the number of active
+ * streams to exceed the value established by the maxStreams parameter when
+ * the SoundPool was created. In this case, the stream allocator will stop
+ * the lowest priority stream. If there are multiple streams with the same
+ * low priority, it will choose the oldest stream to stop. In the case
+ * where the priority of the new stream is lower than all the active
+ * streams, the new sound will not play and the play() function will return
+ * a streamID of zero.</p>
+ *
+ * <p>Let's examine a typical use case: A game consists of several levels of
+ * play. For each level, there is a set of unique sounds that are used only
+ * by that level. In this case, the game logic should create a new SoundPool
+ * object when the first level is loaded. The level data itself might contain
+ * the list of sounds to be used by this level. The loading logic iterates
+ * through the list of sounds calling the appropriate SoundPool.load()
+ * function. This should typically be done early in the process to allow time
+ * for decompressing the audio to raw PCM format before they are needed for
+ * playback.</p>
+ *
+ * <p>Once the sounds are loaded and play has started, the application can
+ * trigger sounds by calling SoundPool.play(). Playing streams can be
+ * paused or resumed, and the application can also alter the pitch by
+ * adjusting the playback rate in real-time for doppler or synthesis
+ * effects.</p>
+ *
+ * <p>Note that since streams can be stopped due to resource constraints, the
+ * streamID is a reference to a particular instance of a stream. If the stream
+ * is stopped to allow a higher priority stream to play, the stream is no
+ * longer valid. However, the application is allowed to call methods on
+ * the streamID without error. This may help simplify program logic since
+ * the application need not concern itself with the stream lifecycle.</p>
+ *
+ * <p>In our example, when the player has completed the level, the game
+ * logic should call SoundPool.release() to release all the native resources
+ * in use and then set the SoundPool reference to null. If the player starts
+ * another level, a new SoundPool is created, sounds are loaded, and play
+ * resumes.</p>
+ */
+public class SoundPool extends PlayerBase {
+    // Load the native glue library before any native method can be invoked.
+    static { System.loadLibrary("soundpool"); }
+
+    // SoundPool messages
+    //
+    // must match SoundPool.h
+    private static final int SAMPLE_LOADED = 1;
+
+    private final static String TAG = "SoundPool";
+    private final static boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);
+
+    private long mNativeContext; // accessed by native methods
+
+    private EventHandler mEventHandler;
+    private SoundPool.OnLoadCompleteListener mOnLoadCompleteListener;
+    private boolean mHasAppOpsPlayAudio;
+
+    // Guards listener/handler state shared with native callbacks.
+    private final Object mLock;
+    private final AudioAttributes mAttributes;
+
+    /**
+     * Constructor. Constructs a SoundPool object with the following
+     * characteristics:
+     *
+     * @param maxStreams the maximum number of simultaneous streams for this
+     *                   SoundPool object
+     * @param streamType the audio stream type as described in AudioManager
+     *                   For example, game applications will normally use
+     *                   {@link AudioManager#STREAM_MUSIC}.
+     * @param srcQuality the sample-rate converter quality. Currently has no
+     *                   effect. Use 0 for the default.
+     * @throws RuntimeException if the native SoundPool could not be created
+     * @deprecated use {@link SoundPool.Builder} instead to create and configure a
+     *     SoundPool instance
+     */
+    public SoundPool(int maxStreams, int streamType, int srcQuality) {
+        this(maxStreams,
+                new AudioAttributes.Builder().setInternalLegacyStreamType(streamType).build());
+        PlayerBase.deprecateStreamTypeForPlayback(streamType, "SoundPool", "SoundPool()");
+    }
+
+    /**
+     * Internal constructor backing both the deprecated public constructor and the Builder.
+     *
+     * @param maxStreams maximum number of simultaneous native streams
+     * @param attributes audio attributes applied to all sounds played by this pool
+     * @throws RuntimeException if the native SoundPool could not be created
+     */
+    private SoundPool(int maxStreams, AudioAttributes attributes) {
+        super(attributes, AudioPlaybackConfiguration.PLAYER_TYPE_JAM_SOUNDPOOL);
+
+        // do native setup
+        if (native_setup(new WeakReference<SoundPool>(this), maxStreams, attributes) != 0) {
+            throw new RuntimeException("Native setup failed");
+        }
+        mLock = new Object();
+        mAttributes = attributes;
+
+        // Register with the audio framework only after native setup succeeded.
+        baseRegisterPlayer();
+    }
+
+    /**
+     * Release the SoundPool resources.
+     *
+     * Release all memory and native resources used by the SoundPool
+     * object. The SoundPool can no longer be used and the reference
+     * should be set to null.
+     */
+    public final void release() {
+        // Unregister from the audio framework before tearing down native state.
+        baseRelease();
+        native_release();
+    }
+
+    private native final void native_release();
+
+    // Safety net only: callers are expected to invoke release() explicitly.
+    protected void finalize() { release(); }
+
+    /**
+     * Load the sound from the specified path.
+     *
+     * @param path the path to the audio file
+     * @param priority the priority of the sound. Currently has no effect. Use
+     *                 a value of 1 for future compatibility.
+     * @return a sound ID. This value can be used to play or unload the sound.
+     *         Returns 0 if the file could not be opened.
+     */
+    public int load(String path, int priority) {
+        int id = 0;
+        try {
+            final File f = new File(path);
+            // try-with-resources: the descriptor is closed even if _load throws.
+            // Note: ParcelFileDescriptor.open() throws on failure rather than returning null.
+            try (ParcelFileDescriptor fd = ParcelFileDescriptor.open(f,
+                    ParcelFileDescriptor.MODE_READ_ONLY)) {
+                id = _load(fd.getFileDescriptor(), 0, f.length(), priority);
+            }
+        } catch (java.io.IOException e) {
+            // Keep the historical best-effort contract: log and return 0.
+            Log.e(TAG, "error loading " + path);
+        }
+        return id;
+    }
+
+    /**
+     * Load the sound from the specified APK resource.
+     *
+     * Note that the extension is dropped. For example, if you want to load
+     * a sound from the raw resource file "explosion.mp3", you would specify
+     * "R.raw.explosion" as the resource ID. Note that this means you cannot
+     * have both an "explosion.wav" and an "explosion.mp3" in the res/raw
+     * directory.
+     *
+     * @param context the application context
+     * @param resId the resource ID
+     * @param priority the priority of the sound. Currently has no effect. Use
+     *                 a value of 1 for future compatibility.
+     * @return a sound ID. This value can be used to play or unload the sound.
+     */
+    public int load(Context context, int resId, int priority) {
+        AssetFileDescriptor afd = context.getResources().openRawResourceFd(resId);
+        int id = 0;
+        if (afd != null) {
+            id = _load(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength(), priority);
+            try {
+                afd.close();
+            } catch (java.io.IOException ex) {
+                // Close failures are deliberately ignored; the sound has already been loaded.
+                //Log.d(TAG, "close failed:", ex);
+            }
+        }
+        return id;
+    }
+
+    /**
+     * Load the sound from an asset file descriptor.
+     *
+     * @param afd an asset file descriptor; a {@code null} descriptor yields a sound ID of 0
+     * @param priority the priority of the sound. Currently has no effect. Use
+     *                 a value of 1 for future compatibility.
+     * @return a sound ID. This value can be used to play or unload the sound.
+     */
+    public int load(AssetFileDescriptor afd, int priority) {
+        if (afd == null) {
+            return 0;
+        }
+        final long len = afd.getLength();
+        if (len < 0) {
+            throw new AndroidRuntimeException("no length for fd");
+        }
+        return _load(afd.getFileDescriptor(), afd.getStartOffset(), len, priority);
+    }
+
+    /**
+     * Load the sound from a FileDescriptor.
+     *
+     * This version is useful if you store multiple sounds in a single
+     * binary. The offset specifies the offset from the start of the file
+     * and the length specifies the length of the sound within the file.
+     *
+     * @param fd a FileDescriptor object
+     * @param offset offset to the start of the sound
+     * @param length length of the sound
+     * @param priority the priority of the sound. Currently has no effect. Use
+     *                 a value of 1 for future compatibility.
+     * @return a sound ID. This value can be used to play or unload the sound.
+     */
+    public int load(FileDescriptor fd, long offset, long length, int priority) {
+        // Thin wrapper over the native loader; no ownership of fd is taken.
+        return _load(fd, offset, length, priority);
+    }
+
+    /**
+     * Unload a sound from a sound ID.
+     *
+     * Unloads the sound specified by the soundID. This is the value
+     * returned by the load() function. Returns true if the sound is
+     * successfully unloaded, false if the sound was already unloaded.
+     *
+     * @param soundID a soundID returned by the load() function
+     * @return true if just unloaded, false if previously unloaded
+     */
+    public native final boolean unload(int soundID);
+
+ /**
+ * Play a sound from a sound ID.
+ *
+ * Play the sound specified by the soundID. This is the value
+ * returned by the load() function. Returns a non-zero streamID
+ * if successful, zero if it fails. The streamID can be used to
+ * further control playback. Note that calling play() may cause
+ * another sound to stop playing if the maximum number of active
+ * streams is exceeded. A loop value of -1 means loop forever,
+ * a value of 0 means don't loop, other values indicate the
+ * number of repeats, e.g. a value of 1 plays the audio twice.
+ * The playback rate allows the application to vary the playback
+ * rate (pitch) of the sound. A value of 1.0 means play back at
+ * the original frequency. A value of 2.0 means play back twice
+ * as fast, and a value of 0.5 means playback at half speed.
+ *
+ * @param soundID a soundID returned by the load() function
+ * @param leftVolume left volume value (range = 0.0 to 1.0)
+ * @param rightVolume right volume value (range = 0.0 to 1.0)
+ * @param priority stream priority (0 = lowest priority)
+ * @param loop loop mode (0 = no loop, -1 = loop forever)
+ * @param rate playback rate (1.0 = normal playback, range 0.5 to 2.0)
+ * @return non-zero streamID if successful, zero if failed
+ */
    public final int play(int soundID, float leftVolume, float rightVolume,
            int priority, int loop, float rate) {
        // Notify the PlayerBase superclass that playback is starting before
        // handing off to the native layer.
        baseStart();
        return _play(soundID, leftVolume, rightVolume, priority, loop, rate);
    }
+
+ /**
+ * Pause a playback stream.
+ *
+ * Pause the stream specified by the streamID. This is the
+ * value returned by the play() function. If the stream is
+ * playing, it will be paused. If the stream is not playing
+ * (e.g. is stopped or was previously paused), calling this
+ * function will have no effect.
+ *
+ * @param streamID a streamID returned by the play() function
+ */
+ public native final void pause(int streamID);
+
+ /**
+ * Resume a playback stream.
+ *
+ * Resume the stream specified by the streamID. This
+ * is the value returned by the play() function. If the stream
+ * is paused, this will resume playback. If the stream was not
+ * previously paused, calling this function will have no effect.
+ *
+ * @param streamID a streamID returned by the play() function
+ */
+ public native final void resume(int streamID);
+
+ /**
+ * Pause all active streams.
+ *
+ * Pause all streams that are currently playing. This function
+ * iterates through all the active streams and pauses any that
+ * are playing. It also sets a flag so that any streams that
+ * are playing can be resumed by calling autoResume().
+ */
+ public native final void autoPause();
+
+ /**
+ * Resume all previously active streams.
+ *
+ * Automatically resumes all streams that were paused in previous
+ * calls to autoPause().
+ */
+ public native final void autoResume();
+
+ /**
+ * Stop a playback stream.
+ *
+ * Stop the stream specified by the streamID. This
+ * is the value returned by the play() function. If the stream
+ * is playing, it will be stopped. It also releases any native
+ * resources associated with this stream. If the stream is not
+ * playing, it will have no effect.
+ *
+ * @param streamID a streamID returned by the play() function
+ */
+ public native final void stop(int streamID);
+
+ /**
+ * Set stream volume.
+ *
+ * Sets the volume on the stream specified by the streamID.
+ * This is the value returned by the play() function. The
+ * value must be in the range of 0.0 to 1.0. If the stream does
+ * not exist, it will have no effect.
+ *
+ * @param streamID a streamID returned by the play() function
+ * @param leftVolume left volume value (range = 0.0 to 1.0)
+ * @param rightVolume right volume value (range = 0.0 to 1.0)
+ */
    public final void setVolume(int streamID, float leftVolume, float rightVolume) {
        // unlike other subclasses of PlayerBase, we are not calling
        // baseSetVolume(leftVolume, rightVolume) as we need to keep track of each
        // volume separately for each player, so we still send the command, but
        // handle mute/unmute separately through playerSetVolume()
        _setVolume(streamID, leftVolume, rightVolume);
    }
+
    @Override
    /* package */ int playerApplyVolumeShaper(
            @NonNull VolumeShaper.Configuration configuration,
            @Nullable VolumeShaper.Operation operation) {
        // VolumeShaper is not supported on SoundPool streams, so always report
        // failure. NOTE(review): -1 presumably matches the framework's invalid
        // shaper-ID convention — confirm against PlayerBase.
        return -1;
    }
+
    @Override
    /* package */ @Nullable VolumeShaper.State playerGetVolumeShaperState(int id) {
        // No VolumeShaper support (see playerApplyVolumeShaper), so there is
        // never any shaper state to report.
        return null;
    }
+
+ @Override
+ void playerSetVolume(boolean muting, float leftVolume, float rightVolume) {
+ // not used here to control the player volume directly, but used to mute/unmute
+ _mute(muting);
+ }
+
+ @Override
+ int playerSetAuxEffectSendLevel(boolean muting, float level) {
+ // no aux send functionality so no-op
+ return AudioSystem.SUCCESS;
+ }
+
+ @Override
+ void playerStart() {
+ // FIXME implement resuming any paused sound
+ }
+
+ @Override
+ void playerPause() {
+ // FIXME implement pausing any playing sound
+ }
+
    @Override
    void playerStop() {
        // FIXME implement stopping any playing sound
        // (comment corrected: this PlayerBase hook is for stop, not pause)
    }
+
+ /**
+ * Similar, except set volume of all channels to same value.
+ * @hide
+ */
+ public void setVolume(int streamID, float volume) {
+ setVolume(streamID, volume, volume);
+ }
+
    /**
     * Change stream priority.
     *
     * Change the priority of the stream specified by the streamID.
     * This is the value returned by the play() function. Affects the
     * order in which streams are re-used to play new sounds. If the
     * stream does not exist, it will have no effect.
     *
     * @param streamID a streamID returned by the play() function
     * @param priority the new stream priority (0 = lowest priority)
     */
    public native final void setPriority(int streamID, int priority);
+
+ /**
+ * Set loop mode.
+ *
+ * Change the loop mode. A loop value of -1 means loop forever,
+ * a value of 0 means don't loop, other values indicate the
+ * number of repeats, e.g. a value of 1 plays the audio twice.
+ * If the stream does not exist, it will have no effect.
+ *
+ * @param streamID a streamID returned by the play() function
+ * @param loop loop mode (0 = no loop, -1 = loop forever)
+ */
+ public native final void setLoop(int streamID, int loop);
+
+ /**
+ * Change playback rate.
+ *
+ * The playback rate allows the application to vary the playback
+ * rate (pitch) of the sound. A value of 1.0 means playback at
+ * the original frequency. A value of 2.0 means playback twice
+ * as fast, and a value of 0.5 means playback at half speed.
+ * If the stream does not exist, it will have no effect.
+ *
+ * @param streamID a streamID returned by the play() function
+ * @param rate playback rate (1.0 = normal playback, range 0.5 to 2.0)
+ */
+ public native final void setRate(int streamID, float rate);
+
+ public interface OnLoadCompleteListener {
+ /**
+ * Called when a sound has completed loading.
+ *
+ * @param soundPool SoundPool object from the load() method
+ * @param sampleId the sample ID of the sound loaded.
+ * @param status the status of the load operation (0 = success)
+ */
+ public void onLoadComplete(SoundPool soundPool, int sampleId, int status);
+ }
+
+ /**
+ * Sets the callback hook for the OnLoadCompleteListener.
+ */
+ public void setOnLoadCompleteListener(OnLoadCompleteListener listener) {
+ synchronized(mLock) {
+ if (listener != null) {
+ // setup message handler
+ Looper looper;
+ if ((looper = Looper.myLooper()) != null) {
+ mEventHandler = new EventHandler(looper);
+ } else if ((looper = Looper.getMainLooper()) != null) {
+ mEventHandler = new EventHandler(looper);
+ } else {
+ mEventHandler = null;
+ }
+ } else {
+ mEventHandler = null;
+ }
+ mOnLoadCompleteListener = listener;
+ }
+ }
+
+ private native final int _load(FileDescriptor fd, long offset, long length, int priority);
+
+ private native final int native_setup(Object weakRef, int maxStreams,
+ Object/*AudioAttributes*/ attributes);
+
+ private native final int _play(int soundID, float leftVolume, float rightVolume,
+ int priority, int loop, float rate);
+
+ private native final void _setVolume(int streamID, float leftVolume, float rightVolume);
+
+ private native final void _mute(boolean muting);
+
+ // post event from native code to message handler
+ @SuppressWarnings("unchecked")
+ private static void postEventFromNative(Object ref, int msg, int arg1, int arg2, Object obj) {
+ SoundPool soundPool = ((WeakReference<SoundPool>) ref).get();
+ if (soundPool == null)
+ return;
+
+ if (soundPool.mEventHandler != null) {
+ Message m = soundPool.mEventHandler.obtainMessage(msg, arg1, arg2, obj);
+ soundPool.mEventHandler.sendMessage(m);
+ }
+ }
+
    /**
     * Handler that delivers native load-complete events to the registered
     * OnLoadCompleteListener on the looper chosen in setOnLoadCompleteListener.
     */
    private final class EventHandler extends Handler {
        public EventHandler(Looper looper) {
            super(looper);
        }

        @Override
        public void handleMessage(Message msg) {
            switch(msg.what) {
                case SAMPLE_LOADED:
                    if (DEBUG) Log.d(TAG, "Sample " + msg.arg1 + " loaded");
                    // Hold mLock so the listener cannot be swapped out while
                    // we are dispatching to it.
                    synchronized(mLock) {
                        if (mOnLoadCompleteListener != null) {
                            mOnLoadCompleteListener.onLoadComplete(SoundPool.this, msg.arg1, msg.arg2);
                        }
                    }
                    break;
                default:
                    Log.e(TAG, "Unknown message type " + msg.what);
                    return;
            }
        }
    }
+
    /**
     * Builder class for {@link SoundPool} objects.
     */
    public static class Builder {
        // Defaults documented on the constructor below.
        private int mMaxStreams = 1;
        private AudioAttributes mAudioAttributes;

        /**
         * Constructs a new Builder with the default format values.
         * If not provided, the maximum number of streams is 1 (see {@link #setMaxStreams(int)} to
         * change it), and the audio attributes have a usage value of
         * {@link AudioAttributes#USAGE_MEDIA} (see {@link #setAudioAttributes(AudioAttributes)} to
         * change them).
         */
        public Builder() {
        }

        /**
         * Sets the maximum number of streams that can play simultaneously.
         * @param maxStreams a value equal to 1 or greater.
         * @return the same Builder instance
         * @throws IllegalArgumentException if maxStreams is not strictly positive
         */
        public Builder setMaxStreams(int maxStreams) throws IllegalArgumentException {
            if (maxStreams <= 0) {
                throw new IllegalArgumentException(
                        "Strictly positive value required for the maximum number of streams");
            }
            mMaxStreams = maxStreams;
            return this;
        }

        /**
         * Sets the {@link AudioAttributes}. For examples, game applications will use attributes
         * built with usage information set to {@link AudioAttributes#USAGE_GAME}.
         * @param attributes a non-null {@link AudioAttributes} instance
         * @return the same Builder instance
         * @throws IllegalArgumentException if attributes is null
         */
        public Builder setAudioAttributes(AudioAttributes attributes)
                throws IllegalArgumentException {
            if (attributes == null) {
                throw new IllegalArgumentException("Invalid null AudioAttributes");
            }
            mAudioAttributes = attributes;
            return this;
        }

        /**
         * Builds the SoundPool, filling in a USAGE_MEDIA attributes default
         * if none was supplied.
         * @return a new {@link SoundPool} instance
         */
        public SoundPool build() {
            if (mAudioAttributes == null) {
                mAudioAttributes = new AudioAttributes.Builder()
                        .setUsage(AudioAttributes.USAGE_MEDIA).build();
            }
            return new SoundPool(mMaxStreams, mAudioAttributes);
        }
    }
+}
diff --git a/android/media/SubtitleController.java b/android/media/SubtitleController.java
new file mode 100644
index 00000000..fd72b39b
--- /dev/null
+++ b/android/media/SubtitleController.java
@@ -0,0 +1,507 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import java.util.Locale;
+import java.util.Vector;
+
+import android.content.Context;
+import android.media.MediaPlayer.TrackInfo;
+import android.media.SubtitleTrack.RenderingWidget;
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Message;
+import android.view.accessibility.CaptioningManager;
+
+/**
+ * The subtitle controller provides the architecture to display subtitles for a
+ * media source. It allows specifying which tracks to display, on which anchor
+ * to display them, and also allows adding external, out-of-band subtitle tracks.
+ *
+ * @hide
+ */
+public class SubtitleController {
+ private MediaTimeProvider mTimeProvider;
+ private Vector<Renderer> mRenderers;
+ private Vector<SubtitleTrack> mTracks;
+ private SubtitleTrack mSelectedTrack;
+ private boolean mShowing;
+ private CaptioningManager mCaptioningManager;
+ private Handler mHandler;
+
+ private static final int WHAT_SHOW = 1;
+ private static final int WHAT_HIDE = 2;
+ private static final int WHAT_SELECT_TRACK = 3;
+ private static final int WHAT_SELECT_DEFAULT_TRACK = 4;
+
+ private final Handler.Callback mCallback = new Handler.Callback() {
+ @Override
+ public boolean handleMessage(Message msg) {
+ switch (msg.what) {
+ case WHAT_SHOW:
+ doShow();
+ return true;
+ case WHAT_HIDE:
+ doHide();
+ return true;
+ case WHAT_SELECT_TRACK:
+ doSelectTrack((SubtitleTrack)msg.obj);
+ return true;
+ case WHAT_SELECT_DEFAULT_TRACK:
+ doSelectDefaultTrack();
+ return true;
+ default:
+ return false;
+ }
+ }
+ };
+
+ private CaptioningManager.CaptioningChangeListener mCaptioningChangeListener =
+ new CaptioningManager.CaptioningChangeListener() {
+ /** @hide */
+ @Override
+ public void onEnabledChanged(boolean enabled) {
+ selectDefaultTrack();
+ }
+
+ /** @hide */
+ @Override
+ public void onLocaleChanged(Locale locale) {
+ selectDefaultTrack();
+ }
+ };
+
+ /**
+ * Creates a subtitle controller for a media playback object that implements
+ * the MediaTimeProvider interface.
+ *
+ * @param timeProvider
+ */
+ public SubtitleController(
+ Context context,
+ MediaTimeProvider timeProvider,
+ Listener listener) {
+ mTimeProvider = timeProvider;
+ mListener = listener;
+
+ mRenderers = new Vector<Renderer>();
+ mShowing = false;
+ mTracks = new Vector<SubtitleTrack>();
+ mCaptioningManager =
+ (CaptioningManager)context.getSystemService(Context.CAPTIONING_SERVICE);
+ }
+
+ @Override
+ protected void finalize() throws Throwable {
+ mCaptioningManager.removeCaptioningChangeListener(
+ mCaptioningChangeListener);
+ super.finalize();
+ }
+
+ /**
+ * @return the available subtitle tracks for this media. These include
+ * the tracks found by {@link MediaPlayer} as well as any tracks added
+ * manually via {@link #addTrack}.
+ */
+ public SubtitleTrack[] getTracks() {
+ synchronized(mTracks) {
+ SubtitleTrack[] tracks = new SubtitleTrack[mTracks.size()];
+ mTracks.toArray(tracks);
+ return tracks;
+ }
+ }
+
+ /**
+ * @return the currently selected subtitle track
+ */
+ public SubtitleTrack getSelectedTrack() {
+ return mSelectedTrack;
+ }
+
+ private RenderingWidget getRenderingWidget() {
+ if (mSelectedTrack == null) {
+ return null;
+ }
+ return mSelectedTrack.getRenderingWidget();
+ }
+
    /**
     * Selects a subtitle track. As a result, this track will receive
     * in-band data from the {@link MediaPlayer}. However, this does
     * not change the subtitle visibility.
     *
     * Should be called from the anchor's (UI) thread. {@see #Anchor.getSubtitleLooper}
     *
     * @param track The subtitle track to select. This must be one of the
     *        tracks in {@link #getTracks}, or null to deselect.
     * @return true if the track was successfully selected.
     */
    public boolean selectTrack(SubtitleTrack track) {
        if (track != null && !mTracks.contains(track)) {
            return false;
        }

        // NOTE(review): mHandler is only assigned in setAnchor(); calling this
        // before an anchor is attached would NPE inside processOnAnchor once
        // asserts are disabled — confirm callers always anchor first.
        processOnAnchor(mHandler.obtainMessage(WHAT_SELECT_TRACK, track));
        return true;
    }
+
+ private void doSelectTrack(SubtitleTrack track) {
+ mTrackIsExplicit = true;
+ if (mSelectedTrack == track) {
+ return;
+ }
+
+ if (mSelectedTrack != null) {
+ mSelectedTrack.hide();
+ mSelectedTrack.setTimeProvider(null);
+ }
+
+ mSelectedTrack = track;
+ if (mAnchor != null) {
+ mAnchor.setSubtitleWidget(getRenderingWidget());
+ }
+
+ if (mSelectedTrack != null) {
+ mSelectedTrack.setTimeProvider(mTimeProvider);
+ mSelectedTrack.show();
+ }
+
+ if (mListener != null) {
+ mListener.onSubtitleTrackSelected(track);
+ }
+ }
+
+ /**
+ * @return the default subtitle track based on system preferences, or null,
+ * if no such track exists in this manager.
+ *
+ * Supports HLS-flags: AUTOSELECT, FORCED & DEFAULT.
+ *
+ * 1. If captioning is disabled, only consider FORCED tracks. Otherwise,
+ * consider all tracks, but prefer non-FORCED ones.
+ * 2. If user selected "Default" caption language:
+ * a. If there is a considered track with DEFAULT=yes, returns that track
+ * (favor the first one in the current language if there are more than
+ * one default tracks, or the first in general if none of them are in
+ * the current language).
+ * b. Otherwise, if there is a track with AUTOSELECT=yes in the current
+ * language, return that one.
+ * c. If there are no default tracks, and no autoselectable tracks in the
+ * current language, return null.
+ * 3. If there is a track with the caption language, select that one. Prefer
+ * the one with AUTOSELECT=no.
+ *
+ * The default values for these flags are DEFAULT=no, AUTOSELECT=yes
+ * and FORCED=no.
+ */
+ public SubtitleTrack getDefaultTrack() {
+ SubtitleTrack bestTrack = null;
+ int bestScore = -1;
+
+ Locale selectedLocale = mCaptioningManager.getLocale();
+ Locale locale = selectedLocale;
+ if (locale == null) {
+ locale = Locale.getDefault();
+ }
+ boolean selectForced = !mCaptioningManager.isEnabled();
+
+ synchronized(mTracks) {
+ for (SubtitleTrack track: mTracks) {
+ MediaFormat format = track.getFormat();
+ String language = format.getString(MediaFormat.KEY_LANGUAGE);
+ boolean forced =
+ format.getInteger(MediaFormat.KEY_IS_FORCED_SUBTITLE, 0) != 0;
+ boolean autoselect =
+ format.getInteger(MediaFormat.KEY_IS_AUTOSELECT, 1) != 0;
+ boolean is_default =
+ format.getInteger(MediaFormat.KEY_IS_DEFAULT, 0) != 0;
+
+ boolean languageMatches =
+ (locale == null ||
+ locale.getLanguage().equals("") ||
+ locale.getISO3Language().equals(language) ||
+ locale.getLanguage().equals(language));
+ // is_default is meaningless unless caption language is 'default'
+ int score = (forced ? 0 : 8) +
+ (((selectedLocale == null) && is_default) ? 4 : 0) +
+ (autoselect ? 0 : 2) + (languageMatches ? 1 : 0);
+
+ if (selectForced && !forced) {
+ continue;
+ }
+
+ // we treat null locale/language as matching any language
+ if ((selectedLocale == null && is_default) ||
+ (languageMatches &&
+ (autoselect || forced || selectedLocale != null))) {
+ if (score > bestScore) {
+ bestScore = score;
+ bestTrack = track;
+ }
+ }
+ }
+ }
+ return bestTrack;
+ }
+
+ private boolean mTrackIsExplicit = false;
+ private boolean mVisibilityIsExplicit = false;
+
+ /** @hide - should be called from anchor thread */
+ public void selectDefaultTrack() {
+ processOnAnchor(mHandler.obtainMessage(WHAT_SELECT_DEFAULT_TRACK));
+ }
+
+ private void doSelectDefaultTrack() {
+ if (mTrackIsExplicit) {
+ // If track selection is explicit, but visibility
+ // is not, it falls back to the captioning setting
+ if (!mVisibilityIsExplicit) {
+ if (mCaptioningManager.isEnabled() ||
+ (mSelectedTrack != null &&
+ mSelectedTrack.getFormat().getInteger(
+ MediaFormat.KEY_IS_FORCED_SUBTITLE, 0) != 0)) {
+ show();
+ } else if (mSelectedTrack != null
+ && mSelectedTrack.getTrackType() == TrackInfo.MEDIA_TRACK_TYPE_SUBTITLE) {
+ hide();
+ }
+ mVisibilityIsExplicit = false;
+ }
+ return;
+ }
+
+ // We can have a default (forced) track even if captioning
+ // is not enabled. This is handled by getDefaultTrack().
+ // Show this track unless subtitles were explicitly hidden.
+ SubtitleTrack track = getDefaultTrack();
+ if (track != null) {
+ selectTrack(track);
+ mTrackIsExplicit = false;
+ if (!mVisibilityIsExplicit) {
+ show();
+ mVisibilityIsExplicit = false;
+ }
+ }
+ }
+
+ /** @hide - must be called from anchor thread */
+ public void reset() {
+ checkAnchorLooper();
+ hide();
+ selectTrack(null);
+ mTracks.clear();
+ mTrackIsExplicit = false;
+ mVisibilityIsExplicit = false;
+ mCaptioningManager.removeCaptioningChangeListener(
+ mCaptioningChangeListener);
+ }
+
    /**
     * Adds a new, external subtitle track to the manager.
     *
     * The first renderer registered via {@link #registerRenderer} that
     * supports the format is asked to create the track.
     *
     * @param format the format of the track that will include at least
     *               the MIME type {@link MediaFormat@KEY_MIME}.
     * @return the created {@link SubtitleTrack} object, or null if no
     *         registered renderer supports the format
     */
    public SubtitleTrack addTrack(MediaFormat format) {
        synchronized(mRenderers) {
            for (Renderer renderer: mRenderers) {
                if (renderer.supports(format)) {
                    SubtitleTrack track = renderer.createTrack(format);
                    if (track != null) {
                        synchronized(mTracks) {
                            // First track added: start listening for captioning
                            // preference changes (removed again in reset()).
                            if (mTracks.size() == 0) {
                                mCaptioningManager.addCaptioningChangeListener(
                                        mCaptioningChangeListener);
                            }
                            mTracks.add(track);
                        }
                        return track;
                    }
                }
            }
        }
        return null;
    }
+
+ /**
+ * Show the selected (or default) subtitle track.
+ *
+ * Should be called from the anchor's (UI) thread. {@see #Anchor.getSubtitleLooper}
+ */
+ public void show() {
+ processOnAnchor(mHandler.obtainMessage(WHAT_SHOW));
+ }
+
+ private void doShow() {
+ mShowing = true;
+ mVisibilityIsExplicit = true;
+ if (mSelectedTrack != null) {
+ mSelectedTrack.show();
+ }
+ }
+
+ /**
+ * Hide the selected (or default) subtitle track.
+ *
+ * Should be called from the anchor's (UI) thread. {@see #Anchor.getSubtitleLooper}
+ */
+ public void hide() {
+ processOnAnchor(mHandler.obtainMessage(WHAT_HIDE));
+ }
+
+ private void doHide() {
+ mVisibilityIsExplicit = true;
+ if (mSelectedTrack != null) {
+ mSelectedTrack.hide();
+ }
+ mShowing = false;
+ }
+
+ /**
+ * Interface for supporting a single or multiple subtitle types in {@link
+ * MediaPlayer}.
+ */
+ public abstract static class Renderer {
+ /**
+ * Called by {@link MediaPlayer}'s {@link SubtitleController} when a new
+ * subtitle track is detected, to see if it should use this object to
+ * parse and display this subtitle track.
+ *
+ * @param format the format of the track that will include at least
+ * the MIME type {@link MediaFormat@KEY_MIME}.
+ *
+ * @return true if and only if the track format is supported by this
+ * renderer
+ */
+ public abstract boolean supports(MediaFormat format);
+
+ /**
+ * Called by {@link MediaPlayer}'s {@link SubtitleController} for each
+ * subtitle track that was detected and is supported by this object to
+ * create a {@link SubtitleTrack} object. This object will be created
+ * for each track that was found. If the track is selected for display,
+ * this object will be used to parse and display the track data.
+ *
+ * @param format the format of the track that will include at least
+ * the MIME type {@link MediaFormat@KEY_MIME}.
+ * @return a {@link SubtitleTrack} object that will be used to parse
+ * and render the subtitle track.
+ */
+ public abstract SubtitleTrack createTrack(MediaFormat format);
+ }
+
+ /**
+ * Add support for a subtitle format in {@link MediaPlayer}.
+ *
+ * @param renderer a {@link SubtitleController.Renderer} object that adds
+ * support for a subtitle format.
+ */
+ public void registerRenderer(Renderer renderer) {
+ synchronized(mRenderers) {
+ // TODO how to get available renderers in the system
+ if (!mRenderers.contains(renderer)) {
+ // TODO should added renderers override existing ones (to allow replacing?)
+ mRenderers.add(renderer);
+ }
+ }
+ }
+
+ /** @hide */
+ public boolean hasRendererFor(MediaFormat format) {
+ synchronized(mRenderers) {
+ // TODO how to get available renderers in the system
+ for (Renderer renderer: mRenderers) {
+ if (renderer.supports(format)) {
+ return true;
+ }
+ }
+ return false;
+ }
+ }
+
+ /**
+ * Subtitle anchor, an object that is able to display a subtitle renderer,
+ * e.g. a VideoView.
+ */
+ public interface Anchor {
+ /**
+ * Anchor should use the supplied subtitle rendering widget, or
+ * none if it is null.
+ * @hide
+ */
+ public void setSubtitleWidget(RenderingWidget subtitleWidget);
+
+ /**
+ * Anchors provide the looper on which all track visibility changes
+ * (track.show/hide, setSubtitleWidget) will take place.
+ * @hide
+ */
+ public Looper getSubtitleLooper();
+ }
+
+ private Anchor mAnchor;
+
+ /**
+ * @hide - called from anchor's looper (if any, both when unsetting and
+ * setting)
+ */
+ public void setAnchor(Anchor anchor) {
+ if (mAnchor == anchor) {
+ return;
+ }
+
+ if (mAnchor != null) {
+ checkAnchorLooper();
+ mAnchor.setSubtitleWidget(null);
+ }
+ mAnchor = anchor;
+ mHandler = null;
+ if (mAnchor != null) {
+ mHandler = new Handler(mAnchor.getSubtitleLooper(), mCallback);
+ checkAnchorLooper();
+ mAnchor.setSubtitleWidget(getRenderingWidget());
+ }
+ }
+
+ private void checkAnchorLooper() {
+ assert mHandler != null : "Should have a looper already";
+ assert Looper.myLooper() == mHandler.getLooper() : "Must be called from the anchor's looper";
+ }
+
    // Runs the message on the anchor's looper: synchronously when already on
    // that thread (preserves call ordering), otherwise posted asynchronously.
    private void processOnAnchor(Message m) {
        assert mHandler != null : "Should have a looper already";
        if (Looper.myLooper() == mHandler.getLooper()) {
            mHandler.dispatchMessage(m);
        } else {
            mHandler.sendMessage(m);
        }
    }
+
+ public interface Listener {
+ /**
+ * Called when a subtitle track has been selected.
+ *
+ * @param track selected subtitle track or null
+ * @hide
+ */
+ public void onSubtitleTrackSelected(SubtitleTrack track);
+ }
+
+ private Listener mListener;
+}
diff --git a/android/media/SubtitleData.java b/android/media/SubtitleData.java
new file mode 100644
index 00000000..3e6f6f9f
--- /dev/null
+++ b/android/media/SubtitleData.java
@@ -0,0 +1,87 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.os.Parcel;
+
+/**
+ * @hide
+ *
+ * Class to hold the subtitle track's data, including:
+ * <ul>
+ * <li> Track index</li>
+ * <li> Start time (in microseconds) of the data</li>
+ * <li> Duration (in microseconds) of the data</li>
+ * <li> A byte-array of the data</li>
+ * </ul>
+ *
+ * <p> To receive the subtitle data, applications need to do the following:
+ *
+ * <ul>
+ * <li> Select a track of type MEDIA_TRACK_TYPE_SUBTITLE with {@link MediaPlayer.selectTrack(int)</li>
+ * <li> Implement the {@link MediaPlayer.OnSubtitleDataListener} interface</li>
+ * <li> Register the {@link MediaPlayer.OnSubtitleDataListener} callback on a MediaPlayer object</li>
+ * </ul>
+ *
+ * @see android.media.MediaPlayer
+ */
public final class SubtitleData
{
    private static final String TAG = "SubtitleData";

    // Fields populated from the parcel in parseParcel(), in this order.
    private int mTrackIndex;
    private long mStartTimeUs;
    private long mDurationUs;
    private byte[] mData;

    /**
     * Constructs a SubtitleData by reading track index, start time,
     * duration and payload from the given parcel.
     *
     * @throws IllegalArgumentException if the parcel cannot be parsed
     */
    public SubtitleData(Parcel parcel) {
        if (!parseParcel(parcel)) {
            throw new IllegalArgumentException("parseParcel() fails");
        }
    }

    public int getTrackIndex() {
        return mTrackIndex;
    }

    public long getStartTimeUs() {
        return mStartTimeUs;
    }

    public long getDurationUs() {
        return mDurationUs;
    }

    public byte[] getData() {
        return mData;
    }

    // Reads the fixed layout (int trackIndex, long startUs, long durationUs,
    // int length, byte[length] data) from position 0 of the parcel.
    private boolean parseParcel(Parcel parcel) {
        parcel.setDataPosition(0);
        if (parcel.dataAvail() == 0) {
            return false;
        }

        mTrackIndex = parcel.readInt();
        mStartTimeUs = parcel.readLong();
        mDurationUs = parcel.readLong();
        // NOTE(review): the length read from the parcel is not validated; a
        // negative or oversized value would throw from the allocation or the
        // readByteArray call — confirm the native writer guarantees sane sizes.
        mData = new byte[parcel.readInt()];
        parcel.readByteArray(mData);

        return true;
    }
}
diff --git a/android/media/SubtitleTrack.java b/android/media/SubtitleTrack.java
new file mode 100644
index 00000000..6c8e3231
--- /dev/null
+++ b/android/media/SubtitleTrack.java
@@ -0,0 +1,726 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.graphics.Canvas;
+import android.media.MediaPlayer.TrackInfo;
+import android.os.Handler;
+import android.util.Log;
+import android.util.LongSparseArray;
+import android.util.Pair;
+
+import java.util.Iterator;
+import java.util.NoSuchElementException;
+import java.util.SortedMap;
+import java.util.TreeMap;
+import java.util.Vector;
+
+/**
+ * A subtitle track abstract base class that is responsible for parsing and displaying
+ * an instance of a particular type of subtitle.
+ *
+ * @hide
+ */
+public abstract class SubtitleTrack implements MediaTimeProvider.OnMediaTimeListener {
+ private static final String TAG = "SubtitleTrack";
+ private long mLastUpdateTimeMs;
+ private long mLastTimeMs;
+
+ private Runnable mRunnable;
+
+ /** @hide TODO private */
+ final protected LongSparseArray<Run> mRunsByEndTime = new LongSparseArray<Run>();
+ /** @hide TODO private */
+ final protected LongSparseArray<Run> mRunsByID = new LongSparseArray<Run>();
+
+ /** @hide TODO private */
+ protected CueList mCues;
+ /** @hide TODO private */
+ final protected Vector<Cue> mActiveCues = new Vector<Cue>();
+ /** @hide */
+ protected boolean mVisible;
+
+ /** @hide */
+ public boolean DEBUG = false;
+
+ /** @hide */
+ protected Handler mHandler = new Handler();
+
+ private MediaFormat mFormat;
+
+ public SubtitleTrack(MediaFormat format) {
+ mFormat = format;
+ mCues = new CueList();
+ clearActiveCues();
+ mLastTimeMs = -1;
+ }
+
+ /** @hide */
+ public final MediaFormat getFormat() {
+ return mFormat;
+ }
+
+ private long mNextScheduledTimeMs = -1;
+
+ protected void onData(SubtitleData data) {
+ long runID = data.getStartTimeUs() + 1;
+ onData(data.getData(), true /* eos */, runID);
+ setRunDiscardTimeMs(
+ runID,
+ (data.getStartTimeUs() + data.getDurationUs()) / 1000);
+ }
+
+ /**
+ * Called when there is input data for the subtitle track. The
+ * complete subtitle for a track can include multiple whole units
+ * (runs). Each of these units can have multiple sections. The
+ * contents of a run are submitted in sequential order, with eos
+ * indicating the last section of the run. Calls from different
+ * runs must not be intermixed.
+ *
+ * @param data subtitle data byte buffer
+ * @param eos true if this is the last section of the run.
+ * @param runID mostly-unique ID for this run of data. Subtitle cues
+ * with runID of 0 are discarded immediately after
+ * display. Cues with runID of ~0 are discarded
+ * only at the deletion of the track object. Cues
+ * with other runID-s are discarded at the end of the
+ * run, which defaults to the latest timestamp of
+ * any of its cues (with this runID).
+ */
+ public abstract void onData(byte[] data, boolean eos, long runID);
+
+ /**
+ * Called when adding the subtitle rendering widget to the view hierarchy,
+ * as well as when showing or hiding the subtitle track, or when the video
+ * surface position has changed.
+ *
+ * @return the widget that renders this subtitle track. For most renderers
+ * there should be a single shared instance that is used for all
+ * tracks supported by that renderer, as at most one subtitle track
+ * is visible at one time.
+ */
+ public abstract RenderingWidget getRenderingWidget();
+
+ /**
+ * Called when the active cues have changed, and the contents of the subtitle
+ * view should be updated.
+ *
+ * @hide
+ */
+ public abstract void updateView(Vector<Cue> activeCues);
+
+ /** @hide */
+ protected synchronized void updateActiveCues(boolean rebuild, long timeMs) {
+ // out-of-order times mean seeking or new active cues being added
+ // (during their own timespan)
+ if (rebuild || mLastUpdateTimeMs > timeMs) {
+ clearActiveCues();
+ }
+
+ for(Iterator<Pair<Long, Cue> > it =
+ mCues.entriesBetween(mLastUpdateTimeMs, timeMs).iterator(); it.hasNext(); ) {
+ Pair<Long, Cue> event = it.next();
+ Cue cue = event.second;
+
+ if (cue.mEndTimeMs == event.first) {
+ // remove past cues
+ if (DEBUG) Log.v(TAG, "Removing " + cue);
+ mActiveCues.remove(cue);
+ if (cue.mRunID == 0) {
+ it.remove();
+ }
+ } else if (cue.mStartTimeMs == event.first) {
+ // add new cues
+ // TRICKY: this will happen in start order
+ if (DEBUG) Log.v(TAG, "Adding " + cue);
+ if (cue.mInnerTimesMs != null) {
+ cue.onTime(timeMs);
+ }
+ mActiveCues.add(cue);
+ } else if (cue.mInnerTimesMs != null) {
+ // cue is modified
+ cue.onTime(timeMs);
+ }
+ }
+
+ /* complete any runs */
+ while (mRunsByEndTime.size() > 0 &&
+ mRunsByEndTime.keyAt(0) <= timeMs) {
+ removeRunsByEndTimeIndex(0); // removes element
+ }
+ mLastUpdateTimeMs = timeMs;
+ }
+
+ private void removeRunsByEndTimeIndex(int ix) {
+ Run run = mRunsByEndTime.valueAt(ix);
+ while (run != null) {
+ Cue cue = run.mFirstCue;
+ while (cue != null) {
+ mCues.remove(cue);
+ Cue nextCue = cue.mNextInRun;
+ cue.mNextInRun = null;
+ cue = nextCue;
+ }
+ mRunsByID.remove(run.mRunID);
+ Run nextRun = run.mNextRunAtEndTimeMs;
+ run.mPrevRunAtEndTimeMs = null;
+ run.mNextRunAtEndTimeMs = null;
+ run = nextRun;
+ }
+ mRunsByEndTime.removeAt(ix);
+ }
+
+ @Override
+ protected void finalize() throws Throwable {
+ /* remove all cues (untangle all cross-links) */
+ int size = mRunsByEndTime.size();
+ for(int ix = size - 1; ix >= 0; ix--) {
+ removeRunsByEndTimeIndex(ix);
+ }
+
+ super.finalize();
+ }
+
+ private synchronized void takeTime(long timeMs) {
+ mLastTimeMs = timeMs;
+ }
+
+ /** @hide */
+ protected synchronized void clearActiveCues() {
+ if (DEBUG) Log.v(TAG, "Clearing " + mActiveCues.size() + " active cues");
+ mActiveCues.clear();
+ mLastUpdateTimeMs = -1;
+ }
+
+ /** @hide */
+ protected void scheduleTimedEvents() {
+ /* get times for the next event */
+ if (mTimeProvider != null) {
+ mNextScheduledTimeMs = mCues.nextTimeAfter(mLastTimeMs);
+ if (DEBUG) Log.d(TAG, "sched @" + mNextScheduledTimeMs + " after " + mLastTimeMs);
+ mTimeProvider.notifyAt(
+ mNextScheduledTimeMs >= 0 ?
+ (mNextScheduledTimeMs * 1000) : MediaTimeProvider.NO_TIME,
+ this);
+ }
+ }
+
+ /**
+ * @hide
+ */
+ @Override
+ public void onTimedEvent(long timeUs) {
+ if (DEBUG) Log.d(TAG, "onTimedEvent " + timeUs);
+ synchronized (this) {
+ long timeMs = timeUs / 1000;
+ updateActiveCues(false, timeMs);
+ takeTime(timeMs);
+ }
+ updateView(mActiveCues);
+ scheduleTimedEvents();
+ }
+
+ /**
+ * @hide
+ */
+ @Override
+ public void onSeek(long timeUs) {
+ if (DEBUG) Log.d(TAG, "onSeek " + timeUs);
+ synchronized (this) {
+ long timeMs = timeUs / 1000;
+ updateActiveCues(true, timeMs);
+ takeTime(timeMs);
+ }
+ updateView(mActiveCues);
+ scheduleTimedEvents();
+ }
+
+ /**
+ * @hide
+ */
+ @Override
+ public void onStop() {
+ synchronized (this) {
+ if (DEBUG) Log.d(TAG, "onStop");
+ clearActiveCues();
+ mLastTimeMs = -1;
+ }
+ updateView(mActiveCues);
+ mNextScheduledTimeMs = -1;
+ mTimeProvider.notifyAt(MediaTimeProvider.NO_TIME, this);
+ }
+
+ /** @hide */
+ protected MediaTimeProvider mTimeProvider;
+
+ /** @hide */
+ public void show() {
+ if (mVisible) {
+ return;
+ }
+
+ mVisible = true;
+ RenderingWidget renderingWidget = getRenderingWidget();
+ if (renderingWidget != null) {
+ renderingWidget.setVisible(true);
+ }
+ if (mTimeProvider != null) {
+ mTimeProvider.scheduleUpdate(this);
+ }
+ }
+
+ /** @hide */
+ public void hide() {
+ if (!mVisible) {
+ return;
+ }
+
+ if (mTimeProvider != null) {
+ mTimeProvider.cancelNotifications(this);
+ }
+ RenderingWidget renderingWidget = getRenderingWidget();
+ if (renderingWidget != null) {
+ renderingWidget.setVisible(false);
+ }
+ mVisible = false;
+ }
+
+ /** @hide */
+ protected synchronized boolean addCue(Cue cue) {
+ mCues.add(cue);
+
+ if (cue.mRunID != 0) {
+ Run run = mRunsByID.get(cue.mRunID);
+ if (run == null) {
+ run = new Run();
+ mRunsByID.put(cue.mRunID, run);
+ run.mEndTimeMs = cue.mEndTimeMs;
+ } else if (run.mEndTimeMs < cue.mEndTimeMs) {
+ run.mEndTimeMs = cue.mEndTimeMs;
+ }
+
+ // link-up cues in the same run
+ cue.mNextInRun = run.mFirstCue;
+ run.mFirstCue = cue;
+ }
+
+ // if a cue is added that should be visible, need to refresh view
+ long nowMs = -1;
+ if (mTimeProvider != null) {
+ try {
+ nowMs = mTimeProvider.getCurrentTimeUs(
+ false /* precise */, true /* monotonic */) / 1000;
+ } catch (IllegalStateException e) {
+ // handle as it we are not playing
+ }
+ }
+
+ if (DEBUG) Log.v(TAG, "mVisible=" + mVisible + ", " +
+ cue.mStartTimeMs + " <= " + nowMs + ", " +
+ cue.mEndTimeMs + " >= " + mLastTimeMs);
+
+ if (mVisible &&
+ cue.mStartTimeMs <= nowMs &&
+ // we don't trust nowMs, so check any cue since last callback
+ cue.mEndTimeMs >= mLastTimeMs) {
+ if (mRunnable != null) {
+ mHandler.removeCallbacks(mRunnable);
+ }
+ final SubtitleTrack track = this;
+ final long thenMs = nowMs;
+ mRunnable = new Runnable() {
+ @Override
+ public void run() {
+ // even with synchronized, it is possible that we are going
+ // to do multiple updates as the runnable could be already
+ // running.
+ synchronized (track) {
+ mRunnable = null;
+ updateActiveCues(true, thenMs);
+ updateView(mActiveCues);
+ }
+ }
+ };
+ // delay update so we don't update view on every cue. TODO why 10?
+ if (mHandler.postDelayed(mRunnable, 10 /* delay */)) {
+ if (DEBUG) Log.v(TAG, "scheduling update");
+ } else {
+ if (DEBUG) Log.w(TAG, "failed to schedule subtitle view update");
+ }
+ return true;
+ }
+
+ if (mVisible &&
+ cue.mEndTimeMs >= mLastTimeMs &&
+ (cue.mStartTimeMs < mNextScheduledTimeMs ||
+ mNextScheduledTimeMs < 0)) {
+ scheduleTimedEvents();
+ }
+
+ return false;
+ }
+
+ /** @hide */
+ public synchronized void setTimeProvider(MediaTimeProvider timeProvider) {
+ if (mTimeProvider == timeProvider) {
+ return;
+ }
+ if (mTimeProvider != null) {
+ mTimeProvider.cancelNotifications(this);
+ }
+ mTimeProvider = timeProvider;
+ if (mTimeProvider != null) {
+ mTimeProvider.scheduleUpdate(this);
+ }
+ }
+
+
+ /** @hide */
+ static class CueList {
+ private static final String TAG = "CueList";
+ // simplistic, inefficient implementation
+ private SortedMap<Long, Vector<Cue> > mCues;
+ public boolean DEBUG = false;
+
+ private boolean addEvent(Cue cue, long timeMs) {
+ Vector<Cue> cues = mCues.get(timeMs);
+ if (cues == null) {
+ cues = new Vector<Cue>(2);
+ mCues.put(timeMs, cues);
+ } else if (cues.contains(cue)) {
+ // do not duplicate cues
+ return false;
+ }
+
+ cues.add(cue);
+ return true;
+ }
+
+ private void removeEvent(Cue cue, long timeMs) {
+ Vector<Cue> cues = mCues.get(timeMs);
+ if (cues != null) {
+ cues.remove(cue);
+ if (cues.size() == 0) {
+ mCues.remove(timeMs);
+ }
+ }
+ }
+
+ public void add(Cue cue) {
+ // ignore non-positive-duration cues
+ if (cue.mStartTimeMs >= cue.mEndTimeMs)
+ return;
+
+ if (!addEvent(cue, cue.mStartTimeMs)) {
+ return;
+ }
+
+ long lastTimeMs = cue.mStartTimeMs;
+ if (cue.mInnerTimesMs != null) {
+ for (long timeMs: cue.mInnerTimesMs) {
+ if (timeMs > lastTimeMs && timeMs < cue.mEndTimeMs) {
+ addEvent(cue, timeMs);
+ lastTimeMs = timeMs;
+ }
+ }
+ }
+
+ addEvent(cue, cue.mEndTimeMs);
+ }
+
+ public void remove(Cue cue) {
+ removeEvent(cue, cue.mStartTimeMs);
+ if (cue.mInnerTimesMs != null) {
+ for (long timeMs: cue.mInnerTimesMs) {
+ removeEvent(cue, timeMs);
+ }
+ }
+ removeEvent(cue, cue.mEndTimeMs);
+ }
+
+ public Iterable<Pair<Long, Cue>> entriesBetween(
+ final long lastTimeMs, final long timeMs) {
+ return new Iterable<Pair<Long, Cue> >() {
+ @Override
+ public Iterator<Pair<Long, Cue> > iterator() {
+ if (DEBUG) Log.d(TAG, "slice (" + lastTimeMs + ", " + timeMs + "]=");
+ try {
+ return new EntryIterator(
+ mCues.subMap(lastTimeMs + 1, timeMs + 1));
+ } catch(IllegalArgumentException e) {
+ return new EntryIterator(null);
+ }
+ }
+ };
+ }
+
+ public long nextTimeAfter(long timeMs) {
+ SortedMap<Long, Vector<Cue>> tail = null;
+ try {
+ tail = mCues.tailMap(timeMs + 1);
+ if (tail != null) {
+ return tail.firstKey();
+ } else {
+ return -1;
+ }
+ } catch(IllegalArgumentException e) {
+ return -1;
+ } catch(NoSuchElementException e) {
+ return -1;
+ }
+ }
+
+ class EntryIterator implements Iterator<Pair<Long, Cue> > {
+ @Override
+ public boolean hasNext() {
+ return !mDone;
+ }
+
+ @Override
+ public Pair<Long, Cue> next() {
+ if (mDone) {
+ throw new NoSuchElementException("");
+ }
+ mLastEntry = new Pair<Long, Cue>(
+ mCurrentTimeMs, mListIterator.next());
+ mLastListIterator = mListIterator;
+ if (!mListIterator.hasNext()) {
+ nextKey();
+ }
+ return mLastEntry;
+ }
+
+ @Override
+ public void remove() {
+ // only allow removing end tags
+ if (mLastListIterator == null ||
+ mLastEntry.second.mEndTimeMs != mLastEntry.first) {
+ throw new IllegalStateException("");
+ }
+
+ // remove end-cue
+ mLastListIterator.remove();
+ mLastListIterator = null;
+ if (mCues.get(mLastEntry.first).size() == 0) {
+ mCues.remove(mLastEntry.first);
+ }
+
+ // remove rest of the cues
+ Cue cue = mLastEntry.second;
+ removeEvent(cue, cue.mStartTimeMs);
+ if (cue.mInnerTimesMs != null) {
+ for (long timeMs: cue.mInnerTimesMs) {
+ removeEvent(cue, timeMs);
+ }
+ }
+ }
+
+ public EntryIterator(SortedMap<Long, Vector<Cue> > cues) {
+ if (DEBUG) Log.v(TAG, cues + "");
+ mRemainingCues = cues;
+ mLastListIterator = null;
+ nextKey();
+ }
+
+ private void nextKey() {
+ do {
+ try {
+ if (mRemainingCues == null) {
+ throw new NoSuchElementException("");
+ }
+ mCurrentTimeMs = mRemainingCues.firstKey();
+ mListIterator =
+ mRemainingCues.get(mCurrentTimeMs).iterator();
+ try {
+ mRemainingCues =
+ mRemainingCues.tailMap(mCurrentTimeMs + 1);
+ } catch (IllegalArgumentException e) {
+ mRemainingCues = null;
+ }
+ mDone = false;
+ } catch (NoSuchElementException e) {
+ mDone = true;
+ mRemainingCues = null;
+ mListIterator = null;
+ return;
+ }
+ } while (!mListIterator.hasNext());
+ }
+
+ private long mCurrentTimeMs;
+ private Iterator<Cue> mListIterator;
+ private boolean mDone;
+ private SortedMap<Long, Vector<Cue> > mRemainingCues;
+ private Iterator<Cue> mLastListIterator;
+ private Pair<Long,Cue> mLastEntry;
+ }
+
+ CueList() {
+ mCues = new TreeMap<Long, Vector<Cue>>();
+ }
+ }
+
+ /** @hide */
+ public static class Cue {
+ public long mStartTimeMs;
+ public long mEndTimeMs;
+ public long[] mInnerTimesMs;
+ public long mRunID;
+
+ /** @hide */
+ public Cue mNextInRun;
+
+ public void onTime(long timeMs) { }
+ }
+
+ /** @hide update mRunsByEndTime (with default end time) */
+ protected void finishedRun(long runID) {
+ if (runID != 0 && runID != ~0) {
+ Run run = mRunsByID.get(runID);
+ if (run != null) {
+ run.storeByEndTimeMs(mRunsByEndTime);
+ }
+ }
+ }
+
+ /** @hide update mRunsByEndTime with given end time */
+ public void setRunDiscardTimeMs(long runID, long timeMs) {
+ if (runID != 0 && runID != ~0) {
+ Run run = mRunsByID.get(runID);
+ if (run != null) {
+ run.mEndTimeMs = timeMs;
+ run.storeByEndTimeMs(mRunsByEndTime);
+ }
+ }
+ }
+
+ /** @hide whether this is a text track who fires events instead getting rendered */
+ public int getTrackType() {
+ return getRenderingWidget() == null
+ ? TrackInfo.MEDIA_TRACK_TYPE_TIMEDTEXT
+ : TrackInfo.MEDIA_TRACK_TYPE_SUBTITLE;
+ }
+
+
+ /** @hide */
+ private static class Run {
+ public Cue mFirstCue;
+ public Run mNextRunAtEndTimeMs;
+ public Run mPrevRunAtEndTimeMs;
+ public long mEndTimeMs = -1;
+ public long mRunID = 0;
+ private long mStoredEndTimeMs = -1;
+
+ public void storeByEndTimeMs(LongSparseArray<Run> runsByEndTime) {
+ // remove old value if any
+ int ix = runsByEndTime.indexOfKey(mStoredEndTimeMs);
+ if (ix >= 0) {
+ if (mPrevRunAtEndTimeMs == null) {
+ assert(this == runsByEndTime.valueAt(ix));
+ if (mNextRunAtEndTimeMs == null) {
+ runsByEndTime.removeAt(ix);
+ } else {
+ runsByEndTime.setValueAt(ix, mNextRunAtEndTimeMs);
+ }
+ }
+ removeAtEndTimeMs();
+ }
+
+ // add new value
+ if (mEndTimeMs >= 0) {
+ mPrevRunAtEndTimeMs = null;
+ mNextRunAtEndTimeMs = runsByEndTime.get(mEndTimeMs);
+ if (mNextRunAtEndTimeMs != null) {
+ mNextRunAtEndTimeMs.mPrevRunAtEndTimeMs = this;
+ }
+ runsByEndTime.put(mEndTimeMs, this);
+ mStoredEndTimeMs = mEndTimeMs;
+ }
+ }
+
+ public void removeAtEndTimeMs() {
+ Run prev = mPrevRunAtEndTimeMs;
+
+ if (mPrevRunAtEndTimeMs != null) {
+ mPrevRunAtEndTimeMs.mNextRunAtEndTimeMs = mNextRunAtEndTimeMs;
+ mPrevRunAtEndTimeMs = null;
+ }
+ if (mNextRunAtEndTimeMs != null) {
+ mNextRunAtEndTimeMs.mPrevRunAtEndTimeMs = prev;
+ mNextRunAtEndTimeMs = null;
+ }
+ }
+ }
+
+ /**
+ * Interface for rendering subtitles onto a Canvas.
+ */
+ public interface RenderingWidget {
+ /**
+ * Sets the widget's callback, which is used to send updates when the
+ * rendered data has changed.
+ *
+ * @param callback update callback
+ */
+ public void setOnChangedListener(OnChangedListener callback);
+
+ /**
+ * Sets the widget's size.
+ *
+ * @param width width in pixels
+ * @param height height in pixels
+ */
+ public void setSize(int width, int height);
+
+ /**
+ * Sets whether the widget should draw subtitles.
+ *
+ * @param visible true if subtitles should be drawn, false otherwise
+ */
+ public void setVisible(boolean visible);
+
+ /**
+ * Renders subtitles onto a {@link Canvas}.
+ *
+ * @param c canvas on which to render subtitles
+ */
+ public void draw(Canvas c);
+
+ /**
+ * Called when the widget is attached to a window.
+ */
+ public void onAttachedToWindow();
+
+ /**
+ * Called when the widget is detached from a window.
+ */
+ public void onDetachedFromWindow();
+
+ /**
+ * Callback used to send updates about changes to rendering data.
+ */
+ public interface OnChangedListener {
+ /**
+ * Called when the rendering data has changed.
+ *
+ * @param renderingWidget the widget whose data has changed
+ */
+ public void onChanged(RenderingWidget renderingWidget);
+ }
+ }
+}
diff --git a/android/media/SyncParams.java b/android/media/SyncParams.java
new file mode 100644
index 00000000..9f6bfe14
--- /dev/null
+++ b/android/media/SyncParams.java
@@ -0,0 +1,288 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+import android.annotation.IntDef;
+
/**
 * Structure for common A/V sync params.
 *
 * Used by {@link MediaSync} {@link MediaSync#getSyncParams()} and
 * {@link MediaSync#setSyncParams(SyncParams)}
 * to control A/V sync behavior.
 * <p> <strong>audio adjust mode:</strong>
 * select handling of audio track when changing playback speed due to sync.
 * <ul>
 * <li> {@link SyncParams#AUDIO_ADJUST_MODE_DEFAULT}:
 *   System will determine best handling. </li>
 * <li> {@link SyncParams#AUDIO_ADJUST_MODE_STRETCH}:
 *   Change the speed of audio playback without altering its pitch.</li>
 * <li> {@link SyncParams#AUDIO_ADJUST_MODE_RESAMPLE}:
 *   Change the speed of audio playback by resampling the audio.</li>
 * </ul>
 * <p> <strong>sync source:</strong> select
 * clock source for sync.
 * <ul>
 * <li> {@link SyncParams#SYNC_SOURCE_DEFAULT}:
 *   System will determine best selection.</li>
 * <li> {@link SyncParams#SYNC_SOURCE_SYSTEM_CLOCK}:
 *   Use system clock for sync source.</li>
 * <li> {@link SyncParams#SYNC_SOURCE_AUDIO}:
 *   Use audio track for sync source.</li>
 * <li> {@link SyncParams#SYNC_SOURCE_VSYNC}:
 *   Synchronize media to vsync.</li>
 * </ul>
 * <p> <strong>tolerance:</strong> specifies the amount of allowed playback rate
 * change to keep media in sync with the sync source. The handling of this depends
 * on the sync source, but must not be negative, and must be less than one.
 * <p> <strong>frameRate:</strong> initial hint for video frame rate. Used when
 * sync source is vsync. Negative values can be used to clear a previous hint.
 */
public final class SyncParams {
    /** @hide */
    @IntDef(
        value = {
            SYNC_SOURCE_DEFAULT,
            SYNC_SOURCE_SYSTEM_CLOCK,
            SYNC_SOURCE_AUDIO,
            SYNC_SOURCE_VSYNC,
        }
    )
    @Retention(RetentionPolicy.SOURCE)
    public @interface SyncSource {}

    /**
     * Use the default sync source (default). If media has video, the sync renders to a
     * surface that directly renders to a display, and tolerance is non zero (e.g. not
     * less than 0.001) vsync source is used for clock source. Otherwise, if media has
     * audio, audio track is used. Finally, if media has no audio, system clock is used.
     */
    public static final int SYNC_SOURCE_DEFAULT = 0;

    /**
     * Use system monotonic clock for sync source.
     *
     * @see System#nanoTime
     */
    public static final int SYNC_SOURCE_SYSTEM_CLOCK = 1;

    /**
     * Use audio track for sync source. This requires audio data and an audio track.
     *
     * @see AudioTrack#getTimestamp
     */
    public static final int SYNC_SOURCE_AUDIO = 2;

    /**
     * Use vsync as the sync source. This requires video data and an output surface that
     * directly renders to the display, e.g. {@link android.view.SurfaceView}
     * <p>
     * This mode allows smoother playback experience by adjusting the playback speed
     * to match the vsync rate, e.g. playing 30fps content on a 59.94Hz display.
     * When using this mode, the tolerance should be set to greater than 0 (e.g. at least
     * 1/1000), so that the playback speed can actually be adjusted.
     * <p>
     * This mode can also be used to play 25fps content on a 60Hz display using
     * a 2:3 pulldown (basically playing the content at 24fps), which results on
     * better playback experience on most devices. In this case the tolerance should be
     * at least (1/24).
     *
     * @see android.view.Choreographer.FrameCallback#doFrame
     * @see android.view.Display#getAppVsyncOffsetNanos
     */
    public static final int SYNC_SOURCE_VSYNC = 3;

    /** @hide */
    @IntDef(
        value = {
            AUDIO_ADJUST_MODE_DEFAULT,
            AUDIO_ADJUST_MODE_STRETCH,
            AUDIO_ADJUST_MODE_RESAMPLE,
        }
    )
    @Retention(RetentionPolicy.SOURCE)
    public @interface AudioAdjustMode {}

    /**
     * System will determine best handling of audio for playback rate
     * adjustments.
     * <p>
     * Used by default. This will make audio play faster or slower as required
     * by the sync source without changing its pitch; however, system may fall
     * back to some other method (e.g. change the pitch, or mute the audio) if
     * time stretching is no longer supported for the playback rate.
     */
    public static final int AUDIO_ADJUST_MODE_DEFAULT = 0;

    /**
     * Time stretch audio when playback rate must be adjusted.
     * <p>
     * This will make audio play faster or slower as required by the sync source
     * without changing its pitch, as long as it is supported for the playback
     * rate.
     *
     * @see MediaSync#PLAYBACK_RATE_AUDIO_MODE_STRETCH
     * @see MediaPlayer#PLAYBACK_RATE_AUDIO_MODE_STRETCH
     */
    public static final int AUDIO_ADJUST_MODE_STRETCH = 1;

    /**
     * Resample audio when playback rate must be adjusted.
     * <p>
     * This will make audio play faster or slower as required by the sync source
     * by changing its pitch (making it lower to play slower, and higher to play
     * faster.)
     *
     * @see MediaSync#PLAYBACK_RATE_AUDIO_MODE_RESAMPLE
     * @see MediaPlayer#PLAYBACK_RATE_AUDIO_MODE_RESAMPLE
     */
    public static final int AUDIO_ADJUST_MODE_RESAMPLE = 2;

    // flags to indicate which params are actually set
    private static final int SET_SYNC_SOURCE         = 1 << 0;
    private static final int SET_AUDIO_ADJUST_MODE   = 1 << 1;
    private static final int SET_TOLERANCE           = 1 << 2;
    private static final int SET_FRAME_RATE          = 1 << 3;
    private int mSet = 0;

    // params
    private int mAudioAdjustMode = AUDIO_ADJUST_MODE_DEFAULT;
    private int mSyncSource = SYNC_SOURCE_DEFAULT;
    private float mTolerance = 0.f;
    private float mFrameRate = 0.f;

    /**
     * Allows defaults to be returned for properties not set.
     * Otherwise a {@link java.lang.IllegalArgumentException} exception
     * is raised when getting those properties
     * which have defaults but have never been set.
     * @return this <code>SyncParams</code> instance.
     */
    public SyncParams allowDefaults() {
        // frame rate intentionally excluded: it has no meaningful default
        mSet |= SET_SYNC_SOURCE | SET_AUDIO_ADJUST_MODE | SET_TOLERANCE;
        return this;
    }

    /**
     * Sets the audio adjust mode.
     * @param audioAdjustMode
     * @return this <code>SyncParams</code> instance.
     */
    public SyncParams setAudioAdjustMode(@AudioAdjustMode int audioAdjustMode) {
        mAudioAdjustMode = audioAdjustMode;
        mSet |= SET_AUDIO_ADJUST_MODE;
        return this;
    }

    /**
     * Retrieves the audio adjust mode.
     * @return audio adjust mode
     * @throws IllegalStateException if the audio adjust mode is not set.
     */
    public @AudioAdjustMode int getAudioAdjustMode() {
        if ((mSet & SET_AUDIO_ADJUST_MODE) == 0) {
            throw new IllegalStateException("audio adjust mode not set");
        }
        return mAudioAdjustMode;
    }

    /**
     * Sets the sync source.
     * @param syncSource
     * @return this <code>SyncParams</code> instance.
     */
    public SyncParams setSyncSource(@SyncSource int syncSource) {
        mSyncSource = syncSource;
        mSet |= SET_SYNC_SOURCE;
        return this;
    }

    /**
     * Retrieves the sync source.
     * @return sync source
     * @throws IllegalStateException if the sync source is not set.
     */
    public @SyncSource int getSyncSource() {
        if ((mSet & SET_SYNC_SOURCE) == 0) {
            throw new IllegalStateException("sync source not set");
        }
        return mSyncSource;
    }

    /**
     * Sets the tolerance. The default tolerance is platform specific, but is never more than 1/24.
     * @param tolerance A non-negative number representing
     *     the maximum deviation of the playback rate from the playback rate
     *     set. ({@code abs(actual_rate - set_rate) / set_rate})
     * @return this <code>SyncParams</code> instance.
     * @throws IllegalArgumentException if the tolerance is negative, or not less than one.
     */
    public SyncParams setTolerance(float tolerance) {
        if (tolerance < 0.f || tolerance >= 1.f) {
            throw new IllegalArgumentException("tolerance must be less than one and non-negative");
        }
        mTolerance = tolerance;
        mSet |= SET_TOLERANCE;
        return this;
    }

    /**
     * Retrieves the tolerance factor.
     * @return tolerance factor. A non-negative number representing
     *     the maximum deviation of the playback rate from the playback rate
     *     set. ({@code abs(actual_rate - set_rate) / set_rate})
     * @throws IllegalStateException if tolerance is not set.
     */
    public float getTolerance() {
        if ((mSet & SET_TOLERANCE) == 0) {
            throw new IllegalStateException("tolerance not set");
        }
        return mTolerance;
    }

    /**
     * Sets the video frame rate hint to be used. By default the frame rate is unspecified.
     * @param frameRate A non-negative number used as an initial hint on
     *     the video frame rate to be used when using vsync as the sync source. A negative
     *     number is used to clear a previous hint.
     * @return this <code>SyncParams</code> instance.
     */
    public SyncParams setFrameRate(float frameRate) {
        mFrameRate = frameRate;
        mSet |= SET_FRAME_RATE;
        return this;
    }

    /**
     * Retrieves the video frame rate hint.
     * @return the frame rate hint in frames per second, or a negative
     *     number representing the desire to clear a previous hint using these params.
     * @throws IllegalStateException if frame rate is not set.
     */
    public float getFrameRate() {
        if ((mSet & SET_FRAME_RATE) == 0) {
            throw new IllegalStateException("frame rate not set");
        }
        return mFrameRate;
    }

}
diff --git a/android/media/ThumbnailUtils.java b/android/media/ThumbnailUtils.java
new file mode 100644
index 00000000..abd6f4a4
--- /dev/null
+++ b/android/media/ThumbnailUtils.java
@@ -0,0 +1,522 @@
+/*
+ * Copyright (C) 2009 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.content.ContentResolver;
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.graphics.Canvas;
+import android.graphics.Matrix;
+import android.graphics.Rect;
+import android.media.MediaMetadataRetriever;
+import android.media.MediaFile.MediaFileType;
+import android.net.Uri;
+import android.os.ParcelFileDescriptor;
+import android.provider.MediaStore.Images;
+import android.util.Log;
+
+import java.io.FileInputStream;
+import java.io.FileDescriptor;
+import java.io.IOException;
+
+/**
+ * Thumbnail generation routines for media provider.
+ */
+
+public class ThumbnailUtils {
+    private static final String TAG = "ThumbnailUtils";
+
+    /* Maximum pixels size for created bitmap (MINI_KIND): 512 * 384. */
+    private static final int MAX_NUM_PIXELS_THUMBNAIL = 512 * 384;
+    /* Maximum pixels size for created bitmap (MICRO_KIND): 160 * 120. */
+    private static final int MAX_NUM_PIXELS_MICRO_THUMBNAIL = 160 * 120;
+    /* Sentinel meaning "no constraint" for computeSampleSize() arguments. */
+    private static final int UNCONSTRAINED = -1;
+
+    /* Options used internally (bit flags combined with OPTIONS_RECYCLE_INPUT). */
+    private static final int OPTIONS_NONE = 0x0;
+    private static final int OPTIONS_SCALE_UP = 0x1;
+
+    /**
+     * Constant used to indicate we should recycle the input in
+     * {@link #extractThumbnail(Bitmap, int, int, int)} unless the output is the input.
+     */
+    public static final int OPTIONS_RECYCLE_INPUT = 0x2;
+
+    /**
+     * Constant used to indicate the dimension of mini thumbnail.
+     * @hide Only used by media framework and media provider internally.
+     */
+    public static final int TARGET_SIZE_MINI_THUMBNAIL = 320;
+
+    /**
+     * Constant used to indicate the dimension of micro thumbnail.
+     * @hide Only used by media framework and media provider internally.
+     */
+    public static final int TARGET_SIZE_MICRO_THUMBNAIL = 96;
+
+    /**
+     * This method first examines if the thumbnail embedded in EXIF is bigger than our target
+     * size. If not, then it'll create a thumbnail from original image. Due to efficiency
+     * consideration, we want to let MediaThumbRequest avoid calling this method twice for
+     * both kinds, so it only requests for MICRO_KIND and set saveImage to true.
+     *
+     * This method always returns a "square thumbnail" for MICRO_KIND thumbnail.
+     *
+     * @param filePath the path of image file
+     * @param kind could be MINI_KIND or MICRO_KIND
+     * @return Bitmap, or null on failures
+     *
+     * @hide This method is only used by media framework and media provider internally.
+     */
+    public static Bitmap createImageThumbnail(String filePath, int kind) {
+        boolean wantMini = (kind == Images.Thumbnails.MINI_KIND);
+        int targetSize = wantMini
+                ? TARGET_SIZE_MINI_THUMBNAIL
+                : TARGET_SIZE_MICRO_THUMBNAIL;
+        int maxPixels = wantMini
+                ? MAX_NUM_PIXELS_THUMBNAIL
+                : MAX_NUM_PIXELS_MICRO_THUMBNAIL;
+        SizedThumbnailBitmap sizedThumbnailBitmap = new SizedThumbnailBitmap();
+        Bitmap bitmap = null;
+        // Only JPEG and raw images can carry an EXIF thumbnail; try that fast
+        // path first before decoding the full image.
+        MediaFileType fileType = MediaFile.getFileType(filePath);
+        if (fileType != null && (fileType.fileType == MediaFile.FILE_TYPE_JPEG
+                || MediaFile.isRawImageFileType(fileType.fileType))) {
+            createThumbnailFromEXIF(filePath, targetSize, maxPixels, sizedThumbnailBitmap);
+            bitmap = sizedThumbnailBitmap.mBitmap;
+        }
+
+        if (bitmap == null) {
+            // Fall back to decoding the full image, downsampled to fit maxPixels.
+            FileInputStream stream = null;
+            try {
+                stream = new FileInputStream(filePath);
+                FileDescriptor fd = stream.getFD();
+                BitmapFactory.Options options = new BitmapFactory.Options();
+                options.inSampleSize = 1;
+                // First pass: decode bounds only, to pick a sample size.
+                options.inJustDecodeBounds = true;
+                BitmapFactory.decodeFileDescriptor(fd, null, options);
+                // mCancel is a hidden BitmapFactory.Options field; -1 dimensions
+                // indicate the bounds decode failed.
+                if (options.mCancel || options.outWidth == -1
+                        || options.outHeight == -1) {
+                    return null;
+                }
+                options.inSampleSize = computeSampleSize(
+                        options, targetSize, maxPixels);
+                // Second pass: real decode at the computed sample size.
+                options.inJustDecodeBounds = false;
+
+                options.inDither = false;
+                options.inPreferredConfig = Bitmap.Config.ARGB_8888;
+                bitmap = BitmapFactory.decodeFileDescriptor(fd, null, options);
+            } catch (IOException ex) {
+                Log.e(TAG, "", ex);
+            } catch (OutOfMemoryError oom) {
+                Log.e(TAG, "Unable to decode file " + filePath + ". OutOfMemoryError.", oom);
+            } finally {
+                try {
+                    if (stream != null) {
+                        stream.close();
+                    }
+                } catch (IOException ex) {
+                    Log.e(TAG, "", ex);
+                }
+            }
+
+        }
+
+        if (kind == Images.Thumbnails.MICRO_KIND) {
+            // now we make it a "square thumbnail" for MICRO_KIND thumbnail
+            bitmap = extractThumbnail(bitmap,
+                    TARGET_SIZE_MICRO_THUMBNAIL,
+                    TARGET_SIZE_MICRO_THUMBNAIL, OPTIONS_RECYCLE_INPUT);
+        }
+        return bitmap;
+    }
+
+    /**
+     * Create a video thumbnail for a video. May return null if the video is
+     * corrupt or the format is not supported.
+     *
+     * @param filePath the path of video file
+     * @param kind could be MINI_KIND or MICRO_KIND
+     */
+    public static Bitmap createVideoThumbnail(String filePath, int kind) {
+        Bitmap bitmap = null;
+        MediaMetadataRetriever retriever = new MediaMetadataRetriever();
+        try {
+            retriever.setDataSource(filePath);
+            // A timeUs of -1 lets the retriever pick a representative frame
+            // (see MediaMetadataRetriever.getFrameAtTime documentation).
+            bitmap = retriever.getFrameAtTime(-1);
+        } catch (IllegalArgumentException ex) {
+            // Assume this is a corrupt video file
+        } catch (RuntimeException ex) {
+            // Assume this is a corrupt video file.
+        } finally {
+            try {
+                retriever.release();
+            } catch (RuntimeException ex) {
+                // Ignore failures while cleaning up.
+            }
+        }
+
+        if (bitmap == null) return null;
+
+        if (kind == Images.Thumbnails.MINI_KIND) {
+            // Scale down the bitmap if it's too large.
+            int width = bitmap.getWidth();
+            int height = bitmap.getHeight();
+            int max = Math.max(width, height);
+            if (max > 512) {
+                // Cap the longer edge at 512 pixels, preserving aspect ratio.
+                float scale = 512f / max;
+                int w = Math.round(scale * width);
+                int h = Math.round(scale * height);
+                bitmap = Bitmap.createScaledBitmap(bitmap, w, h, true);
+            }
+        } else if (kind == Images.Thumbnails.MICRO_KIND) {
+            // MICRO_KIND thumbnails are square center crops; the decoded frame
+            // is recycled by extractThumbnail unless returned unchanged.
+            bitmap = extractThumbnail(bitmap,
+                    TARGET_SIZE_MICRO_THUMBNAIL,
+                    TARGET_SIZE_MICRO_THUMBNAIL,
+                    OPTIONS_RECYCLE_INPUT);
+        }
+        return bitmap;
+    }
+
+    /**
+     * Creates a centered bitmap of the desired size.
+     *
+     * @param source original bitmap source
+     * @param width targeted width
+     * @param height targeted height
+     * @return the centered thumbnail, or null if {@code source} is null.
+     */
+    public static Bitmap extractThumbnail(
+            Bitmap source, int width, int height) {
+        // Convenience overload: no extra options (the source is not recycled).
+        return extractThumbnail(source, width, height, OPTIONS_NONE);
+    }
+
+    /**
+     * Creates a centered bitmap of the desired size.
+     *
+     * @param source original bitmap source
+     * @param width targeted width
+     * @param height targeted height
+     * @param options options used during thumbnail extraction
+     * @return the centered thumbnail, or null if {@code source} is null.
+     */
+    public static Bitmap extractThumbnail(
+            Bitmap source, int width, int height, int options) {
+        if (source == null) {
+            return null;
+        }
+
+        // Scale the smaller source dimension to its target so the other
+        // dimension overshoots; transform() then center-crops the excess.
+        float scale;
+        if (source.getWidth() < source.getHeight()) {
+            scale = width / (float) source.getWidth();
+        } else {
+            scale = height / (float) source.getHeight();
+        }
+        Matrix matrix = new Matrix();
+        matrix.setScale(scale, scale);
+        Bitmap thumbnail = transform(matrix, source, width, height,
+                OPTIONS_SCALE_UP | options);
+        return thumbnail;
+    }
+
+    /*
+     * Compute the sample size as a function of minSideLength
+     * and maxNumOfPixels.
+     * minSideLength is used to specify that minimal width or height of a
+     * bitmap.
+     * maxNumOfPixels is used to specify the maximal size in pixels that is
+     * tolerable in terms of memory usage.
+     *
+     * The function returns a sample size based on the constraints.
+     * Both size and minSideLength can be passed in as IImage.UNCONSTRAINED,
+     * which indicates no care of the corresponding constraint.
+     * The functions prefers returning a sample size that
+     * generates a smaller bitmap, unless minSideLength = IImage.UNCONSTRAINED.
+     *
+     * Also, the function rounds up the sample size to a power of 2 or multiple
+     * of 8 because BitmapFactory only honors sample size this way.
+     * For example, BitmapFactory downsamples an image by 2 even though the
+     * request is 3. So we round up the sample size to avoid OOM.
+     */
+    private static int computeSampleSize(BitmapFactory.Options options,
+            int minSideLength, int maxNumOfPixels) {
+        int initialSize = computeInitialSampleSize(options, minSideLength,
+                maxNumOfPixels);
+
+        int roundedSize;
+        if (initialSize <= 8 ) {
+            // Round up to the next power of two (1, 2, 4, 8).
+            roundedSize = 1;
+            while (roundedSize < initialSize) {
+                roundedSize <<= 1;
+            }
+        } else {
+            // Above 8, round up to the next multiple of 8.
+            roundedSize = (initialSize + 7) / 8 * 8;
+        }
+
+        return roundedSize;
+    }
+
+    // Derives the raw (unrounded) sample size from the decoded bounds.
+    // lowerBound keeps the result under maxNumOfPixels; upperBound keeps the
+    // smaller side at least minSideLength.
+    private static int computeInitialSampleSize(BitmapFactory.Options options,
+            int minSideLength, int maxNumOfPixels) {
+        double w = options.outWidth;
+        double h = options.outHeight;
+
+        int lowerBound = (maxNumOfPixels == UNCONSTRAINED) ? 1 :
+                (int) Math.ceil(Math.sqrt(w * h / maxNumOfPixels));
+        int upperBound = (minSideLength == UNCONSTRAINED) ? 128 :
+                (int) Math.min(Math.floor(w / minSideLength),
+                Math.floor(h / minSideLength));
+
+        if (upperBound < lowerBound) {
+            // return the larger one when there is no overlapping zone.
+            return lowerBound;
+        }
+
+        if ((maxNumOfPixels == UNCONSTRAINED) &&
+                (minSideLength == UNCONSTRAINED)) {
+            return 1;
+        } else if (minSideLength == UNCONSTRAINED) {
+            return lowerBound;
+        } else {
+            return upperBound;
+        }
+    }
+
+    /**
+     * Make a bitmap from a given Uri, minimal side length, and maximum number of pixels.
+     * The image data will be read from specified pfd if it's not null, otherwise
+     * a new input stream will be created using specified ContentResolver.
+     *
+     * Clients are allowed to pass their own BitmapFactory.Options used for bitmap decoding. A
+     * new BitmapFactory.Options will be created if options is null.
+     *
+     * @return the decoded bitmap, or null on any failure. The pfd (whether
+     *     passed in or opened here) is always closed before returning.
+     */
+    private static Bitmap makeBitmap(int minSideLength, int maxNumOfPixels,
+            Uri uri, ContentResolver cr, ParcelFileDescriptor pfd,
+            BitmapFactory.Options options) {
+        Bitmap b = null;
+        try {
+            if (pfd == null) pfd = makeInputStream(uri, cr);
+            if (pfd == null) return null;
+            if (options == null) options = new BitmapFactory.Options();
+
+            FileDescriptor fd = pfd.getFileDescriptor();
+            options.inSampleSize = 1;
+            // First pass: bounds-only decode to choose a sample size.
+            options.inJustDecodeBounds = true;
+            BitmapFactory.decodeFileDescriptor(fd, null, options);
+            if (options.mCancel || options.outWidth == -1
+                    || options.outHeight == -1) {
+                return null;
+            }
+            options.inSampleSize = computeSampleSize(
+                    options, minSideLength, maxNumOfPixels);
+            // Second pass: actual decode at the computed sample size.
+            options.inJustDecodeBounds = false;
+
+            options.inDither = false;
+            options.inPreferredConfig = Bitmap.Config.ARGB_8888;
+            b = BitmapFactory.decodeFileDescriptor(fd, null, options);
+        } catch (OutOfMemoryError ex) {
+            Log.e(TAG, "Got oom exception ", ex);
+            return null;
+        } finally {
+            closeSilently(pfd);
+        }
+        return b;
+    }
+
+    // Best-effort close of the descriptor; failures during cleanup are
+    // deliberately swallowed (nothing useful can be done with them here).
+    private static void closeSilently(ParcelFileDescriptor c) {
+        if (c == null) return;
+        try {
+            c.close();
+        } catch (Throwable t) {
+            // do nothing
+        }
+    }
+
+    // Opens a read-only file descriptor for the Uri, or returns null if the
+    // resolver cannot open it. (Despite the name, this returns a
+    // ParcelFileDescriptor, not an InputStream.)
+    private static ParcelFileDescriptor makeInputStream(
+            Uri uri, ContentResolver cr) {
+        try {
+            return cr.openFileDescriptor(uri, "r");
+        } catch (IOException ex) {
+            return null;
+        }
+    }
+
+    /**
+     * Transform source Bitmap to targeted width and height.
+     *
+     * With OPTIONS_SCALE_UP the source is scaled (via {@code scaler}) and then
+     * center-cropped to exactly targetWidth x targetHeight; without it, a
+     * smaller source is centered on a black (transparent ARGB_8888) canvas.
+     * With OPTIONS_RECYCLE_INPUT the source bitmap is recycled unless it is
+     * itself returned.
+     */
+    private static Bitmap transform(Matrix scaler,
+            Bitmap source,
+            int targetWidth,
+            int targetHeight,
+            int options) {
+        boolean scaleUp = (options & OPTIONS_SCALE_UP) != 0;
+        boolean recycle = (options & OPTIONS_RECYCLE_INPUT) != 0;
+
+        int deltaX = source.getWidth() - targetWidth;
+        int deltaY = source.getHeight() - targetHeight;
+        if (!scaleUp && (deltaX < 0 || deltaY < 0)) {
+            /*
+             * In this case the bitmap is smaller, at least in one dimension,
+             * than the target. Transform it by placing as much of the image
+             * as possible into the target and leaving the top/bottom or
+             * left/right (or both) black.
+             */
+            Bitmap b2 = Bitmap.createBitmap(targetWidth, targetHeight,
+                    Bitmap.Config.ARGB_8888);
+            Canvas c = new Canvas(b2);
+
+            // Center the overlapping region of source and target.
+            int deltaXHalf = Math.max(0, deltaX / 2);
+            int deltaYHalf = Math.max(0, deltaY / 2);
+            Rect src = new Rect(
+                    deltaXHalf,
+                    deltaYHalf,
+                    deltaXHalf + Math.min(targetWidth, source.getWidth()),
+                    deltaYHalf + Math.min(targetHeight, source.getHeight()));
+            int dstX = (targetWidth  - src.width())  / 2;
+            int dstY = (targetHeight - src.height()) / 2;
+            Rect dst = new Rect(
+                    dstX,
+                    dstY,
+                    targetWidth - dstX,
+                    targetHeight - dstY);
+            c.drawBitmap(source, src, dst, null);
+            if (recycle) {
+                source.recycle();
+            }
+            c.setBitmap(null);
+            return b2;
+        }
+        float bitmapWidthF = source.getWidth();
+        float bitmapHeightF = source.getHeight();
+
+        float bitmapAspect = bitmapWidthF / bitmapHeightF;
+        float viewAspect   = (float) targetWidth / targetHeight;
+
+        if (bitmapAspect > viewAspect) {
+            float scale = targetHeight / bitmapHeightF;
+            // Skip scaling when it is within ~10% of identity: not worth the
+            // cost of producing a new bitmap.
+            if (scale < .9F || scale > 1F) {
+                scaler.setScale(scale, scale);
+            } else {
+                scaler = null;
+            }
+        } else {
+            float scale = targetWidth / bitmapWidthF;
+            if (scale < .9F || scale > 1F) {
+                scaler.setScale(scale, scale);
+            } else {
+                scaler = null;
+            }
+        }
+
+        Bitmap b1;
+        if (scaler != null) {
+            // this is used for minithumb and crop, so we want to filter here.
+            b1 = Bitmap.createBitmap(source, 0, 0,
+                    source.getWidth(), source.getHeight(), scaler, true);
+        } else {
+            b1 = source;
+        }
+
+        if (recycle && b1 != source) {
+            source.recycle();
+        }
+
+        // Center-crop the scaled bitmap down to the exact target dimensions.
+        int dx1 = Math.max(0, b1.getWidth() - targetWidth);
+        int dy1 = Math.max(0, b1.getHeight() - targetHeight);
+
+        Bitmap b2 = Bitmap.createBitmap(
+                b1,
+                dx1 / 2,
+                dy1 / 2,
+                targetWidth,
+                targetHeight);
+
+        if (b2 != b1) {
+            // b1 was an intermediate (or a recyclable source); release it.
+            if (recycle || b1 != source) {
+                b1.recycle();
+            }
+        }
+
+        return b2;
+    }
+
+    /**
+     * SizedThumbnailBitmap contains the bitmap, which is downsampled either from
+     * the thumbnail in exif or the full image.
+     * mThumbnailData, mThumbnailWidth and mThumbnailHeight are set together only if mThumbnail
+     * is not null.
+     *
+     * The width/height of the sized bitmap may be different from mThumbnailWidth/mThumbnailHeight.
+     */
+    private static class SizedThumbnailBitmap {
+        // Raw EXIF thumbnail bytes (only set when the bitmap came from EXIF).
+        public byte[] mThumbnailData;
+        // The decoded (possibly downsampled) thumbnail bitmap.
+        public Bitmap mBitmap;
+        // Pre-downsampling dimensions of the EXIF thumbnail.
+        public int mThumbnailWidth;
+        public int mThumbnailHeight;
+    }
+
+    /**
+     * Creates a bitmap by either downsampling from the thumbnail in EXIF or the full image.
+     * The functions returns a SizedThumbnailBitmap,
+     * which contains a downsampled bitmap and the thumbnail data in EXIF if exists.
+     *
+     * The EXIF thumbnail is preferred only when, after downsampling, it would
+     * be at least as wide as the downsampled full image.
+     */
+    private static void createThumbnailFromEXIF(String filePath, int targetSize,
+            int maxPixels, SizedThumbnailBitmap sizedThumbBitmap) {
+        if (filePath == null) return;
+
+        ExifInterface exif = null;
+        byte [] thumbData = null;
+        try {
+            exif = new ExifInterface(filePath);
+            // Returns null when the file carries no EXIF thumbnail.
+            thumbData = exif.getThumbnail();
+        } catch (IOException ex) {
+            Log.w(TAG, ex);
+        }
+
+        BitmapFactory.Options fullOptions = new BitmapFactory.Options();
+        BitmapFactory.Options exifOptions = new BitmapFactory.Options();
+        int exifThumbWidth = 0;
+        int fullThumbWidth = 0;
+
+        // Compute exifThumbWidth.
+        if (thumbData != null) {
+            exifOptions.inJustDecodeBounds = true;
+            BitmapFactory.decodeByteArray(thumbData, 0, thumbData.length, exifOptions);
+            exifOptions.inSampleSize = computeSampleSize(exifOptions, targetSize, maxPixels);
+            exifThumbWidth = exifOptions.outWidth / exifOptions.inSampleSize;
+        }
+
+        // Compute fullThumbWidth.
+        fullOptions.inJustDecodeBounds = true;
+        BitmapFactory.decodeFile(filePath, fullOptions);
+        fullOptions.inSampleSize = computeSampleSize(fullOptions, targetSize, maxPixels);
+        fullThumbWidth = fullOptions.outWidth / fullOptions.inSampleSize;
+
+        // Choose the larger thumbnail as the returning sizedThumbBitmap.
+        if (thumbData != null && exifThumbWidth >= fullThumbWidth) {
+            int width  = exifOptions.outWidth;
+            int height = exifOptions.outHeight;
+            exifOptions.inJustDecodeBounds = false;
+            sizedThumbBitmap.mBitmap = BitmapFactory.decodeByteArray(thumbData, 0,
+                    thumbData.length, exifOptions);
+            if (sizedThumbBitmap.mBitmap != null) {
+                // Record the raw EXIF bytes and pre-downsampling dimensions.
+                sizedThumbBitmap.mThumbnailData = thumbData;
+                sizedThumbBitmap.mThumbnailWidth = width;
+                sizedThumbBitmap.mThumbnailHeight = height;
+            }
+        } else {
+            fullOptions.inJustDecodeBounds = false;
+            sizedThumbBitmap.mBitmap = BitmapFactory.decodeFile(filePath, fullOptions);
+        }
+    }
+}
diff --git a/android/media/TimedMetaData.java b/android/media/TimedMetaData.java
new file mode 100644
index 00000000..0ab52d73
--- /dev/null
+++ b/android/media/TimedMetaData.java
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.os.Parcel;
+
+/**
+ * Class that embodies one timed metadata access unit, including
+ *
+ * <ul>
+ * <li> a time stamp, and </li>
+ * <li> raw uninterpreted byte-array extracted directly from the container. </li>
+ * </ul>
+ *
+ * @see MediaPlayer#setOnTimedMetaDataAvailableListener(android.media.MediaPlayer.OnTimedMetaDataListener)
+ */
+public final class TimedMetaData {
+    private static final String TAG = "TimedMetaData";
+
+    // Presentation timestamp of this access unit, in microseconds.
+    private long mTimestampUs;
+    // Raw metadata payload as read from the parcel.
+    private byte[] mMetaData;
+
+    /**
+     * Static factory used by the framework to build an instance from a
+     * native-populated parcel.
+     * @hide
+     */
+    static TimedMetaData createTimedMetaDataFromParcel(Parcel parcel) {
+        return new TimedMetaData(parcel);
+    }
+
+    private TimedMetaData(Parcel parcel) {
+        if (!parseParcel(parcel)) {
+            throw new IllegalArgumentException("parseParcel() fails");
+        }
+    }
+
+    /**
+     * @return the timestamp associated with this metadata access unit in microseconds;
+     * 0 denotes playback start.
+     */
+    public long getTimestamp() {
+        return mTimestampUs;
+    }
+
+    /**
+     * @return raw, uninterpreted content of this metadata access unit; for ID3 tags this includes
+     * everything starting from the 3 byte signature "ID3".
+     * Note: returns the internal array without copying.
+     */
+    public byte[] getMetaData() {
+        return mMetaData;
+    }
+
+    // Reads (timestampUs: long, length: int, payload: byte[length]) from the
+    // start of the parcel. Returns false when the parcel is empty.
+    private boolean parseParcel(Parcel parcel) {
+        parcel.setDataPosition(0);
+        if (parcel.dataAvail() == 0) {
+            return false;
+        }
+
+        mTimestampUs = parcel.readLong();
+        mMetaData = new byte[parcel.readInt()];
+        parcel.readByteArray(mMetaData);
+
+        return true;
+    }
+}
diff --git a/android/media/TimedText.java b/android/media/TimedText.java
new file mode 100644
index 00000000..e6a7e139
--- /dev/null
+++ b/android/media/TimedText.java
@@ -0,0 +1,734 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.graphics.Rect;
+import android.os.Parcel;
+import android.util.Log;
+import java.util.HashMap;
+import java.util.Set;
+import java.util.List;
+import java.util.ArrayList;
+
+/**
+ * Class to hold the timed text's metadata, including:
+ * <ul>
+ * <li> The characters for rendering</li>
+ * <li> The rendering position for the timed text</li>
+ * </ul>
+ *
+ * <p> To render the timed text, applications need to do the following:
+ *
+ * <ul>
+ * <li> Implement the {@link MediaPlayer.OnTimedTextListener} interface</li>
+ * <li> Register the {@link MediaPlayer.OnTimedTextListener} callback on a MediaPlayer object that is used for playback</li>
+ * <li> When a onTimedText callback is received, do the following:
+ * <ul>
+ * <li> call {@link #getText} to get the characters for rendering</li>
+ * <li> call {@link #getBounds} to get the text rendering area/region</li>
+ * </ul>
+ * </li>
+ * </ul>
+ *
+ * @see android.media.MediaPlayer
+ */
+public final class TimedText
+{
+    private static final int FIRST_PUBLIC_KEY = 1;
+
+    // These keys must be in sync with the keys in TextDescription.h
+    private static final int KEY_DISPLAY_FLAGS                 = 1; // int
+    private static final int KEY_STYLE_FLAGS                   = 2; // int
+    private static final int KEY_BACKGROUND_COLOR_RGBA         = 3; // int
+    private static final int KEY_HIGHLIGHT_COLOR_RGBA          = 4; // int
+    private static final int KEY_SCROLL_DELAY                  = 5; // int
+    private static final int KEY_WRAP_TEXT                     = 6; // int
+    private static final int KEY_START_TIME                    = 7; // int
+    private static final int KEY_STRUCT_BLINKING_TEXT_LIST     = 8; // List<CharPos>
+    private static final int KEY_STRUCT_FONT_LIST              = 9; // List<Font>
+    private static final int KEY_STRUCT_HIGHLIGHT_LIST         = 10; // List<CharPos>
+    private static final int KEY_STRUCT_HYPER_TEXT_LIST        = 11; // List<HyperText>
+    private static final int KEY_STRUCT_KARAOKE_LIST           = 12; // List<Karaoke>
+    private static final int KEY_STRUCT_STYLE_LIST             = 13; // List<Style>
+    private static final int KEY_STRUCT_TEXT_POS               = 14; // TextPos
+    private static final int KEY_STRUCT_JUSTIFICATION          = 15; // Justification
+    private static final int KEY_STRUCT_TEXT                   = 16; // Text
+
+    private static final int LAST_PUBLIC_KEY                   = 16;
+
+    private static final int FIRST_PRIVATE_KEY                 = 101;
+
+    // The following keys are used between TimedText.java and
+    // TextDescription.cpp in order to parse the Parcel.
+    private static final int KEY_GLOBAL_SETTING                = 101;
+    private static final int KEY_LOCAL_SETTING                 = 102;
+    private static final int KEY_START_CHAR                    = 103;
+    private static final int KEY_END_CHAR                      = 104;
+    private static final int KEY_FONT_ID                       = 105;
+    private static final int KEY_FONT_SIZE                     = 106;
+    private static final int KEY_TEXT_COLOR_RGBA               = 107;
+
+    private static final int LAST_PRIVATE_KEY                  = 107;
+
+    private static final String TAG = "TimedText";
+
+    // Key -> parsed value for every record found while parsing the parcel.
+    private final HashMap<Integer, Object> mKeyObjectMap =
+            new HashMap<Integer, Object>();
+
+    // Scalar settings; -1 means "not present in the parcel".
+    private int mDisplayFlags = -1;
+    private int mBackgroundColorRGBA = -1;
+    private int mHighlightColorRGBA = -1;
+    private int mScrollDelay = -1;
+    private int mWrapText = -1;
+
+    // Structured settings; null means "not present in the parcel".
+    private List<CharPos> mBlinkingPosList = null;
+    private List<CharPos> mHighlightPosList = null;
+    private List<Karaoke> mKaraokeList = null;
+    private List<Font> mFontList = null;
+    private List<Style> mStyleList = null;
+    private List<HyperText> mHyperTextList = null;
+
+    // Rendering region for the text; null when no bounds were supplied.
+    private Rect mTextBounds = null;
+    // The characters to render; null when no text was supplied.
+    private String mTextChars = null;
+
+    private Justification mJustification;
+
+    /**
+     * Helper class to hold the start char offset and end char offset
+     * for Blinking Text or Highlight Text. endChar is the end offset
+     * of the text (startChar + number of characters to be highlighted
+     * or blinked). The member variables in this class are read-only.
+     * {@hide}
+     */
+    public static final class CharPos {
+        /**
+         * The offset of the start character
+         */
+        public final int startChar;
+
+        /**
+         * The offset of the end character
+         */
+        public final int endChar;
+
+        /**
+         * Constructor
+         * @param startChar the offset of the start character.
+         * @param endChar the offset of the end character.
+         */
+        public CharPos(int startChar, int endChar) {
+            this.startChar = startChar;
+            this.endChar = endChar;
+        }
+    }
+
+    /**
+     * Helper class to hold the justification for text display in the text box.
+     * The member variables in this class are read-only.
+     * {@hide}
+     */
+    public static final class Justification {
+        /**
+         * horizontal justification 0: left, 1: centered, -1: right
+         */
+        public final int horizontalJustification;
+
+        /**
+         * vertical justification 0: top, 1: centered, -1: bottom
+         */
+        public final int verticalJustification;
+
+        /**
+         * Constructor
+         * @param horizontal the horizontal justification of the text.
+         * @param vertical the vertical justification of the text.
+         */
+        public Justification(int horizontal, int vertical) {
+            this.horizontalJustification = horizontal;
+            this.verticalJustification = vertical;
+        }
+    }
+
+    /**
+     * Helper class to hold the style information to display the text.
+     * The member variables in this class are read-only.
+     * {@hide}
+     */
+    public static final class Style {
+        /**
+         * The offset of the start character which applies this style
+         */
+        public final int startChar;
+
+        /**
+         * The offset of the end character which applies this style
+         */
+        public final int endChar;
+
+        /**
+         * ID of the font. This ID will be used to choose the font
+         * to be used from the font list.
+         */
+        public final int fontID;
+
+        /**
+         * True if the characters should be bold
+         */
+        public final boolean isBold;
+
+        /**
+         * True if the characters should be italic
+         */
+        public final boolean isItalic;
+
+        /**
+         * True if the characters should be underlined
+         */
+        public final boolean isUnderlined;
+
+        /**
+         * The size of the font
+         */
+        public final int fontSize;
+
+        /**
+         * To specify the RGBA color: 8 bits each of red, green, blue,
+         * and an alpha(transparency) value
+         */
+        public final int colorRGBA;
+
+        /**
+         * Constructor
+         * @param startChar the offset of the start character which applies this style
+         * @param endChar the offset of the end character which applies this style
+         * @param fontId the ID of the font.
+         * @param isBold whether the characters should be bold.
+         * @param isItalic whether the characters should be italic.
+         * @param isUnderlined whether the characters should be underlined.
+         * @param fontSize the size of the font.
+         * @param colorRGBA red, green, blue, and alpha value for color.
+         */
+        public Style(int startChar, int endChar, int fontId,
+                boolean isBold, boolean isItalic, boolean isUnderlined,
+                int fontSize, int colorRGBA) {
+            this.startChar = startChar;
+            this.endChar = endChar;
+            this.fontID = fontId;
+            this.isBold = isBold;
+            this.isItalic = isItalic;
+            this.isUnderlined = isUnderlined;
+            this.fontSize = fontSize;
+            this.colorRGBA = colorRGBA;
+        }
+    }
+
+    /**
+     * Helper class to hold the font ID and name.
+     * The member variables in this class are read-only.
+     * {@hide}
+     */
+    public static final class Font {
+        /**
+         * The font ID
+         */
+        public final int ID;
+
+        /**
+         * The font name
+         */
+        public final String name;
+
+        /**
+         * Constructor
+         * @param id the font ID.
+         * @param name the font name.
+         */
+        public Font(int id, String name) {
+            this.ID = id;
+            this.name = name;
+        }
+    }
+
+    /**
+     * Helper class to hold the karaoke information.
+     * The member variables in this class are read-only.
+     * {@hide}
+     */
+    public static final class Karaoke {
+        /**
+         * The start time (in milliseconds) to highlight the characters
+         * specified by startChar and endChar.
+         */
+        public final int startTimeMs;
+
+        /**
+         * The end time (in milliseconds) to highlight the characters
+         * specified by startChar and endChar.
+         */
+        public final int endTimeMs;
+
+        /**
+         * The offset of the start character to be highlighted
+         */
+        public final int startChar;
+
+        /**
+         * The offset of the end character to be highlighted
+         */
+        public final int endChar;
+
+        /**
+         * Constructor
+         * @param startTimeMs the start time (in milliseconds) to highlight
+         * the characters between startChar and endChar.
+         * @param endTimeMs the end time (in milliseconds) to highlight
+         * the characters between startChar and endChar.
+         * @param startChar the offset of the start character to be highlighted.
+         * @param endChar the offset of the end character to be highlighted.
+         */
+        public Karaoke(int startTimeMs, int endTimeMs, int startChar, int endChar) {
+            this.startTimeMs = startTimeMs;
+            this.endTimeMs = endTimeMs;
+            this.startChar = startChar;
+            this.endChar = endChar;
+        }
+    }
+
+    /**
+     * Helper class to hold the hyper text information.
+     * The member variables in this class are read-only.
+     * {@hide}
+     */
+    public static final class HyperText {
+        /**
+         * The offset of the start character
+         */
+        public final int startChar;
+
+        /**
+         * The offset of the end character
+         */
+        public final int endChar;
+
+        /**
+         * The linked-to URL
+         */
+        public final String URL;
+
+        /**
+         * The "alt" string for user display
+         */
+        public final String altString;
+
+
+        /**
+         * Constructor
+         * @param startChar the offset of the start character.
+         * @param endChar the offset of the end character.
+         * @param url the linked-to URL.
+         * @param alt the "alt" string for display.
+         */
+        public HyperText(int startChar, int endChar, String url, String alt) {
+            this.startChar = startChar;
+            this.endChar = endChar;
+            this.URL = url;
+            this.altString = alt;
+        }
+    }
+
+    /**
+     * Constructs a TimedText object from a parcel populated by the native
+     * player.
+     * @param parcel the parcel which contains the timed text.
+     * @throws IllegalArgumentException if parseParcel() fails.
+     * {@hide}
+     */
+    public TimedText(Parcel parcel) {
+        if (!parseParcel(parcel)) {
+            // Discard any partially-parsed records before reporting failure.
+            mKeyObjectMap.clear();
+            throw new IllegalArgumentException("parseParcel() fails");
+        }
+    }
+
+    /**
+     * Get the characters in the timed text.
+     *
+     * @return the characters as a String object in the TimedText. Applications
+     * should stop rendering previous timed text at the current rendering region if
+     * a null is returned, until the next non-null timed text is received.
+     */
+    public String getText() {
+        return mTextChars;
+    }
+
+    /**
+     * Get the rectangle area or region for rendering the timed text as specified
+     * by a Rect object.
+     *
+     * @return the rectangle region to render the characters in the timed text.
+     * If no bounds information is available (a null is returned), render the
+     * timed text at the center bottom of the display.
+     */
+    public Rect getBounds() {
+        return mTextBounds;
+    }
+
+    /*
+     * Go over all the records, collecting metadata keys and fields in the
+     * Parcel. These are stored in mKeyObjectMap for application to retrieve.
+     * @return false if an error occurred during parsing. Otherwise, true.
+     */
+    private boolean parseParcel(Parcel parcel) {
+        parcel.setDataPosition(0);
+        if (parcel.dataAvail() == 0) {
+            return false;
+        }
+
+        // The first record must be either a local-setting header (start time +
+        // text payload) or a global-setting header; anything else is invalid.
+        int type = parcel.readInt();
+        if (type == KEY_LOCAL_SETTING) {
+            type = parcel.readInt();
+            if (type != KEY_START_TIME) {
+                return false;
+            }
+            int mStartTimeMs = parcel.readInt();
+            mKeyObjectMap.put(type, mStartTimeMs);
+
+            type = parcel.readInt();
+            if (type != KEY_STRUCT_TEXT) {
+                return false;
+            }
+
+            // NOTE(review): textLen is read to advance past the length field
+            // but is otherwise unused; createByteArray() carries its own
+            // length prefix.
+            int textLen = parcel.readInt();
+            byte[] text = parcel.createByteArray();
+            if (text == null || text.length == 0) {
+                mTextChars = null;
+            } else {
+                // Decoded with the platform default charset — assumes the
+                // native layer wrote compatible bytes; TODO confirm.
+                mTextChars = new String(text);
+            }
+
+        } else if (type != KEY_GLOBAL_SETTING) {
+            Log.w(TAG, "Invalid timed text key found: " + type);
+            return false;
+        }
+
+        // Remaining records: a flat stream of (key, payload) pairs.
+        while (parcel.dataAvail() > 0) {
+            int key = parcel.readInt();
+            if (!isValidKey(key)) {
+                Log.w(TAG, "Invalid timed text key found: " + key);
+                return false;
+            }
+
+            Object object = null;
+
+            switch (key) {
+                case KEY_STRUCT_STYLE_LIST: {
+                    readStyle(parcel);
+                    object = mStyleList;
+                    break;
+                }
+                case KEY_STRUCT_FONT_LIST: {
+                    readFont(parcel);
+                    object = mFontList;
+                    break;
+                }
+                case KEY_STRUCT_HIGHLIGHT_LIST: {
+                    readHighlight(parcel);
+                    object = mHighlightPosList;
+                    break;
+                }
+                case KEY_STRUCT_KARAOKE_LIST: {
+                    readKaraoke(parcel);
+                    object = mKaraokeList;
+                    break;
+                }
+                case KEY_STRUCT_HYPER_TEXT_LIST: {
+                    readHyperText(parcel);
+                    object = mHyperTextList;
+
+                    break;
+                }
+                case KEY_STRUCT_BLINKING_TEXT_LIST: {
+                    readBlinkingText(parcel);
+                    object = mBlinkingPosList;
+
+                    break;
+                }
+                case KEY_WRAP_TEXT: {
+                    mWrapText = parcel.readInt();
+                    object = mWrapText;
+                    break;
+                }
+                case KEY_HIGHLIGHT_COLOR_RGBA: {
+                    mHighlightColorRGBA = parcel.readInt();
+                    object = mHighlightColorRGBA;
+                    break;
+                }
+                case KEY_DISPLAY_FLAGS: {
+                    mDisplayFlags = parcel.readInt();
+                    object = mDisplayFlags;
+                    break;
+                }
+                case KEY_STRUCT_JUSTIFICATION: {
+
+                    int horizontal = parcel.readInt();
+                    int vertical = parcel.readInt();
+                    mJustification = new Justification(horizontal, vertical);
+
+                    object = mJustification;
+                    break;
+                }
+                case KEY_BACKGROUND_COLOR_RGBA: {
+                    mBackgroundColorRGBA = parcel.readInt();
+                    object = mBackgroundColorRGBA;
+                    break;
+                }
+                case KEY_STRUCT_TEXT_POS: {
+                    // Wire order is top/left/bottom/right, but Rect takes
+                    // left/top/right/bottom. Note this record is stored only
+                    // in mTextBounds, not in mKeyObjectMap (object stays null).
+                    int top = parcel.readInt();
+                    int left = parcel.readInt();
+                    int bottom = parcel.readInt();
+                    int right = parcel.readInt();
+                    mTextBounds = new Rect(left, top, right, bottom);
+
+                    break;
+                }
+                case KEY_SCROLL_DELAY: {
+                    mScrollDelay = parcel.readInt();
+                    object = mScrollDelay;
+                    break;
+                }
+                default: {
+                    break;
+                }
+            }
+
+            if (object != null) {
+                if (mKeyObjectMap.containsKey(key)) {
+                    mKeyObjectMap.remove(key);
+                }
+                // Previous mapping will be replaced with the new object, if there was one.
+                mKeyObjectMap.put(key, object);
+            }
+        }
+
+        return true;
+    }
+
+    /*
+     * Parses one Style record and appends it to mStyleList (created lazily).
+     * Reads (key, value) pairs until an unrecognized key is seen; the data
+     * position is then rewound by 4 bytes so the caller re-reads that key.
+     * Fields never seen keep their sentinel defaults (-1 / false).
+     */
+    private void readStyle(Parcel parcel) {
+        boolean endOfStyle = false;
+        int startChar = -1;
+        int endChar = -1;
+        int fontId = -1;
+        boolean isBold = false;
+        boolean isItalic = false;
+        boolean isUnderlined = false;
+        int fontSize = -1;
+        int colorRGBA = -1;
+        while (!endOfStyle && (parcel.dataAvail() > 0)) {
+            int key = parcel.readInt();
+            switch (key) {
+                case KEY_START_CHAR: {
+                    startChar = parcel.readInt();
+                    break;
+                }
+                case KEY_END_CHAR: {
+                    endChar = parcel.readInt();
+                    break;
+                }
+                case KEY_FONT_ID: {
+                    fontId = parcel.readInt();
+                    break;
+                }
+                case KEY_STYLE_FLAGS: {
+                    int flags = parcel.readInt();
+                    // Bit flags: 0x1 bold, 0x2 italic, 0x4 underline; no bits
+                    // set means plain text. Decode with bit masks (the old
+                    // '%'/'/' arithmetic would misread any flag value with
+                    // bits above 0x4 set).
+                    isBold = ((flags & 0x1) != 0);
+                    isItalic = ((flags & 0x2) != 0);
+                    isUnderlined = ((flags & 0x4) != 0);
+                    break;
+                }
+                case KEY_FONT_SIZE: {
+                    fontSize = parcel.readInt();
+                    break;
+                }
+                case KEY_TEXT_COLOR_RGBA: {
+                    colorRGBA = parcel.readInt();
+                    break;
+                }
+                default: {
+                    // End of the Style parsing. Reset the data position back
+                    // to the position before the last parcel.readInt() call.
+                    parcel.setDataPosition(parcel.dataPosition() - 4);
+                    endOfStyle = true;
+                    break;
+                }
+            }
+        }
+
+        Style style = new Style(startChar, endChar, fontId, isBold,
+                isItalic, isUnderlined, fontSize, colorRGBA);
+        if (mStyleList == null) {
+            mStyleList = new ArrayList<Style>();
+        }
+        mStyleList.add(style);
+    }
+
+    /*
+     * Parses and stores the Font list: entryCount entries of
+     * (font id, name length, name bytes). Appends to mFontList.
+     */
+    private void readFont(Parcel parcel) {
+        int entryCount = parcel.readInt();
+
+        // Lazily create the list once, before the loop, instead of re-checking
+        // on every iteration. Guarded by entryCount so an empty record still
+        // leaves mFontList null, exactly as before.
+        if (entryCount > 0 && mFontList == null) {
+            mFontList = new ArrayList<Font>();
+        }
+
+        for (int i = 0; i < entryCount; i++) {
+            int id = parcel.readInt();
+            int nameLen = parcel.readInt();
+
+            // NOTE(review): decodes with the platform default charset and
+            // trusts nameLen <= text.length -- confirm the writer guarantees
+            // both, since a short array here would throw.
+            byte[] text = parcel.createByteArray();
+            final String name = new String(text, 0, nameLen);
+
+            mFontList.add(new Font(id, name));
+        }
+    }
+
+ /*
+ * To parse and store the Highlight list
+ */
+ private void readHighlight(Parcel parcel) {
+ int startChar = parcel.readInt();
+ int endChar = parcel.readInt();
+ CharPos pos = new CharPos(startChar, endChar);
+
+ if (mHighlightPosList == null) {
+ mHighlightPosList = new ArrayList<CharPos>();
+ }
+ mHighlightPosList.add(pos);
+ }
+
+    /*
+     * Parses and stores the Karaoke list: entryCount entries of
+     * (startTimeMs, endTimeMs, startChar, endChar). Appends to mKaraokeList.
+     */
+    private void readKaraoke(Parcel parcel) {
+        int entryCount = parcel.readInt();
+
+        // Lazily create the list once, before the loop, instead of re-checking
+        // on every iteration. Guarded by entryCount so an empty record still
+        // leaves mKaraokeList null, exactly as before.
+        if (entryCount > 0 && mKaraokeList == null) {
+            mKaraokeList = new ArrayList<Karaoke>();
+        }
+
+        for (int i = 0; i < entryCount; i++) {
+            int startTimeMs = parcel.readInt();
+            int endTimeMs = parcel.readInt();
+            int startChar = parcel.readInt();
+            int endChar = parcel.readInt();
+            mKaraokeList.add(new Karaoke(startTimeMs, endTimeMs,
+                    startChar, endChar));
+        }
+    }
+
+    /*
+     * Parses one HyperText entry -- a character range plus a URL string and
+     * an "alt" string, each preceded by its byte length -- and appends it to
+     * mHyperTextList, creating the list on first use.
+     */
+    private void readHyperText(Parcel parcel) {
+        final int start = parcel.readInt();
+        final int end = parcel.readInt();
+
+        final int urlLen = parcel.readInt();
+        final byte[] urlBytes = parcel.createByteArray();
+        final String urlString = new String(urlBytes, 0, urlLen);
+
+        final int altLen = parcel.readInt();
+        final byte[] altBytes = parcel.createByteArray();
+        final String altString = new String(altBytes, 0, altLen);
+
+        if (mHyperTextList == null) {
+            mHyperTextList = new ArrayList<HyperText>();
+        }
+        mHyperTextList.add(new HyperText(start, end, urlString, altString));
+    }
+
+ /*
+ * To parse and store blinking text list
+ */
+ private void readBlinkingText(Parcel parcel) {
+ int startChar = parcel.readInt();
+ int endChar = parcel.readInt();
+ CharPos blinkingPos = new CharPos(startChar, endChar);
+
+ if (mBlinkingPosList == null) {
+ mBlinkingPosList = new ArrayList<CharPos>();
+ }
+ mBlinkingPosList.add(blinkingPos);
+ }
+
+    /*
+     * Checks whether the given key falls in either the public or the private
+     * key range.
+     * @param key the key to be checked.
+     * @return true if the key is a valid one. Otherwise, false.
+     */
+    private boolean isValidKey(final int key) {
+        // Return the range test directly instead of branching on its negation.
+        return ((key >= FIRST_PUBLIC_KEY) && (key <= LAST_PUBLIC_KEY))
+                || ((key >= FIRST_PRIVATE_KEY) && (key <= LAST_PRIVATE_KEY));
+    }
+
+    /*
+     * Checks whether the given key is present in this TimedText object.
+     * @param key the key to be checked.
+     * @return true if the key is valid and contained in this TimedText
+     *         object. Otherwise, false.
+     */
+    private boolean containsKey(final int key) {
+        return isValidKey(key) && mKeyObjectMap.containsKey(key);
+    }
+
+    /*
+     * @return a set of the keys contained in this TimedText object.
+     * NOTE(review): declared as a raw Set; mKeyObjectMap's declared key type
+     * is not visible here -- if it is a Map<Integer, Object>, this could be
+     * narrowed to Set<Integer>. Confirm before changing.
+     */
+    private Set keySet() {
+        return mKeyObjectMap.keySet();
+    }
+
+    /*
+     * Retrieves the object associated with the key. Callers must first check
+     * presence with containsKey; an absent or invalid key throws.
+     * @param key the key used to retrieve the object.
+     * @return an object. The object could be 1) an instance of Integer; 2) a
+     * List of CharPos, Karaoke, Font, Style, and HyperText, or 3) an instance
+     * of Justification.
+     * @throws IllegalArgumentException if the key is invalid or absent.
+     */
+    private Object getObject(final int key) {
+        // Guard clause: reject bad keys up front, then do the plain lookup.
+        if (!containsKey(key)) {
+            throw new IllegalArgumentException("Invalid key: " + key);
+        }
+        return mKeyObjectMap.get(key);
+    }
+}
diff --git a/android/media/ToneGenerator.java b/android/media/ToneGenerator.java
new file mode 100644
index 00000000..4661226c
--- /dev/null
+++ b/android/media/ToneGenerator.java
@@ -0,0 +1,897 @@
+/*
+ * Copyright (C) 2008 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+
+
+/**
+ * This class provides methods to play DTMF tones (ITU-T Recommendation Q.23),
+ * call supervisory tones (3GPP TS 22.001, CEPT) and proprietary tones (3GPP TS 31.111).
+ * Depending on call state and routing options, tones are mixed to the downlink audio
+ * or output to the speaker phone or headset.
+ * This API is not for generating tones over the uplink audio path.
+ */
+public class ToneGenerator
+{
+
+ /* Values for toneType parameter of ToneGenerator() constructor */
+ /*
+ * List of all available tones: These constants must be kept consistant with
+ * the enum in ToneGenerator C++ class. */
+
+ /**
+ * Default value for an unknown or unspecified tone.
+ * @hide
+ */
+ public static final int TONE_UNKNOWN = -1;
+
+ /**
+ * DTMF tone for key 0: 1336Hz, 941Hz, continuous</p>
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_DTMF_0 = 0;
+ /**
+ * DTMF tone for key 1: 1209Hz, 697Hz, continuous
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_DTMF_1 = 1;
+ /**
+ * DTMF tone for key 2: 1336Hz, 697Hz, continuous
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_DTMF_2 = 2;
+ /**
+ * DTMF tone for key 3: 1477Hz, 697Hz, continuous
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_DTMF_3 = 3;
+ /**
+ * DTMF tone for key 4: 1209Hz, 770Hz, continuous
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_DTMF_4 = 4;
+ /**
+ * DTMF tone for key 5: 1336Hz, 770Hz, continuous
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_DTMF_5 = 5;
+ /**
+ * DTMF tone for key 6: 1477Hz, 770Hz, continuous
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_DTMF_6 = 6;
+ /**
+ * DTMF tone for key 7: 1209Hz, 852Hz, continuous
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_DTMF_7 = 7;
+ /**
+ * DTMF tone for key 8: 1336Hz, 852Hz, continuous
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_DTMF_8 = 8;
+ /**
+ * DTMF tone for key 9: 1477Hz, 852Hz, continuous
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_DTMF_9 = 9;
+ /**
+ * DTMF tone for key *: 1209Hz, 941Hz, continuous
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_DTMF_S = 10;
+ /**
+ * DTMF tone for key #: 1477Hz, 941Hz, continuous
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_DTMF_P = 11;
+ /**
+ * DTMF tone for key A: 1633Hz, 697Hz, continuous
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_DTMF_A = 12;
+ /**
+ * DTMF tone for key B: 1633Hz, 770Hz, continuous
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_DTMF_B = 13;
+ /**
+ * DTMF tone for key C: 1633Hz, 852Hz, continuous
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_DTMF_C = 14;
+ /**
+ * DTMF tone for key D: 1633Hz, 941Hz, continuous
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_DTMF_D = 15;
+ /**
+ * Call supervisory tone, Dial tone:
+ * CEPT: 425Hz, continuous
+ * ANSI (IS-95): 350Hz+440Hz, continuous
+ * JAPAN: 400Hz, continuous
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_SUP_DIAL = 16;
+ /**
+ * Call supervisory tone, Busy:
+ * CEPT: 425Hz, 500ms ON, 500ms OFF...
+ * ANSI (IS-95): 480Hz+620Hz, 500ms ON, 500ms OFF...
+ * JAPAN: 400Hz, 500ms ON, 500ms OFF...
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_SUP_BUSY = 17;
+ /**
+ * Call supervisory tone, Congestion:
+ * CEPT, JAPAN: 425Hz, 200ms ON, 200ms OFF...
+ * ANSI (IS-95): 480Hz+620Hz, 250ms ON, 250ms OFF...
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_SUP_CONGESTION = 18;
+ /**
+ * Call supervisory tone, Radio path acknowlegment :
+ * CEPT, ANSI: 425Hz, 200ms ON
+ * JAPAN: 400Hz, 1s ON, 2s OFF...
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_SUP_RADIO_ACK = 19;
+ /**
+ * Call supervisory tone, Radio path not available: 425Hz, 200ms ON, 200 OFF 3 bursts
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_SUP_RADIO_NOTAVAIL = 20;
+ /**
+ * Call supervisory tone, Error/Special info: 950Hz+1400Hz+1800Hz, 330ms ON, 1s OFF...
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_SUP_ERROR = 21;
+ /**
+ * Call supervisory tone, Call Waiting:
+ * CEPT, JAPAN: 425Hz, 200ms ON, 600ms OFF, 200ms ON, 3s OFF...
+ * ANSI (IS-95): 440 Hz, 300 ms ON, 9.7 s OFF,
+ * (100 ms ON, 100 ms OFF, 100 ms ON, 9.7s OFF ...)
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_SUP_CALL_WAITING = 22;
+ /**
+ * Call supervisory tone, Ring Tone:
+ * CEPT, JAPAN: 425Hz, 1s ON, 4s OFF...
+ * ANSI (IS-95): 440Hz + 480Hz, 2s ON, 4s OFF...
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_SUP_RINGTONE = 23;
+ /**
+ * Proprietary tone, general beep: 400Hz+1200Hz, 35ms ON
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_PROP_BEEP = 24;
+ /**
+ * Proprietary tone, positive acknowlegement: 1200Hz, 100ms ON, 100ms OFF 2 bursts
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_PROP_ACK = 25;
+ /**
+ * Proprietary tone, negative acknowlegement: 300Hz+400Hz+500Hz, 400ms ON
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_PROP_NACK = 26;
+ /**
+ * Proprietary tone, prompt tone: 400Hz+1200Hz, 200ms ON
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_PROP_PROMPT = 27;
+ /**
+ * Proprietary tone, general double beep: twice 400Hz+1200Hz, 35ms ON, 200ms OFF, 35ms ON
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_PROP_BEEP2 = 28;
+ /**
+ * Call supervisory tone (IS-95), intercept tone: alternating 440 Hz and 620 Hz tones,
+ * each on for 250 ms
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_SUP_INTERCEPT = 29;
+ /**
+ * Call supervisory tone (IS-95), abbreviated intercept: intercept tone limited to 4 seconds
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_SUP_INTERCEPT_ABBREV = 30;
+ /**
+ * Call supervisory tone (IS-95), abbreviated congestion: congestion tone limited to 4 seconds
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_SUP_CONGESTION_ABBREV = 31;
+ /**
+ * Call supervisory tone (IS-95), confirm tone: a 350 Hz tone added to a 440 Hz tone
+ * repeated 3 times in a 100 ms on, 100 ms off cycle
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_SUP_CONFIRM = 32;
+ /**
+ * Call supervisory tone (IS-95), pip tone: four bursts of 480 Hz tone (0.1 s on, 0.1 s off).
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_SUP_PIP = 33;
+ /**
+ * CDMA Dial tone : 425Hz continuous
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_DIAL_TONE_LITE = 34;
+ /**
+ * CDMA USA Ringback: 440Hz+480Hz 2s ON, 4000 OFF ...
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_NETWORK_USA_RINGBACK = 35;
+ /**
+ * CDMA Intercept tone: 440Hz 250ms ON, 620Hz 250ms ON ...
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_INTERCEPT = 36;
+ /**
+ * CDMA Abbr Intercept tone: 440Hz 250ms ON, 620Hz 250ms ON
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_ABBR_INTERCEPT = 37;
+ /**
+ * CDMA Reorder tone: 480Hz+620Hz 250ms ON, 250ms OFF...
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_REORDER = 38;
+ /**
+ *
+ * CDMA Abbr Reorder tone: 480Hz+620Hz 250ms ON, 250ms OFF repeated for 8 times
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_ABBR_REORDER = 39;
+ /**
+ * CDMA Network Busy tone: 480Hz+620Hz 500ms ON, 500ms OFF continuous
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_NETWORK_BUSY = 40;
+ /**
+ * CDMA Confirm tone: 350Hz+440Hz 100ms ON, 100ms OFF repeated for 3 times
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_CONFIRM = 41;
+ /**
+ *
+ * CDMA answer tone: silent tone - defintion Frequency 0, 0ms ON, 0ms OFF
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_ANSWER = 42;
+ /**
+ *
+ * CDMA Network Callwaiting tone: 440Hz 300ms ON
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_NETWORK_CALLWAITING = 43;
+ /**
+ * CDMA PIP tone: 480Hz 100ms ON, 100ms OFF repeated for 4 times
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_PIP = 44;
+ /**
+ * ISDN Call Signal Normal tone: {2091Hz 32ms ON, 2556 64ms ON} 20 times,
+ * 2091 32ms ON, 2556 48ms ON, 4s OFF
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_CALL_SIGNAL_ISDN_NORMAL = 45;
+ /**
+ * ISDN Call Signal Intergroup tone: {2091Hz 32ms ON, 2556 64ms ON} 8 times,
+ * 2091Hz 32ms ON, 400ms OFF, {2091Hz 32ms ON, 2556Hz 64ms ON} times,
+ * 2091Hz 32ms ON, 4s OFF.
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_CALL_SIGNAL_ISDN_INTERGROUP = 46;
+ /**
+ * ISDN Call Signal SP PRI tone:{2091Hz 32ms ON, 2556 64ms ON} 4 times
+ * 2091Hz 16ms ON, 200ms OFF, {2091Hz 32ms ON, 2556Hz 64ms ON} 4 times,
+ * 2091Hz 16ms ON, 200ms OFF
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_CALL_SIGNAL_ISDN_SP_PRI = 47;
+ /**
+ * ISDN Call sign PAT3 tone: silent tone
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_CALL_SIGNAL_ISDN_PAT3 = 48;
+ /**
+ * ISDN Ping Ring tone: {2091Hz 32ms ON, 2556Hz 64ms ON} 5 times
+ * 2091Hz 20ms ON
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_CALL_SIGNAL_ISDN_PING_RING = 49;
+ /**
+ *
+ * ISDN Pat5 tone: silent tone
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_CALL_SIGNAL_ISDN_PAT5 = 50;
+ /**
+ *
+ * ISDN Pat6 tone: silent tone
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_CALL_SIGNAL_ISDN_PAT6 = 51;
+ /**
+ * ISDN Pat7 tone: silent tone
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_CALL_SIGNAL_ISDN_PAT7 = 52;
+ /**
+ * TONE_CDMA_HIGH_L tone: {3700Hz 25ms, 4000Hz 25ms} 40 times
+ * 4000ms OFF, Repeat ....
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_HIGH_L = 53;
+ /**
+ * TONE_CDMA_MED_L tone: {2600Hz 25ms, 2900Hz 25ms} 40 times
+ * 4000ms OFF, Repeat ....
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_MED_L = 54;
+ /**
+ * TONE_CDMA_LOW_L tone: {1300Hz 25ms, 1450Hz 25ms} 40 times,
+ * 4000ms OFF, Repeat ....
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_LOW_L = 55;
+ /**
+ * CDMA HIGH SS tone: {3700Hz 25ms, 4000Hz 25ms} repeat 16 times,
+ * 400ms OFF, repeat ....
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_HIGH_SS = 56;
+ /**
+ * CDMA MED SS tone: {2600Hz 25ms, 2900Hz 25ms} repeat 16 times,
+ * 400ms OFF, repeat ....
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_MED_SS = 57;
+ /**
+ * CDMA LOW SS tone: {1300z 25ms, 1450Hz 25ms} repeat 16 times,
+ * 400ms OFF, repeat ....
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_LOW_SS = 58;
+ /**
+ * CDMA HIGH SSL tone: {3700Hz 25ms, 4000Hz 25ms} 8 times,
+ * 200ms OFF, {3700Hz 25ms, 4000Hz 25ms} repeat 8 times,
+ * 200ms OFF, {3700Hz 25ms, 4000Hz 25ms} repeat 16 times,
+ * 4000ms OFF, repeat ...
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_HIGH_SSL = 59;
+ /**
+ * CDMA MED SSL tone: {2600Hz 25ms, 2900Hz 25ms} 8 times,
+ * 200ms OFF, {2600Hz 25ms, 2900Hz 25ms} repeat 8 times,
+ * 200ms OFF, {2600Hz 25ms, 2900Hz 25ms} repeat 16 times,
+ * 4000ms OFF, repeat ...
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_MED_SSL = 60;
+ /**
+ * CDMA LOW SSL tone: {1300Hz 25ms, 1450Hz 25ms} 8 times,
+ * 200ms OFF, {1300Hz 25ms, 1450Hz 25ms} repeat 8 times,
+ * 200ms OFF, {1300Hz 25ms, 1450Hz 25ms} repeat 16 times,
+ * 4000ms OFF, repeat ...
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_LOW_SSL = 61;
+ /**
+ * CDMA HIGH SS2 tone: {3700Hz 25ms, 4000Hz 25ms} 20 times,
+ * 1000ms OFF, {3700Hz 25ms, 4000Hz 25ms} 20 times,
+ * 3000ms OFF, repeat ....
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_HIGH_SS_2 = 62;
+ /**
+ * CDMA MED SS2 tone: {2600Hz 25ms, 2900Hz 25ms} 20 times,
+ * 1000ms OFF, {2600Hz 25ms, 2900Hz 25ms} 20 times,
+ * 3000ms OFF, repeat ....
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_MED_SS_2 = 63;
+ /**
+ * CDMA LOW SS2 tone: {1300Hz 25ms, 1450Hz 25ms} 20 times,
+ * 1000ms OFF, {1300Hz 25ms, 1450Hz 25ms} 20 times,
+ * 3000ms OFF, repeat ....
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_LOW_SS_2 = 64;
+ /**
+ * CDMA HIGH SLS tone: {3700Hz 25ms, 4000Hz 25ms} 10 times,
+ * 500ms OFF, {3700Hz 25ms, 4000Hz 25ms} 20 times, 500ms OFF,
+ * {3700Hz 25ms, 4000Hz 25ms} 10 times, 3000ms OFF, REPEAT
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_HIGH_SLS = 65;
+ /**
+ * CDMA MED SLS tone: {2600Hz 25ms, 2900Hz 25ms} 10 times,
+ * 500ms OFF, {2600Hz 25ms, 2900Hz 25ms} 20 times, 500ms OFF,
+ * {2600Hz 25ms, 2900Hz 25ms} 10 times, 3000ms OFF, REPEAT
+ *
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_MED_SLS = 66;
+ /**
+ * CDMA LOW SLS tone: {1300Hz 25ms, 1450Hz 25ms} 10 times,
+ * 500ms OFF, {1300Hz 25ms, 1450Hz 25ms} 20 times, 500ms OFF,
+ * {1300Hz 25ms, 1450Hz 25ms} 10 times, 3000ms OFF, REPEAT
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_LOW_SLS = 67;
+ /**
+ * CDMA HIGH S X4 tone: {3700Hz 25ms, 4000Hz 25ms} 10 times,
+ * 500ms OFF, {3700Hz 25ms, 4000Hz 25ms} 10 times, 500ms OFF,
+ * {3700Hz 25ms, 4000Hz 25ms} 10 times, 500ms OFF,
+ * {3700Hz 25ms, 4000Hz 25ms} 10 times, 2500ms OFF, REPEAT....
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_HIGH_S_X4 = 68;
+ /**
+ * CDMA MED S X4 tone: {2600Hz 25ms, 2900Hz 25ms} 10 times,
+ * 500ms OFF, {2600Hz 25ms, 2900Hz 25ms} 10 times, 500ms OFF,
+ * {2600Hz 25ms, 2900Hz 25ms} 10 times, 500ms OFF,
+ * {2600Hz 25ms, 2900Hz 25ms} 10 times, 2500ms OFF, REPEAT....
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_MED_S_X4 = 69;
+ /**
+ * CDMA LOW S X4 tone: {2600Hz 25ms, 2900Hz 25ms} 10 times,
+ * 500ms OFF, {2600Hz 25ms, 2900Hz 25ms} 10 times, 500ms OFF,
+ * {2600Hz 25ms, 2900Hz 25ms} 10 times, 500ms OFF,
+ * {2600Hz 25ms, 2900Hz 25ms} 10 times, 2500ms OFF, REPEAT....
+ *
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_LOW_S_X4 = 70;
+ /**
+ * CDMA HIGH PBX L: {3700Hz 25ms, 4000Hz 25ms}20 times,
+ * 2000ms OFF, REPEAT....
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_HIGH_PBX_L = 71;
+ /**
+ * CDMA MED PBX L: {2600Hz 25ms, 2900Hz 25ms}20 times,
+ * 2000ms OFF, REPEAT....
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_MED_PBX_L = 72;
+ /**
+ * CDMA LOW PBX L: {1300Hz 25ms,1450Hz 25ms}20 times,
+ * 2000ms OFF, REPEAT....
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_LOW_PBX_L = 73;
+ /**
+ * CDMA HIGH PBX SS tone: {3700Hz 25ms, 4000Hz 25ms} 8 times
+ * 200 ms OFF, {3700Hz 25ms 4000Hz 25ms}8 times,
+ * 2000ms OFF, REPEAT....
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_HIGH_PBX_SS = 74;
+ /**
+ * CDMA MED PBX SS tone: {2600Hz 25ms, 2900Hz 25ms} 8 times
+ * 200 ms OFF, {2600Hz 25ms 2900Hz 25ms}8 times,
+ * 2000ms OFF, REPEAT....
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_MED_PBX_SS = 75;
+ /**
+ * CDMA LOW PBX SS tone: {1300Hz 25ms, 1450Hz 25ms} 8 times
+ * 200 ms OFF, {1300Hz 25ms 1450Hz 25ms}8 times,
+ * 2000ms OFF, REPEAT....
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_LOW_PBX_SS = 76;
+ /**
+ * CDMA HIGH PBX SSL tone:{3700Hz 25ms, 4000Hz 25ms} 8 times
+ * 200ms OFF, {3700Hz 25ms, 4000Hz 25ms} 8 times, 200ms OFF,
+ * {3700Hz 25ms, 4000Hz 25ms} 16 times, 1000ms OFF, REPEAT....
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_HIGH_PBX_SSL = 77;
+ /**
+ * CDMA MED PBX SSL tone:{2600Hz 25ms, 2900Hz 25ms} 8 times
+ * 200ms OFF, {2600Hz 25ms, 2900Hz 25ms} 8 times, 200ms OFF,
+ * {2600Hz 25ms, 2900Hz 25ms} 16 times, 1000ms OFF, REPEAT....
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_MED_PBX_SSL = 78;
+ /**
+ * CDMA LOW PBX SSL tone:{1300Hz 25ms, 1450Hz 25ms} 8 times
+ * 200ms OFF, {1300Hz 25ms, 1450Hz 25ms} 8 times, 200ms OFF,
+ * {1300Hz 25ms, 1450Hz 25ms} 16 times, 1000ms OFF, REPEAT....
+ *
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_LOW_PBX_SSL = 79;
+ /**
+ * CDMA HIGH PBX SSL tone:{3700Hz 25ms, 4000Hz 25ms} 8 times
+ * 200ms OFF, {3700Hz 25ms, 4000Hz 25ms} 16 times, 200ms OFF,
+ * {3700Hz 25ms, 4000Hz 25ms} 8 times, 1000ms OFF, REPEAT....
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_HIGH_PBX_SLS = 80;
+ /**
+ * CDMA HIGH PBX SLS tone:{2600Hz 25ms, 2900Hz 25ms} 8 times
+ * 200ms OFF, {2600Hz 25ms, 2900Hz 25ms} 16 times, 200ms OFF,
+ * {2600Hz 25ms, 2900Hz 25ms} 8 times, 1000ms OFF, REPEAT....
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_MED_PBX_SLS = 81;
+ /**
+ * CDMA HIGH PBX SLS tone:{1300Hz 25ms, 1450Hz 25ms} 8 times
+ * 200ms OFF, {1300Hz 25ms, 1450Hz 25ms} 16 times, 200ms OFF,
+ * {1300Hz 25ms, 1450Hz 25ms} 8 times, 1000ms OFF, REPEAT....
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_LOW_PBX_SLS = 82;
+ /**
+ * CDMA HIGH PBX X S4 tone: {3700Hz 25ms 4000Hz 25ms} 8 times,
+ * 200ms OFF, {3700Hz 25ms 4000Hz 25ms} 8 times, 200ms OFF,
+ * {3700Hz 25ms 4000Hz 25ms} 8 times, 200ms OFF,
+ * {3700Hz 25ms 4000Hz 25ms} 8 times, 800ms OFF, REPEAT...
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_HIGH_PBX_S_X4 = 83;
+ /**
+ * CDMA MED PBX X S4 tone: {2600Hz 25ms 2900Hz 25ms} 8 times,
+ * 200ms OFF, {2600Hz 25ms 2900Hz 25ms} 8 times, 200ms OFF,
+ * {2600Hz 25ms 2900Hz 25ms} 8 times, 200ms OFF,
+ * {2600Hz 25ms 2900Hz 25ms} 8 times, 800ms OFF, REPEAT...
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_MED_PBX_S_X4 = 84;
+ /**
+ * CDMA LOW PBX X S4 tone: {1300Hz 25ms 1450Hz 25ms} 8 times,
+ * 200ms OFF, {1300Hz 25ms 1450Hz 25ms} 8 times, 200ms OFF,
+ * {1300Hz 25ms 1450Hz 25ms} 8 times, 200ms OFF,
+ * {1300Hz 25ms 1450Hz 25ms} 8 times, 800ms OFF, REPEAT...
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_LOW_PBX_S_X4 = 85;
+ /**
+ * CDMA Alert Network Lite tone: 1109Hz 62ms ON, 784Hz 62ms ON, 740Hz 62ms ON
+ * 622Hz 62ms ON, 1109Hz 62ms ON
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_ALERT_NETWORK_LITE = 86;
+ /**
+ * CDMA Alert Auto Redial tone: {1245Hz 62ms ON, 659Hz 62ms ON} 3 times,
+ * 1245 62ms ON
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_ALERT_AUTOREDIAL_LITE = 87;
+ /**
+ * CDMA One Min Beep tone: 1150Hz+770Hz 400ms ON
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_ONE_MIN_BEEP = 88;
+ /**
+ *
+ * CDMA KEYPAD Volume key lite tone: 941Hz+1477Hz 120ms ON
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_KEYPAD_VOLUME_KEY_LITE = 89;
+ /**
+ * CDMA PRESSHOLDKEY LITE tone: 587Hz 375ms ON, 1175Hz 125ms ON
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_PRESSHOLDKEY_LITE = 90;
+ /**
+ * CDMA ALERT INCALL LITE tone: 587Hz 62ms, 784 62ms, 831Hz 62ms,
+ * 784Hz 62ms, 1109 62ms, 784Hz 62ms, 831Hz 62ms, 784Hz 62ms
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_ALERT_INCALL_LITE = 91;
+ /**
+ * CDMA EMERGENCY RINGBACK tone: {941Hz 125ms ON, 10ms OFF} 3times
+ * 4990ms OFF, REPEAT...
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_EMERGENCY_RINGBACK = 92;
+ /**
+ * CDMA ALERT CALL GUARD tone: {1319Hz 125ms ON, 125ms OFF} 3 times
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_ALERT_CALL_GUARD = 93;
+ /**
+ * CDMA SOFT ERROR LITE tone: 1047Hz 125ms ON, 370Hz 125ms
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_SOFT_ERROR_LITE = 94;
+ /**
+ * CDMA CALLDROP LITE tone: 1480Hz 125ms, 1397Hz 125ms, 784Hz 125ms
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_CALLDROP_LITE = 95;
+ /**
+ * CDMA_NETWORK_BUSY_ONE_SHOT tone: 425Hz 500ms ON, 500ms OFF.
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_NETWORK_BUSY_ONE_SHOT = 96;
+ /**
+ * CDMA_ABBR_ALERT tone: 1150Hz+770Hz 400ms ON
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_ABBR_ALERT = 97;
+ /**
+ * CDMA_SIGNAL_OFF - silent tone
+ *
+ * @see #ToneGenerator(int, int)
+ */
+ public static final int TONE_CDMA_SIGNAL_OFF = 98;
+
+ /** Maximum volume, for use with {@link #ToneGenerator(int,int)} */
+ public static final int MAX_VOLUME = 100;
+ /** Minimum volume setting, for use with {@link #ToneGenerator(int,int)} */
+ public static final int MIN_VOLUME = 0;
+
+
+ /**
+ * ToneGenerator class contructor specifying output stream type and volume.
+ *
+ * @param streamType The streame type used for tone playback (e.g. STREAM_MUSIC).
+ * @param volume The volume of the tone, given in percentage of maximum volume (from 0-100).
+ *
+ */
+    public ToneGenerator(int streamType, int volume) {
+        // All tone synthesis and playback lives in the native layer; this
+        // only allocates and configures the native generator.
+        native_setup(streamType, volume);
+    }
+
+ /**
+ * This method starts the playback of a tone of the specified type.
+ * only one tone can play at a time: if a tone is playing while this method is called,
+ * this tone is stopped and replaced by the one requested.
+ * @param toneType The type of tone generated chosen from the following list:
+ * <ul>
+ * <li>{@link #TONE_DTMF_0}
+ * <li>{@link #TONE_DTMF_1}
+ * <li>{@link #TONE_DTMF_2}
+ * <li>{@link #TONE_DTMF_3}
+ * <li>{@link #TONE_DTMF_4}
+ * <li>{@link #TONE_DTMF_5}
+ * <li>{@link #TONE_DTMF_6}
+ * <li>{@link #TONE_DTMF_7}
+ * <li>{@link #TONE_DTMF_8}
+ * <li>{@link #TONE_DTMF_9}
+ * <li>{@link #TONE_DTMF_A}
+ * <li>{@link #TONE_DTMF_B}
+ * <li>{@link #TONE_DTMF_C}
+ * <li>{@link #TONE_DTMF_D}
+ * <li>{@link #TONE_SUP_DIAL}
+ * <li>{@link #TONE_SUP_BUSY}
+ * <li>{@link #TONE_SUP_CONGESTION}
+ * <li>{@link #TONE_SUP_RADIO_ACK}
+ * <li>{@link #TONE_SUP_RADIO_NOTAVAIL}
+ * <li>{@link #TONE_SUP_ERROR}
+ * <li>{@link #TONE_SUP_CALL_WAITING}
+ * <li>{@link #TONE_SUP_RINGTONE}
+ * <li>{@link #TONE_PROP_BEEP}
+ * <li>{@link #TONE_PROP_ACK}
+ * <li>{@link #TONE_PROP_NACK}
+ * <li>{@link #TONE_PROP_PROMPT}
+ * <li>{@link #TONE_PROP_BEEP2}
+ * <li>{@link #TONE_SUP_INTERCEPT}
+ * <li>{@link #TONE_SUP_INTERCEPT_ABBREV}
+ * <li>{@link #TONE_SUP_CONGESTION_ABBREV}
+ * <li>{@link #TONE_SUP_CONFIRM}
+ * <li>{@link #TONE_SUP_PIP}
+ * <li>{@link #TONE_CDMA_DIAL_TONE_LITE}
+ * <li>{@link #TONE_CDMA_NETWORK_USA_RINGBACK}
+ * <li>{@link #TONE_CDMA_INTERCEPT}
+ * <li>{@link #TONE_CDMA_ABBR_INTERCEPT}
+ * <li>{@link #TONE_CDMA_REORDER}
+ * <li>{@link #TONE_CDMA_ABBR_REORDER}
+ * <li>{@link #TONE_CDMA_NETWORK_BUSY}
+ * <li>{@link #TONE_CDMA_CONFIRM}
+ * <li>{@link #TONE_CDMA_ANSWER}
+ * <li>{@link #TONE_CDMA_NETWORK_CALLWAITING}
+ * <li>{@link #TONE_CDMA_PIP}
+ * <li>{@link #TONE_CDMA_CALL_SIGNAL_ISDN_NORMAL}
+ * <li>{@link #TONE_CDMA_CALL_SIGNAL_ISDN_INTERGROUP}
+ * <li>{@link #TONE_CDMA_CALL_SIGNAL_ISDN_SP_PRI}
+ * <li>{@link #TONE_CDMA_CALL_SIGNAL_ISDN_PAT3}
+ * <li>{@link #TONE_CDMA_CALL_SIGNAL_ISDN_PING_RING}
+ * <li>{@link #TONE_CDMA_CALL_SIGNAL_ISDN_PAT5}
+ * <li>{@link #TONE_CDMA_CALL_SIGNAL_ISDN_PAT6}
+ * <li>{@link #TONE_CDMA_CALL_SIGNAL_ISDN_PAT7}
+ * <li>{@link #TONE_CDMA_HIGH_L}
+ * <li>{@link #TONE_CDMA_MED_L}
+ * <li>{@link #TONE_CDMA_LOW_L}
+ * <li>{@link #TONE_CDMA_HIGH_SS}
+ * <li>{@link #TONE_CDMA_MED_SS}
+ * <li>{@link #TONE_CDMA_LOW_SS}
+ * <li>{@link #TONE_CDMA_HIGH_SSL}
+ * <li>{@link #TONE_CDMA_MED_SSL}
+ * <li>{@link #TONE_CDMA_LOW_SSL}
+ * <li>{@link #TONE_CDMA_HIGH_SS_2}
+ * <li>{@link #TONE_CDMA_MED_SS_2}
+ * <li>{@link #TONE_CDMA_LOW_SS_2}
+ * <li>{@link #TONE_CDMA_HIGH_SLS}
+ * <li>{@link #TONE_CDMA_MED_SLS}
+ * <li>{@link #TONE_CDMA_LOW_SLS}
+ * <li>{@link #TONE_CDMA_HIGH_S_X4}
+ * <li>{@link #TONE_CDMA_MED_S_X4}
+ * <li>{@link #TONE_CDMA_LOW_S_X4}
+ * <li>{@link #TONE_CDMA_HIGH_PBX_L}
+ * <li>{@link #TONE_CDMA_MED_PBX_L}
+ * <li>{@link #TONE_CDMA_LOW_PBX_L}
+ * <li>{@link #TONE_CDMA_HIGH_PBX_SS}
+ * <li>{@link #TONE_CDMA_MED_PBX_SS}
+ * <li>{@link #TONE_CDMA_LOW_PBX_SS}
+ * <li>{@link #TONE_CDMA_HIGH_PBX_SSL}
+ * <li>{@link #TONE_CDMA_MED_PBX_SSL}
+ * <li>{@link #TONE_CDMA_LOW_PBX_SSL}
+ * <li>{@link #TONE_CDMA_HIGH_PBX_SLS}
+ * <li>{@link #TONE_CDMA_MED_PBX_SLS}
+ * <li>{@link #TONE_CDMA_LOW_PBX_SLS}
+ * <li>{@link #TONE_CDMA_HIGH_PBX_S_X4}
+ * <li>{@link #TONE_CDMA_MED_PBX_S_X4}
+ * <li>{@link #TONE_CDMA_LOW_PBX_S_X4}
+ * <li>{@link #TONE_CDMA_ALERT_NETWORK_LITE}
+ * <li>{@link #TONE_CDMA_ALERT_AUTOREDIAL_LITE}
+ * <li>{@link #TONE_CDMA_ONE_MIN_BEEP}
+ * <li>{@link #TONE_CDMA_KEYPAD_VOLUME_KEY_LITE}
+ * <li>{@link #TONE_CDMA_PRESSHOLDKEY_LITE}
+ * <li>{@link #TONE_CDMA_ALERT_INCALL_LITE}
+ * <li>{@link #TONE_CDMA_EMERGENCY_RINGBACK}
+ * <li>{@link #TONE_CDMA_ALERT_CALL_GUARD}
+ * <li>{@link #TONE_CDMA_SOFT_ERROR_LITE}
+ * <li>{@link #TONE_CDMA_CALLDROP_LITE}
+ * <li>{@link #TONE_CDMA_NETWORK_BUSY_ONE_SHOT}
+ * <li>{@link #TONE_CDMA_ABBR_ALERT}
+ * <li>{@link #TONE_CDMA_SIGNAL_OFF}
+ * </ul>
+ * @see #ToneGenerator(int, int)
+ */
+    public boolean startTone(int toneType) {
+        // Duration -1 means "play for the tone's own defined duration"
+        // (see startTone(int, int)).
+        return startTone(toneType, -1);
+    }
+
+ /**
+ * This method starts the playback of a tone of the specified type for the specified duration.
+ * @param toneType The type of tone generated @see {@link #startTone(int)}.
+ * @param durationMs The tone duration in milliseconds. If the tone is limited in time by definition,
+ * the actual duration will be the minimum of durationMs and the defined tone duration. Setting durationMs to -1,
+ * is equivalent to calling {@link #startTone(int)}.
+ */
+ public native boolean startTone(int toneType, int durationMs);
+
+ /**
+ * This method stops the tone currently playing playback.
+ * @see #ToneGenerator(int, int)
+ */
+ public native void stopTone();
+
+ /**
+ * Releases resources associated with this ToneGenerator object. It is good
+ * practice to call this method when you're done using the ToneGenerator.
+ */
+ public native void release();
+
+ private native final void native_setup(int streamType, int volume);
+
+ private native final void native_finalize();
+
+ /**
+ * Returns the audio session ID.
+ *
+ * @return the ID of the audio session this ToneGenerator belongs to or 0 if an error
+ * occured.
+ */
+ public native final int getAudioSessionId();
+
+    // Safety net that frees native resources if the app never called
+    // release(). NOTE(review): super.finalize() is not chained here --
+    // harmless for a direct Object subclass, but confirm this is intentional.
+    @Override
+    protected void finalize() { native_finalize(); }
+
+ @SuppressWarnings("unused")
+ private long mNativeContext; // accessed by native methods
+}
diff --git a/android/media/TtmlRenderer.java b/android/media/TtmlRenderer.java
new file mode 100644
index 00000000..9d587b94
--- /dev/null
+++ b/android/media/TtmlRenderer.java
@@ -0,0 +1,746 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.content.Context;
+import android.text.TextUtils;
+import android.util.AttributeSet;
+import android.util.Log;
+import android.view.Gravity;
+import android.view.View;
+import android.view.accessibility.CaptioningManager;
+import android.widget.LinearLayout;
+import android.widget.TextView;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.TreeSet;
+import java.util.Vector;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.xmlpull.v1.XmlPullParser;
+import org.xmlpull.v1.XmlPullParserException;
+import org.xmlpull.v1.XmlPullParserFactory;
+
+/** @hide */
+public class TtmlRenderer extends SubtitleController.Renderer {
+ private final Context mContext;
+
+ private static final String MEDIA_MIMETYPE_TEXT_TTML = "application/ttml+xml";
+
+ private TtmlRenderingWidget mRenderingWidget;
+
+ public TtmlRenderer(Context context) {
+ mContext = context;
+ }
+
+ @Override
+ public boolean supports(MediaFormat format) {
+ if (format.containsKey(MediaFormat.KEY_MIME)) {
+ return format.getString(MediaFormat.KEY_MIME).equals(MEDIA_MIMETYPE_TEXT_TTML);
+ }
+ return false;
+ }
+
+ @Override
+ public SubtitleTrack createTrack(MediaFormat format) {
+ if (mRenderingWidget == null) {
+ mRenderingWidget = new TtmlRenderingWidget(mContext);
+ }
+ return new TtmlTrack(mRenderingWidget, format);
+ }
+}
+
+/**
+ * A class which provides utility methods for TTML parsing.
+ *
+ * @hide
+ */
+final class TtmlUtils {
+    public static final String TAG_TT = "tt";
+    public static final String TAG_HEAD = "head";
+    public static final String TAG_BODY = "body";
+    public static final String TAG_DIV = "div";
+    public static final String TAG_P = "p";
+    public static final String TAG_SPAN = "span";
+    public static final String TAG_BR = "br";
+    public static final String TAG_STYLE = "style";
+    public static final String TAG_STYLING = "styling";
+    public static final String TAG_LAYOUT = "layout";
+    public static final String TAG_REGION = "region";
+    public static final String TAG_METADATA = "metadata";
+    public static final String TAG_SMPTE_IMAGE = "smpte:image";
+    public static final String TAG_SMPTE_DATA = "smpte:data";
+    public static final String TAG_SMPTE_INFORMATION = "smpte:information";
+    public static final String PCDATA = "#pcdata";
+    public static final String ATTR_BEGIN = "begin";
+    public static final String ATTR_DURATION = "dur";
+    public static final String ATTR_END = "end";
+    public static final long INVALID_TIMESTAMP = Long.MAX_VALUE;
+
+    /**
+     * Time expression RE according to the spec:
+     * http://www.w3.org/TR/ttaf1-dfxp/#timing-value-timeExpression
+     */
+    private static final Pattern CLOCK_TIME = Pattern.compile(
+            "^([0-9][0-9]+):([0-9][0-9]):([0-9][0-9])"
+            + "(?:(\\.[0-9]+)|:([0-9][0-9])(?:\\.([0-9]+))?)?$");
+
+    private static final Pattern OFFSET_TIME = Pattern.compile(
+            "^([0-9]+(?:\\.[0-9]+)?)(h|m|s|ms|f|t)$");
+
+    // Static utility class; not instantiable.
+    private TtmlUtils() {
+    }
+
+    /**
+     * Parses the given time expression and returns a timestamp in milliseconds.
+     * <p>
+     * For the format of the time expression, please refer <a href=
+     * "http://www.w3.org/TR/ttaf1-dfxp/#timing-value-timeExpression">timeExpression</a>
+     *
+     * @param time A string which includes time expression.
+     * @param frameRate the framerate of the stream.
+     * @param subframeRate the sub-framerate of the stream
+     * @param tickRate the tick rate of the stream.
+     * @return the parsed timestamp in milliseconds.
+     * @throws NumberFormatException if the given string does not match to the
+     *         format.
+     */
+    public static long parseTimeExpression(String time, int frameRate, int subframeRate,
+            int tickRate) throws NumberFormatException {
+        Matcher matcher = CLOCK_TIME.matcher(time);
+        if (matcher.matches()) {
+            String hours = matcher.group(1);
+            double durationSeconds = Long.parseLong(hours) * 3600;
+            String minutes = matcher.group(2);
+            durationSeconds += Long.parseLong(minutes) * 60;
+            String seconds = matcher.group(3);
+            durationSeconds += Long.parseLong(seconds);
+            String fraction = matcher.group(4);
+            durationSeconds += (fraction != null) ? Double.parseDouble(fraction) : 0;
+            String frames = matcher.group(5);
+            durationSeconds += (frames != null) ? ((double)Long.parseLong(frames)) / frameRate : 0;
+            String subframes = matcher.group(6);
+            durationSeconds += (subframes != null) ? ((double)Long.parseLong(subframes))
+                    / subframeRate / frameRate
+                    : 0;
+            return (long)(durationSeconds * 1000);
+        }
+        matcher = OFFSET_TIME.matcher(time);
+        if (matcher.matches()) {
+            String timeValue = matcher.group(1);
+            double value = Double.parseDouble(timeValue);
+            String unit = matcher.group(2);
+            // Convert to milliseconds. (This branch previously produced microseconds,
+            // inconsistent with the clock-time branch above which returns milliseconds.)
+            if (unit.equals("h")) {
+                value *= 3600L * 1000L;
+            } else if (unit.equals("m")) {
+                value *= 60 * 1000;
+            } else if (unit.equals("s")) {
+                value *= 1000;
+            } else if (unit.equals("ms")) {
+                // value is already in milliseconds
+            } else if (unit.equals("f")) {
+                value = value / frameRate * 1000;
+            } else if (unit.equals("t")) {
+                value = value / tickRate * 1000;
+            }
+            return (long)value;
+        }
+        throw new NumberFormatException("Malformed time expression : " + time);
+    }
+
+    /**
+     * Applies <a href
+     * src="http://www.w3.org/TR/ttaf1-dfxp/#content-attribute-space">the
+     * default space policy</a> to the given string.
+     *
+     * @param in A string to apply the policy.
+     */
+    public static String applyDefaultSpacePolicy(String in) {
+        return applySpacePolicy(in, true);
+    }
+
+    /**
+     * Applies the space policy to the given string. This applies <a href
+     * src="http://www.w3.org/TR/ttaf1-dfxp/#content-attribute-space">the
+     * default space policy</a> with linefeed-treatment as treat-as-space
+     * or preserve.
+     *
+     * @param in A string to apply the policy.
+     * @param treatLfAsSpace Whether convert line feeds to spaces or not.
+     */
+    public static String applySpacePolicy(String in, boolean treatLfAsSpace) {
+        // Removes CR followed by LF. ref:
+        // http://www.w3.org/TR/xml/#sec-line-ends
+        String crRemoved = in.replaceAll("\r\n", "\n");
+        // Apply suppress-at-line-break="auto" and
+        // white-space-treatment="ignore-if-surrounding-linefeed"
+        String spacesNeighboringLfRemoved = crRemoved.replaceAll(" *\n *", "\n");
+        // Apply linefeed-treatment="treat-as-space"
+        String lfToSpace = treatLfAsSpace ? spacesNeighboringLfRemoved.replaceAll("\n", " ")
+                : spacesNeighboringLfRemoved;
+        // Apply white-space-collapse="true"
+        String spacesCollapsed = lfToSpace.replaceAll("[ \t\\x0B\f\r]+", " ");
+        return spacesCollapsed;
+    }
+
+    /**
+     * Returns the timed text for the given time period.
+     *
+     * @param root The root node of the TTML document.
+     * @param startUs The start time of the time period (same unit as the node timestamps).
+     * @param endUs The end time of the time period (same unit as the node timestamps).
+     */
+    public static String extractText(TtmlNode root, long startUs, long endUs) {
+        StringBuilder text = new StringBuilder();
+        extractText(root, startUs, endUs, text, false);
+        // Strip a single trailing newline left by the last closed <p>.
+        return text.toString().replaceAll("\n$", "");
+    }
+
+    private static void extractText(TtmlNode node, long startUs, long endUs, StringBuilder out,
+            boolean inPTag) {
+        if (node.mName.equals(TtmlUtils.PCDATA) && inPTag) {
+            out.append(node.mText);
+        } else if (node.mName.equals(TtmlUtils.TAG_BR) && inPTag) {
+            out.append("\n");
+        } else if (node.mName.equals(TtmlUtils.TAG_METADATA)) {
+            // do nothing.
+        } else if (node.isActive(startUs, endUs)) {
+            boolean pTag = node.mName.equals(TtmlUtils.TAG_P);
+            int length = out.length();
+            for (int i = 0; i < node.mChildren.size(); ++i) {
+                extractText(node.mChildren.get(i), startUs, endUs, out, pTag || inPTag);
+            }
+            // Terminate a paragraph with a newline, but only if it produced text.
+            if (pTag && length != out.length()) {
+                out.append("\n");
+            }
+        }
+    }
+
+    /**
+     * Returns a TTML fragment string for the given time period.
+     *
+     * @param root The root node of the TTML document.
+     * @param startUs The start time of the time period (same unit as the node timestamps).
+     * @param endUs The end time of the time period (same unit as the node timestamps).
+     */
+    public static String extractTtmlFragment(TtmlNode root, long startUs, long endUs) {
+        StringBuilder fragment = new StringBuilder();
+        extractTtmlFragment(root, startUs, endUs, fragment);
+        return fragment.toString();
+    }
+
+    private static void extractTtmlFragment(TtmlNode node, long startUs, long endUs,
+            StringBuilder out) {
+        if (node.mName.equals(TtmlUtils.PCDATA)) {
+            out.append(node.mText);
+        } else if (node.mName.equals(TtmlUtils.TAG_BR)) {
+            out.append("<br/>");
+        } else if (node.isActive(startUs, endUs)) {
+            // Re-emit the element with its original (non-timing) attributes.
+            out.append("<");
+            out.append(node.mName);
+            out.append(node.mAttributes);
+            out.append(">");
+            for (int i = 0; i < node.mChildren.size(); ++i) {
+                extractTtmlFragment(node.mChildren.get(i), startUs, endUs, out);
+            }
+            out.append("</");
+            out.append(node.mName);
+            out.append(">");
+        }
+    }
+}
+
+/**
+ * A container class which represents a cue in TTML.
+ * @hide
+ */
+class TtmlCue extends SubtitleTrack.Cue {
+    /** The cue's text content. */
+    public String mText;
+    /** The TTML fragment (markup included) backing this cue. */
+    public String mTtmlFragment;
+
+    public TtmlCue(long startTimeMs, long endTimeMs, String text, String ttmlFragment) {
+        mStartTimeMs = startTimeMs;
+        mEndTimeMs = endTimeMs;
+        mText = text;
+        mTtmlFragment = ttmlFragment;
+    }
+}
+
+/**
+ * A container class which represents a node in TTML.
+ *
+ * @hide
+ */
+class TtmlNode {
+    public final String mName;
+    public final String mAttributes;
+    public final TtmlNode mParent;
+    public final String mText;
+    public final List<TtmlNode> mChildren = new ArrayList<TtmlNode>();
+    public final long mRunId;
+    public final long mStartTimeMs;
+    public final long mEndTimeMs;
+
+    public TtmlNode(String name, String attributes, String text, long startTimeMs, long endTimeMs,
+            TtmlNode parent, long runId) {
+        mName = name;
+        mAttributes = attributes;
+        mText = text;
+        mStartTimeMs = startTimeMs;
+        mEndTimeMs = endTimeMs;
+        mParent = parent;
+        mRunId = runId;
+    }
+
+    /**
+     * Checks whether this node's time interval overlaps the given range.
+     *
+     * @param startTimeMs The start of the range to check (same unit as the node's
+     *        timestamps; field names say ms, but some callers pass "Us"-named
+     *        values — verify units at the call site).
+     * @param endTimeMs The end of the range to check.
+     * @return true if the given range overlaps the time range of this node.
+     */
+    public boolean isActive(long startTimeMs, long endTimeMs) {
+        return mStartTimeMs < endTimeMs && mEndTimeMs > startTimeMs;
+    }
+}
+
+/**
+ * A simple TTML parser (http://www.w3.org/TR/ttaf1-dfxp/) which supports DFXP
+ * presentation profile.
+ * <p>
+ * Supported features in this parser are:
+ * <ul>
+ * <li>content
+ * <li>core
+ * <li>presentation
+ * <li>profile
+ * <li>structure
+ * <li>time-offset
+ * <li>timing
+ * <li>tickRate
+ * <li>time-clock-with-frames
+ * <li>time-clock
+ * <li>time-offset-with-frames
+ * <li>time-offset-with-ticks
+ * </ul>
+ * </p>
+ *
+ * @hide
+ */
+class TtmlParser {
+    static final String TAG = "TtmlParser";
+
+    // TODO: read and apply the following attributes if specified.
+    private static final int DEFAULT_FRAMERATE = 30;
+    private static final int DEFAULT_SUBFRAMERATE = 1;
+    private static final int DEFAULT_TICKRATE = 1;
+
+    private XmlPullParser mParser;
+    private final TtmlNodeListener mListener;
+    // Tags every node created by the current parse() call.
+    private long mCurrentRunId;
+
+    public TtmlParser(TtmlNodeListener listener) {
+        mListener = listener;
+    }
+
+    /**
+     * Parse TTML data. Once this is called, all the previous data are
+     * reset and it starts parsing for the given text.
+     *
+     * @param ttmlText TTML text to parse.
+     * @param runId identifier propagated to every node built during this parse.
+     * @throws XmlPullParserException
+     * @throws IOException
+     */
+    public void parse(String ttmlText, long runId) throws XmlPullParserException, IOException {
+        mParser = null;
+        mCurrentRunId = runId;
+        loadParser(ttmlText);
+        parseTtml();
+    }
+
+    // Creates a fresh (non-namespace-aware) pull parser over the given text.
+    private void loadParser(String ttmlFragment) throws XmlPullParserException {
+        XmlPullParserFactory factory = XmlPullParserFactory.newInstance();
+        factory.setNamespaceAware(false);
+        mParser = factory.newPullParser();
+        StringReader in = new StringReader(ttmlFragment);
+        mParser.setInput(in);
+    }
+
+    // Appends attribute i of the parser to out as ' name="value"'.
+    private void extractAttribute(XmlPullParser parser, int i, StringBuilder out) {
+        out.append(" ");
+        out.append(parser.getAttributeName(i));
+        out.append("=\"");
+        out.append(parser.getAttributeValue(i));
+        out.append("\"");
+    }
+
+    // Main event loop: builds the node tree, notifying the listener when a <p>
+    // (cue) or the root <tt> element is completely parsed. Unsupported subtrees
+    // are skipped wholesale.
+    private void parseTtml() throws XmlPullParserException, IOException {
+        LinkedList<TtmlNode> nodeStack = new LinkedList<TtmlNode>();
+        // Depth inside an unsupported element; while > 0 no nodes are built.
+        int depthInUnsupportedTag = 0;
+        boolean active = true;
+        while (!isEndOfDoc()) {
+            int eventType = mParser.getEventType();
+            TtmlNode parent = nodeStack.peekLast();
+            if (active) {
+                if (eventType == XmlPullParser.START_TAG) {
+                    if (!isSupportedTag(mParser.getName())) {
+                        Log.w(TAG, "Unsupported tag " + mParser.getName() + " is ignored.");
+                        depthInUnsupportedTag++;
+                        active = false;
+                    } else {
+                        TtmlNode node = parseNode(parent);
+                        nodeStack.addLast(node);
+                        if (parent != null) {
+                            parent.mChildren.add(node);
+                        }
+                    }
+                } else if (eventType == XmlPullParser.TEXT) {
+                    String text = TtmlUtils.applyDefaultSpacePolicy(mParser.getText());
+                    if (!TextUtils.isEmpty(text)) {
+                        // NOTE(review): if non-whitespace text precedes any supported
+                        // start tag, parent is null here and this would NPE — assumes
+                        // well-formed documents rooted at <tt>; confirm.
+                        parent.mChildren.add(new TtmlNode(
+                                TtmlUtils.PCDATA, "", text, 0, TtmlUtils.INVALID_TIMESTAMP,
+                                parent, mCurrentRunId));
+
+                    }
+                } else if (eventType == XmlPullParser.END_TAG) {
+                    // A closed <p> is a finished cue; a closed <tt> is the whole document.
+                    if (mParser.getName().equals(TtmlUtils.TAG_P)) {
+                        mListener.onTtmlNodeParsed(nodeStack.getLast());
+                    } else if (mParser.getName().equals(TtmlUtils.TAG_TT)) {
+                        mListener.onRootNodeParsed(nodeStack.getLast());
+                    }
+                    nodeStack.removeLast();
+                }
+            } else {
+                // Inside an unsupported subtree: just track depth until it closes.
+                if (eventType == XmlPullParser.START_TAG) {
+                    depthInUnsupportedTag++;
+                } else if (eventType == XmlPullParser.END_TAG) {
+                    depthInUnsupportedTag--;
+                    if (depthInUnsupportedTag == 0) {
+                        active = true;
+                    }
+                }
+            }
+            mParser.next();
+        }
+    }
+
+    // Builds a TtmlNode from the START_TAG the parser is positioned on.
+    // Timing attributes (begin/end/dur) are resolved relative to the parent;
+    // all other attributes are preserved verbatim in the node's attribute string.
+    private TtmlNode parseNode(TtmlNode parent) throws XmlPullParserException, IOException {
+        int eventType = mParser.getEventType();
+        if (!(eventType == XmlPullParser.START_TAG)) {
+            return null;
+        }
+        StringBuilder attrStr = new StringBuilder();
+        long start = 0;
+        long end = TtmlUtils.INVALID_TIMESTAMP;
+        long dur = 0;
+        for (int i = 0; i < mParser.getAttributeCount(); ++i) {
+            String attr = mParser.getAttributeName(i);
+            String value = mParser.getAttributeValue(i);
+            // TODO: check if it's safe to ignore the namespace of attributes as follows.
+            attr = attr.replaceFirst("^.*:", "");
+            if (attr.equals(TtmlUtils.ATTR_BEGIN)) {
+                start = TtmlUtils.parseTimeExpression(value, DEFAULT_FRAMERATE,
+                        DEFAULT_SUBFRAMERATE, DEFAULT_TICKRATE);
+            } else if (attr.equals(TtmlUtils.ATTR_END)) {
+                end = TtmlUtils.parseTimeExpression(value, DEFAULT_FRAMERATE, DEFAULT_SUBFRAMERATE,
+                        DEFAULT_TICKRATE);
+            } else if (attr.equals(TtmlUtils.ATTR_DURATION)) {
+                dur = TtmlUtils.parseTimeExpression(value, DEFAULT_FRAMERATE, DEFAULT_SUBFRAMERATE,
+                        DEFAULT_TICKRATE);
+            } else {
+                extractAttribute(mParser, i, attrStr);
+            }
+        }
+        // begin/end are specified relative to the parent's begin time.
+        if (parent != null) {
+            start += parent.mStartTimeMs;
+            if (end != TtmlUtils.INVALID_TIMESTAMP) {
+                end += parent.mStartTimeMs;
+            }
+        }
+        // 'dur' wins over 'end' when both are present.
+        if (dur > 0) {
+            if (end != TtmlUtils.INVALID_TIMESTAMP) {
+                Log.e(TAG, "'dur' and 'end' attributes are defined at the same time." +
+                        "'end' value is ignored.");
+            }
+            end = start + dur;
+        }
+        if (parent != null) {
+            // If the end time remains unspecified, then the end point is
+            // interpreted as the end point of the external time interval.
+            // NOTE(review): the third condition is always true when the first two hold
+            // (end == INVALID_TIMESTAMP == Long.MAX_VALUE), so it is redundant.
+            if (end == TtmlUtils.INVALID_TIMESTAMP &&
+                    parent.mEndTimeMs != TtmlUtils.INVALID_TIMESTAMP &&
+                    end > parent.mEndTimeMs) {
+                end = parent.mEndTimeMs;
+            }
+        }
+        TtmlNode node = new TtmlNode(mParser.getName(), attrStr.toString(), null, start, end,
+                parent, mCurrentRunId);
+        return node;
+    }
+
+    private boolean isEndOfDoc() throws XmlPullParserException {
+        return (mParser.getEventType() == XmlPullParser.END_DOCUMENT);
+    }
+
+    // Whitelist of tags this parser understands; everything else is skipped.
+    private static boolean isSupportedTag(String tag) {
+        if (tag.equals(TtmlUtils.TAG_TT) || tag.equals(TtmlUtils.TAG_HEAD) ||
+                tag.equals(TtmlUtils.TAG_BODY) || tag.equals(TtmlUtils.TAG_DIV) ||
+                tag.equals(TtmlUtils.TAG_P) || tag.equals(TtmlUtils.TAG_SPAN) ||
+                tag.equals(TtmlUtils.TAG_BR) || tag.equals(TtmlUtils.TAG_STYLE) ||
+                tag.equals(TtmlUtils.TAG_STYLING) || tag.equals(TtmlUtils.TAG_LAYOUT) ||
+                tag.equals(TtmlUtils.TAG_REGION) || tag.equals(TtmlUtils.TAG_METADATA) ||
+                tag.equals(TtmlUtils.TAG_SMPTE_IMAGE) || tag.equals(TtmlUtils.TAG_SMPTE_DATA) ||
+                tag.equals(TtmlUtils.TAG_SMPTE_INFORMATION)) {
+            return true;
+        }
+        return false;
+    }
+}
+
+/** @hide */
+interface TtmlNodeListener {
+    /** Called when a complete &lt;p&gt; node (one cue paragraph) has been parsed. */
+    void onTtmlNodeParsed(TtmlNode node);
+    /** Called when the root &lt;tt&gt; node has been parsed, i.e. the document is complete. */
+    void onRootNodeParsed(TtmlNode node);
+}
+
+/** @hide */
+class TtmlTrack extends SubtitleTrack implements TtmlNodeListener {
+    private static final String TAG = "TtmlTrack";
+
+    private final TtmlParser mParser = new TtmlParser(this);
+    private final TtmlRenderingWidget mRenderingWidget;
+    // Text accumulated across onData() calls until an end-of-stream chunk arrives.
+    private String mParsingData;
+    // Non-null while a run is in progress; guards against interleaved runs.
+    private Long mCurrentRunID;
+
+    // Cue (<p>) nodes collected from the parser for the current document.
+    private final LinkedList<TtmlNode> mTtmlNodes;
+    // Sorted, de-duplicated start/end timestamps; consecutive pairs delimit cue intervals.
+    private final TreeSet<Long> mTimeEvents;
+    private TtmlNode mRootNode;
+
+    TtmlTrack(TtmlRenderingWidget renderingWidget, MediaFormat format) {
+        super(format);
+
+        mTtmlNodes = new LinkedList<TtmlNode>();
+        mTimeEvents = new TreeSet<Long>();
+        mRenderingWidget = renderingWidget;
+        mParsingData = "";
+    }
+
+    @Override
+    public TtmlRenderingWidget getRenderingWidget() {
+        return mRenderingWidget;
+    }
+
+    /**
+     * Accumulates subtitle data for the given run and parses it once the
+     * end-of-stream chunk arrives. Chunks from a different run while one is in
+     * progress are rejected with an IllegalStateException.
+     */
+    @Override
+    public void onData(byte[] data, boolean eos, long runID) {
+        try {
+            // TODO: handle UTF-8 conversion properly
+            String str = new String(data, "UTF-8");
+
+            // implement intermixing restriction for TTML.
+            synchronized(mParser) {
+                if (mCurrentRunID != null && runID != mCurrentRunID) {
+                    throw new IllegalStateException(
+                            "Run #" + mCurrentRunID +
+                            " in progress.  Cannot process run #" + runID);
+                }
+                mCurrentRunID = runID;
+                mParsingData += str;
+                if (eos) {
+                    try {
+                        mParser.parse(mParsingData, mCurrentRunID);
+                    } catch (XmlPullParserException e) {
+                        e.printStackTrace();
+                    } catch (IOException e) {
+                        e.printStackTrace();
+                    }
+                    finishedRun(runID);
+                    mParsingData = "";
+                    mCurrentRunID = null;
+                }
+            }
+        } catch (java.io.UnsupportedEncodingException e) {
+            Log.w(TAG, "subtitle data is not UTF-8 encoded: " + e);
+        }
+    }
+
+    @Override
+    public void onTtmlNodeParsed(TtmlNode node) {
+        mTtmlNodes.addLast(node);
+        addTimeEvents(node);
+    }
+
+    // Once the whole document is parsed, emit all cues and reset per-document state.
+    @Override
+    public void onRootNodeParsed(TtmlNode node) {
+        mRootNode = node;
+        TtmlCue cue = null;
+        while ((cue = getNextResult()) != null) {
+            addCue(cue);
+        }
+        mRootNode = null;
+        mTtmlNodes.clear();
+        mTimeEvents.clear();
+    }
+
+    @Override
+    public void updateView(Vector<SubtitleTrack.Cue> activeCues) {
+        if (!mVisible) {
+            // don't keep the state if we are not visible
+            return;
+        }
+
+        if (DEBUG && mTimeProvider != null) {
+            try {
+                Log.d(TAG, "at " +
+                        (mTimeProvider.getCurrentTimeUs(false, true) / 1000) +
+                        " ms the active cues are:");
+            } catch (IllegalStateException e) {
+                Log.d(TAG, "at (illegal state) the active cues are:");
+            }
+        }
+
+        mRenderingWidget.setActiveCues(activeCues);
+    }
+
+    /**
+     * Returns a {@link TtmlCue} in the presentation time order.
+     * {@code null} is returned if there is no more timed text to show.
+     */
+    public TtmlCue getNextResult() {
+        // Consecutive time events delimit one candidate cue interval; skip
+        // intervals during which no node is active.
+        while (mTimeEvents.size() >= 2) {
+            long start = mTimeEvents.pollFirst();
+            long end = mTimeEvents.first();
+            List<TtmlNode> activeCues = getActiveNodes(start, end);
+            if (!activeCues.isEmpty()) {
+                return new TtmlCue(start, end,
+                        TtmlUtils.applySpacePolicy(TtmlUtils.extractText(
+                                mRootNode, start, end), false),
+                        TtmlUtils.extractTtmlFragment(mRootNode, start, end));
+            }
+        }
+        return null;
+    }
+
+    // Registers the start/end timestamps of the node and all its descendants.
+    private void addTimeEvents(TtmlNode node) {
+        mTimeEvents.add(node.mStartTimeMs);
+        mTimeEvents.add(node.mEndTimeMs);
+        for (int i = 0; i < node.mChildren.size(); ++i) {
+            addTimeEvents(node.mChildren.get(i));
+        }
+    }
+
+    private List<TtmlNode> getActiveNodes(long startTimeUs, long endTimeUs) {
+        List<TtmlNode> activeNodes = new ArrayList<TtmlNode>();
+        for (int i = 0; i < mTtmlNodes.size(); ++i) {
+            TtmlNode node = mTtmlNodes.get(i);
+            if (node.isActive(startTimeUs, endTimeUs)) {
+                activeNodes.add(node);
+            }
+        }
+        return activeNodes;
+    }
+}
+
+/**
+ * Widget capable of rendering TTML captions.
+ *
+ * @hide
+ */
+class TtmlRenderingWidget extends LinearLayout implements SubtitleTrack.RenderingWidget {
+
+    /** Callback for rendering changes. */
+    private OnChangedListener mListener;
+    private final TextView mTextView;
+
+    public TtmlRenderingWidget(Context context) {
+        this(context, null);
+    }
+
+    public TtmlRenderingWidget(Context context, AttributeSet attrs) {
+        this(context, attrs, 0);
+    }
+
+    public TtmlRenderingWidget(Context context, AttributeSet attrs, int defStyleAttr) {
+        this(context, attrs, defStyleAttr, 0);
+    }
+
+    public TtmlRenderingWidget(Context context, AttributeSet attrs, int defStyleAttr,
+            int defStyleRes) {
+        super(context, attrs, defStyleAttr, defStyleRes);
+        // Cannot render text over video when layer type is hardware.
+        setLayerType(View.LAYER_TYPE_SOFTWARE, null);
+
+        CaptioningManager captionManager = (CaptioningManager) context.getSystemService(
+                Context.CAPTIONING_SERVICE);
+        mTextView = new TextView(context);
+        mTextView.setTextColor(captionManager.getUserStyle().foregroundColor);
+        addView(mTextView, LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT);
+        mTextView.setGravity(Gravity.BOTTOM | Gravity.CENTER_HORIZONTAL);
+    }
+
+    @Override
+    public void setOnChangedListener(OnChangedListener listener) {
+        mListener = listener;
+    }
+
+    /** Measures and lays out the widget at exactly the given size. */
+    @Override
+    public void setSize(int width, int height) {
+        final int widthSpec = MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY);
+        final int heightSpec = MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY);
+
+        measure(widthSpec, heightSpec);
+        layout(0, 0, width, height);
+    }
+
+    @Override
+    public void setVisible(boolean visible) {
+        if (visible) {
+            setVisibility(View.VISIBLE);
+        } else {
+            setVisibility(View.GONE);
+        }
+    }
+
+    @Override
+    public void onAttachedToWindow() {
+        super.onAttachedToWindow();
+    }
+
+    @Override
+    public void onDetachedFromWindow() {
+        super.onDetachedFromWindow();
+    }
+
+    /**
+     * Displays the text of all active cues, one per line, and notifies the
+     * change listener if one is set.
+     */
+    public void setActiveCues(Vector<SubtitleTrack.Cue> activeCues) {
+        final int count = activeCues.size();
+        // Build the combined text with a StringBuilder instead of repeated
+        // String concatenation in the loop.
+        StringBuilder subtitleText = new StringBuilder();
+        for (int i = 0; i < count; i++) {
+            TtmlCue cue = (TtmlCue) activeCues.get(i);
+            subtitleText.append(cue.mText).append("\n");
+        }
+        mTextView.setText(subtitleText.toString());
+
+        if (mListener != null) {
+            mListener.onChanged(this);
+        }
+    }
+}
diff --git a/android/media/UnsupportedSchemeException.java b/android/media/UnsupportedSchemeException.java
new file mode 100644
index 00000000..d7b5d47f
--- /dev/null
+++ b/android/media/UnsupportedSchemeException.java
@@ -0,0 +1,27 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+/**
+ * Exception thrown when an attempt is made to construct a {@link MediaDrm} object
+ * using a crypto scheme UUID that is not supported by the device.
+ */
+public final class UnsupportedSchemeException extends MediaDrmException {
+    /** @param detailMessage description of the unsupported scheme. */
+    public UnsupportedSchemeException(String detailMessage) {
+        super(detailMessage);
+    }
+}
diff --git a/android/media/Utils.java b/android/media/Utils.java
new file mode 100644
index 00000000..5b62f16a
--- /dev/null
+++ b/android/media/Utils.java
@@ -0,0 +1,381 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.content.Context;
+import android.content.ContentResolver;
+import android.database.Cursor;
+import android.net.Uri;
+import android.os.Environment;
+import android.os.FileUtils;
+import android.provider.OpenableColumns;
+import android.util.Log;
+import android.util.Pair;
+import android.util.Range;
+import android.util.Rational;
+import android.util.Size;
+
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.Vector;
+
+// package private
+class Utils {
+    // Log tag shared by the parse* helpers in this class.
+    private static final String TAG = "Utils";
+
+    /**
+     * Sorts distinct (non-intersecting) range array in ascending order.
+     * @throws java.lang.IllegalArgumentException if ranges are not distinct
+     */
+    public static <T extends Comparable<? super T>> void sortDistinctRanges(Range<T>[] ranges) {
+        Comparator<Range<T>> byPosition = new Comparator<Range<T>>() {
+            @Override
+            public int compare(Range<T> lhs, Range<T> rhs) {
+                // lhs entirely below rhs?
+                if (lhs.getUpper().compareTo(rhs.getLower()) < 0) {
+                    return -1;
+                }
+                // lhs entirely above rhs?
+                if (lhs.getLower().compareTo(rhs.getUpper()) > 0) {
+                    return 1;
+                }
+                // Overlap violates the distinctness precondition.
+                throw new IllegalArgumentException(
+                        "sample rate ranges must be distinct (" + lhs + " and " + rhs + ")");
+            }
+        };
+        Arrays.sort(ranges, byPosition);
+    }
+
+    /**
+     * Returns the intersection of two sets of non-intersecting ranges
+     * @param one a sorted set of non-intersecting ranges in ascending order
+     * @param another another sorted set of non-intersecting ranges in ascending order
+     * @return the intersection of the two sets, sorted in ascending order
+     */
+    public static <T extends Comparable<? super T>>
+            Range<T>[] intersectSortedDistinctRanges(Range<T>[] one, Range<T>[] another) {
+        int ix = 0;
+        Vector<Range<T>> result = new Vector<Range<T>>();
+        for (Range<T> range: another) {
+            // Skip entries of 'one' that end entirely before 'range' starts.
+            while (ix < one.length &&
+                    one[ix].getUpper().compareTo(range.getLower()) < 0) {
+                ++ix;
+            }
+            // Collect every entry of 'one' that ends inside 'range'.
+            while (ix < one.length &&
+                    one[ix].getUpper().compareTo(range.getUpper()) < 0) {
+                result.add(range.intersect(one[ix]));
+                ++ix;
+            }
+            if (ix == one.length) {
+                break;
+            }
+            // one[ix] extends past the end of 'range'; it still overlaps if it
+            // starts at or before range's upper bound.
+            if (one[ix].getLower().compareTo(range.getUpper()) <= 0) {
+                result.add(range.intersect(one[ix]));
+            }
+        }
+        return result.toArray(new Range[result.size()]);
+    }
+
+ /**
+ * Returns the index of the range that contains a value in a sorted array of distinct ranges.
+ * @param ranges a sorted array of non-intersecting ranges in ascending order
+ * @param value the value to search for
+ * @return if the value is in one of the ranges, it returns the index of that range. Otherwise,
+ * the return value is {@code (-1-index)} for the {@code index} of the range that is
+ * immediately following {@code value}.
+ */
+ public static <T extends Comparable<? super T>>
+ int binarySearchDistinctRanges(Range<T>[] ranges, T value) {
+ return Arrays.binarySearch(ranges, Range.create(value, value),
+ new Comparator<Range<T>>() {
+ @Override
+ public int compare(Range<T> lhs, Range<T> rhs) {
+ if (lhs.getUpper().compareTo(rhs.getLower()) < 0) {
+ return -1;
+ } else if (lhs.getLower().compareTo(rhs.getUpper()) > 0) {
+ return 1;
+ }
+ return 0;
+ }
+ });
+ }
+
+ /**
+ * Returns greatest common divisor
+ */
+ static int gcd(int a, int b) {
+ if (a == 0 && b == 0) {
+ return 1;
+ }
+ if (b < 0) {
+ b = -b;
+ }
+ if (a < 0) {
+ a = -a;
+ }
+ while (a != 0) {
+ int c = b % a;
+ b = a;
+ a = c;
+ }
+ return b;
+ }
+
+ /** Returns the equivalent factored range {@code newrange}, where for every
+ * {@code e}: {@code newrange.contains(e)} implies that {@code range.contains(e * factor)},
+ * and {@code !newrange.contains(e)} implies that {@code !range.contains(e * factor)}.
+ */
+ static Range<Integer>factorRange(Range<Integer> range, int factor) {
+ if (factor == 1) {
+ return range;
+ }
+ return Range.create(divUp(range.getLower(), factor), range.getUpper() / factor);
+ }
+
+ /** Returns the equivalent factored range {@code newrange}, where for every
+ * {@code e}: {@code newrange.contains(e)} implies that {@code range.contains(e * factor)},
+ * and {@code !newrange.contains(e)} implies that {@code !range.contains(e * factor)}.
+ */
+ static Range<Long>factorRange(Range<Long> range, long factor) {
+ if (factor == 1) {
+ return range;
+ }
+ return Range.create(divUp(range.getLower(), factor), range.getUpper() / factor);
+ }
+
+    /**
+     * Multiplies {@code ratio} by {@code num / den}. The factor is first reduced by
+     * its gcd; the double-to-int casts clamp on overflow (JLS narrowing conversion),
+     * which is what the "saturate" comments refer to.
+     */
+    private static Rational scaleRatio(Rational ratio, int num, int den) {
+        int common = gcd(num, den);
+        num /= common;
+        den /= common;
+        return new Rational(
+                (int)(ratio.getNumerator() * (double)num),     // saturate to int
+                (int)(ratio.getDenominator() * (double)den));  // saturate to int
+    }
+
+ static Range<Rational> scaleRange(Range<Rational> range, int num, int den) {
+ if (num == den) {
+ return range;
+ }
+ return Range.create(
+ scaleRatio(range.getLower(), num, den),
+ scaleRatio(range.getUpper(), num, den));
+ }
+
+    /**
+     * Shrinks {@code range} so both endpoints are multiples of {@code align}:
+     * lower is rounded up, upper is rounded down.
+     * NOTE(review): if no multiple of align lies inside the range, Range.intersect
+     * is given an empty interval — confirm callers guarantee this cannot happen.
+     */
+    static Range<Integer> alignRange(Range<Integer> range, int align) {
+        return range.intersect(
+                divUp(range.getLower(), align) * align,
+                (range.getUpper() / align) * align);
+    }
+
+ static int divUp(int num, int den) {
+ return (num + den - 1) / den;
+ }
+
+ static long divUp(long num, long den) {
+ return (num + den - 1) / den;
+ }
+
+    /**
+     * Returns least common multiple of {@code a} and {@code b}.
+     * The product is computed in {@code long} to avoid int overflow.
+     *
+     * @throws IllegalArgumentException if either argument is zero
+     */
+    private static long lcm(int a, int b) {
+        if (a == 0 || b == 0) {
+            // Fixed typo in the message: "lce" -> "lcm".
+            throw new IllegalArgumentException("lcm is not defined for zero arguments");
+        }
+        return (long)a * b / gcd(a, b);
+    }
+
+    /** Returns the int range [(int)v, ceil(v)] spanning {@code v}; the lower cast truncates toward zero. */
+    static Range<Integer> intRangeFor(double v) {
+        return Range.create((int)v, (int)Math.ceil(v));
+    }
+
+    /** Returns the long range [(long)v, ceil(v)] spanning {@code v}; the lower cast truncates toward zero. */
+    static Range<Long> longRangeFor(double v) {
+        return Range.create((long)v, (long)Math.ceil(v));
+    }
+
+    /**
+     * Parses {@code o} as a {@link Size}, returning {@code fallback} on failure.
+     * A null input falls back silently; a malformed value falls back with a warning logged.
+     */
+    static Size parseSize(Object o, Size fallback) {
+        try {
+            return Size.parseSize((String) o);
+        } catch (ClassCastException e) {
+        } catch (NumberFormatException e) {
+        } catch (NullPointerException e) {
+            return fallback;  // null means "not specified" — no warning
+        }
+        // Reached via the empty catches above: value was present but unparseable.
+        Log.w(TAG, "could not parse size '" + o + "'");
+        return fallback;
+    }
+
+    /**
+     * Parses {@code o} as a decimal integer, returning {@code fallback} on failure.
+     * A null input falls back silently; a malformed value falls back with a warning logged.
+     */
+    static int parseIntSafely(Object o, int fallback) {
+        if (o == null) {
+            return fallback;
+        }
+        try {
+            String s = (String)o;
+            return Integer.parseInt(s);
+        } catch (ClassCastException e) {
+        } catch (NumberFormatException e) {
+        } catch (NullPointerException e) {
+            // Unreachable given the null check above; kept for symmetry with the other parsers.
+            return fallback;
+        }
+        Log.w(TAG, "could not parse integer '" + o + "'");
+        return fallback;
+    }
+
+    /**
+     * Parses {@code o} as either "lo-hi" or a single integer (yielding a point range),
+     * returning {@code fallback} on failure. A null input falls back silently;
+     * any other failure falls back with a warning logged.
+     */
+    static Range<Integer> parseIntRange(Object o, Range<Integer> fallback) {
+        try {
+            String s = (String)o;
+            int ix = s.indexOf('-');
+            if (ix >= 0) {
+                return Range.create(
+                        Integer.parseInt(s.substring(0, ix), 10),
+                        Integer.parseInt(s.substring(ix + 1), 10));
+            }
+            int value = Integer.parseInt(s);
+            return Range.create(value, value);
+        } catch (ClassCastException e) {
+        } catch (NumberFormatException e) {
+        } catch (NullPointerException e) {
+            return fallback;  // null means "not specified" — no warning
+        } catch (IllegalArgumentException e) {
+            // e.g. lo > hi rejected by Range.create; falls through to the warning
+        }
+        Log.w(TAG, "could not parse integer range '" + o + "'");
+        return fallback;
+    }
+
+ static Range<Long> parseLongRange(Object o, Range<Long> fallback) {
+ try {
+ String s = (String)o;
+ int ix = s.indexOf('-');
+ if (ix >= 0) {
+ return Range.create(
+ Long.parseLong(s.substring(0, ix), 10),
+ Long.parseLong(s.substring(ix + 1), 10));
+ }
+ long value = Long.parseLong(s);
+ return Range.create(value, value);
+ } catch (ClassCastException e) {
+ } catch (NumberFormatException e) {
+ } catch (NullPointerException e) {
+ return fallback;
+ } catch (IllegalArgumentException e) {
+ }
+ Log.w(TAG, "could not parse long range '" + o + "'");
+ return fallback;
+ }
+
+    /**
+     * Parses {@code o} as a Rational Range ("lo-hi") or a single Rational
+     * (yielding a point range).  Null input returns {@code fallback} silently;
+     * malformed input logs a warning first.
+     */
+    static Range<Rational> parseRationalRange(Object o, Range<Rational> fallback) {
+        try {
+            String s = (String)o;
+            int ix = s.indexOf('-');
+            if (ix >= 0) {
+                return Range.create(
+                        Rational.parseRational(s.substring(0, ix)),
+                        Rational.parseRational(s.substring(ix + 1)));
+            }
+            Rational value = Rational.parseRational(s);
+            return Range.create(value, value);
+        } catch (ClassCastException e) {
+        } catch (NumberFormatException e) {
+        } catch (NullPointerException e) {
+            // null means the value was not specified -- no warning needed.
+            return fallback;
+        } catch (IllegalArgumentException e) {
+            // e.g. Range.create with lower > upper.
+        }
+        Log.w(TAG, "could not parse rational range '" + o + "'");
+        return fallback;
+    }
+
+    /**
+     * Parses {@code o} as a pair of Sizes ("WxH-WxH") or a single Size
+     * (yielding an equal pair).  Returns null -- not a fallback -- silently
+     * for null input, and with a logged warning for malformed input.
+     */
+    static Pair<Size, Size> parseSizeRange(Object o) {
+        try {
+            String s = (String)o;
+            int ix = s.indexOf('-');
+            if (ix >= 0) {
+                return Pair.create(
+                        Size.parseSize(s.substring(0, ix)),
+                        Size.parseSize(s.substring(ix + 1)));
+            }
+            Size value = Size.parseSize(s);
+            return Pair.create(value, value);
+        } catch (ClassCastException e) {
+        } catch (NumberFormatException e) {
+        } catch (NullPointerException e) {
+            // null means the value was not specified -- no warning needed.
+            return null;
+        } catch (IllegalArgumentException e) {
+        }
+        Log.w(TAG, "could not parse size range '" + o + "'");
+        return null;
+    }
+
+    /**
+     * Creates a unique file in the specified external storage with the desired name. If the name is
+     * taken, the new file's name will have '(%d)' to avoid overwriting files.
+     *
+     * NOTE(review): {@code context} is currently unused by the implementation.
+     *
+     * @param context {@link Context} to query the file name from.
+     * @param subdirectory One of the directories specified in {@link android.os.Environment}
+     * @param fileName desired name for the file.
+     * @param mimeType MIME type of the file to create.
+     * @return the File object in the storage, or null if an error occurs.
+     */
+    public static File getUniqueExternalFile(Context context, String subdirectory, String fileName,
+            String mimeType) {
+        File externalStorage = Environment.getExternalStoragePublicDirectory(subdirectory);
+        // Make sure the storage subdirectory exists.
+        // NOTE(review): the mkdirs() result is ignored; a failure surfaces
+        // below as a FileNotFoundException from buildUniqueFile.
+        externalStorage.mkdirs();
+
+        File outFile = null;
+        try {
+            // Ensure the file has a unique name, as to not override any existing file
+            outFile = FileUtils.buildUniqueFile(externalStorage, mimeType, fileName);
+        } catch (FileNotFoundException e) {
+            // This might also be reached if the number of repeated files gets too high
+            Log.e(TAG, "Unable to get a unique file name: " + e);
+            return null;
+        }
+        return outFile;
+    }
+
+    /**
+     * Returns a file's display name from its {@link android.content.ContentResolver#SCHEME_FILE}
+     * or {@link android.content.ContentResolver#SCHEME_CONTENT} Uri. The display name of a file
+     * includes its extension.
+     *
+     * @param context Context trying to resolve the file's display name.
+     * @param uri Uri of the file.
+     * @return the file's display name, or the uri's string if something fails or the uri isn't in
+     * the schemes specified above.
+     */
+    static String getFileDisplayNameFromUri(Context context, Uri uri) {
+        String scheme = uri.getScheme();
+
+        if (ContentResolver.SCHEME_FILE.equals(scheme)) {
+            return uri.getLastPathSegment();
+        } else if (ContentResolver.SCHEME_CONTENT.equals(scheme)) {
+            // We need to query the ContentResolver to get the actual file name as the Uri masks it.
+            // This means we want the name used for display purposes only.
+            String[] proj = {
+                    OpenableColumns.DISPLAY_NAME
+            };
+            try (Cursor cursor = context.getContentResolver().query(uri, proj, null, null, null)) {
+                if (cursor != null && cursor.getCount() != 0) {
+                    cursor.moveToFirst();
+                    // NOTE(review): getColumnIndex could return -1 if the provider
+                    // omits DISPLAY_NAME -- assumed not to happen here; confirm.
+                    return cursor.getString(cursor.getColumnIndex(OpenableColumns.DISPLAY_NAME));
+                }
+            }
+        }
+
+        // This will only happen if the Uri isn't either SCHEME_CONTENT or SCHEME_FILE, so we assume
+        // it already represents the file's name.
+        return uri.toString();
+    }
+}
diff --git a/android/media/VolumeAutomation.java b/android/media/VolumeAutomation.java
new file mode 100644
index 00000000..ff2e6459
--- /dev/null
+++ b/android/media/VolumeAutomation.java
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.annotation.NonNull;
+import android.media.VolumeShaper.Configuration;
+
+/**
+ * {@code VolumeAutomation} defines an interface for automatic volume control
+ * of {@link AudioTrack} and {@link MediaPlayer} objects.
+ */
+public interface VolumeAutomation {
+    /**
+     * Returns a {@link VolumeShaper} object that can be used to modify the volume envelope
+     * of the player or track.
+     *
+     * @param configuration the {@link VolumeShaper.Configuration configuration}
+     * that specifies the curve and duration to use.
+     * @return a {@code VolumeShaper} object
+     * @throws IllegalArgumentException if the {@code configuration} is not allowed by the player.
+     * @throws IllegalStateException if too many {@code VolumeShaper}s are requested
+     * or the state of the player does not permit its creation (e.g. player is released).
+     */
+    public @NonNull VolumeShaper createVolumeShaper(
+            @NonNull VolumeShaper.Configuration configuration);
+}
diff --git a/android/media/VolumePolicy.java b/android/media/VolumePolicy.java
new file mode 100644
index 00000000..bbcce82f
--- /dev/null
+++ b/android/media/VolumePolicy.java
@@ -0,0 +1,113 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.os.Parcel;
+import android.os.Parcelable;
+
+import java.util.Objects;
+
+/** @hide */
+/** @hide */
+public final class VolumePolicy implements Parcelable {
+    // System default: do-not-disturb when silent, 400 ms vibrate->silent debounce.
+    public static final VolumePolicy DEFAULT = new VolumePolicy(false, false, true, 400);
+
+    /**
+     * Accessibility volume policy where the STREAM_MUSIC volume (i.e. media volume) affects
+     * the STREAM_ACCESSIBILITY volume, and vice-versa.
+     */
+    public static final int A11Y_MODE_MEDIA_A11Y_VOLUME = 0;
+    /**
+     * Accessibility volume policy where the STREAM_ACCESSIBILITY volume is independent from
+     * any other volume.
+     */
+    public static final int A11Y_MODE_INDEPENDENT_A11Y_VOLUME = 1;
+
+    /** Allow volume adjustments lower from vibrate to enter ringer mode = silent */
+    public final boolean volumeDownToEnterSilent;
+
+    /** Allow volume adjustments higher to exit ringer mode = silent */
+    public final boolean volumeUpToExitSilent;
+
+    /** Automatically enter do not disturb when ringer mode = silent */
+    public final boolean doNotDisturbWhenSilent;
+
+    /** Only allow volume adjustment from vibrate to silent after this
+     number of milliseconds since an adjustment from normal to vibrate. */
+    public final int vibrateToSilentDebounce;
+
+    /** Constructs an immutable policy; fields are captured as given. */
+    public VolumePolicy(boolean volumeDownToEnterSilent, boolean volumeUpToExitSilent,
+            boolean doNotDisturbWhenSilent, int vibrateToSilentDebounce) {
+        this.volumeDownToEnterSilent = volumeDownToEnterSilent;
+        this.volumeUpToExitSilent = volumeUpToExitSilent;
+        this.doNotDisturbWhenSilent = doNotDisturbWhenSilent;
+        this.vibrateToSilentDebounce = vibrateToSilentDebounce;
+    }
+
+    @Override
+    public String toString() {
+        return "VolumePolicy[volumeDownToEnterSilent=" + volumeDownToEnterSilent
+                + ",volumeUpToExitSilent=" + volumeUpToExitSilent
+                + ",doNotDisturbWhenSilent=" + doNotDisturbWhenSilent
+                + ",vibrateToSilentDebounce=" + vibrateToSilentDebounce + "]";
+    }
+
+    @Override
+    public int hashCode() {
+        // Consistent with equals(): hashes the same four fields.
+        return Objects.hash(volumeDownToEnterSilent, volumeUpToExitSilent, doNotDisturbWhenSilent,
+                vibrateToSilentDebounce);
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (!(o instanceof VolumePolicy)) return false;
+        if (o == this) return true;
+        final VolumePolicy other = (VolumePolicy) o;
+        return other.volumeDownToEnterSilent == volumeDownToEnterSilent
+                && other.volumeUpToExitSilent == volumeUpToExitSilent
+                && other.doNotDisturbWhenSilent == doNotDisturbWhenSilent
+                && other.vibrateToSilentDebounce == vibrateToSilentDebounce;
+    }
+
+    @Override
+    public int describeContents() {
+        return 0;
+    }
+
+    @Override
+    public void writeToParcel(Parcel dest, int flags) {
+        // Booleans are marshalled as 1/0 ints; order must match CREATOR below.
+        dest.writeInt(volumeDownToEnterSilent ? 1 : 0);
+        dest.writeInt(volumeUpToExitSilent ? 1 : 0);
+        dest.writeInt(doNotDisturbWhenSilent ? 1 : 0);
+        dest.writeInt(vibrateToSilentDebounce);
+    }
+
+    public static final Parcelable.Creator<VolumePolicy> CREATOR
+            = new Parcelable.Creator<VolumePolicy>() {
+        @Override
+        public VolumePolicy createFromParcel(Parcel p) {
+            // Read order mirrors writeToParcel().
+            return new VolumePolicy(p.readInt() != 0,
+                    p.readInt() != 0,
+                    p.readInt() != 0,
+                    p.readInt());
+        }
+
+        @Override
+        public VolumePolicy[] newArray(int size) {
+            return new VolumePolicy[size];
+        }
+    };
+} \ No newline at end of file
diff --git a/android/media/VolumeProvider.java b/android/media/VolumeProvider.java
new file mode 100644
index 00000000..1c017c56
--- /dev/null
+++ b/android/media/VolumeProvider.java
@@ -0,0 +1,161 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+import android.annotation.IntDef;
+import android.media.session.MediaSession;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+/**
+ * Handles requests to adjust or set the volume on a session. This is also used
+ * to push volume updates back to the session. The provider must call
+ * {@link #setCurrentVolume(int)} each time the volume being provided changes.
+ * <p>
+ * You can set a volume provider on a session by calling
+ * {@link MediaSession#setPlaybackToRemote}.
+ */
+public abstract class VolumeProvider {
+
+    /**
+     * @hide
+     */
+    @IntDef({VOLUME_CONTROL_FIXED, VOLUME_CONTROL_RELATIVE, VOLUME_CONTROL_ABSOLUTE})
+    @Retention(RetentionPolicy.SOURCE)
+    public @interface ControlType {}
+
+    /**
+     * The volume is fixed and can not be modified. Requests to change volume
+     * should be ignored.
+     */
+    public static final int VOLUME_CONTROL_FIXED = 0;
+
+    /**
+     * The volume control uses relative adjustment via
+     * {@link #onAdjustVolume(int)}. Attempts to set the volume to a specific
+     * value should be ignored.
+     */
+    public static final int VOLUME_CONTROL_RELATIVE = 1;
+
+    /**
+     * The volume control uses an absolute value. It may be adjusted using
+     * {@link #onAdjustVolume(int)} or set directly using
+     * {@link #onSetVolumeTo(int)}.
+     */
+    public static final int VOLUME_CONTROL_ABSOLUTE = 2;
+
+    // Immutable configuration captured at construction time.
+    private final int mControlType;
+    private final int mMaxVolume;
+    // Last value passed to setCurrentVolume(). Not synchronized -- presumably
+    // confined to a single thread; confirm before concurrent use.
+    private int mCurrentVolume;
+    // Listener attached by the system through the hidden setCallback().
+    private Callback mCallback;
+
+    /**
+     * Create a new volume provider for handling volume events. You must specify
+     * the type of volume control, the maximum volume that can be used, and the
+     * current volume on the output.
+     *
+     * @param volumeControl The method for controlling volume that is used by
+     *            this provider.
+     * @param maxVolume The maximum allowed volume.
+     * @param currentVolume The current volume on the output.
+     */
+    public VolumeProvider(@ControlType int volumeControl, int maxVolume, int currentVolume) {
+        mControlType = volumeControl;
+        mMaxVolume = maxVolume;
+        mCurrentVolume = currentVolume;
+    }
+
+    /**
+     * Get the volume control type that this volume provider uses.
+     *
+     * @return The volume control type for this volume provider
+     */
+    @ControlType
+    public final int getVolumeControl() {
+        return mControlType;
+    }
+
+    /**
+     * Get the maximum volume this provider allows.
+     *
+     * @return The max allowed volume.
+     */
+    public final int getMaxVolume() {
+        return mMaxVolume;
+    }
+
+    /**
+     * Gets the current volume. This will be the last value set by
+     * {@link #setCurrentVolume(int)}.
+     *
+     * @return The current volume.
+     */
+    public final int getCurrentVolume() {
+        return mCurrentVolume;
+    }
+
+    /**
+     * Notify the system that the current volume has been changed. This must be
+     * called every time the volume changes to ensure it is displayed properly.
+     *
+     * @param currentVolume The current volume on the output.
+     */
+    public final void setCurrentVolume(int currentVolume) {
+        mCurrentVolume = currentVolume;
+        // Forward to the system only if a callback has been attached.
+        if (mCallback != null) {
+            mCallback.onVolumeChanged(this);
+        }
+    }
+
+    /**
+     * Override to handle requests to set the volume of the current output.
+     * After the volume has been modified {@link #setCurrentVolume} must be
+     * called to notify the system.
+     *
+     * @param volume The volume to set the output to.
+     */
+    public void onSetVolumeTo(int volume) {
+    }
+
+    /**
+     * Override to handle requests to adjust the volume of the current output.
+     * Direction will be one of {@link AudioManager#ADJUST_LOWER},
+     * {@link AudioManager#ADJUST_RAISE}, {@link AudioManager#ADJUST_SAME}.
+     * After the volume has been modified {@link #setCurrentVolume} must be
+     * called to notify the system.
+     *
+     * @param direction The direction to change the volume in.
+     */
+    public void onAdjustVolume(int direction) {
+    }
+
+    /**
+     * Sets a callback to receive volume changes.
+     * @hide
+     */
+    public void setCallback(Callback callback) {
+        mCallback = callback;
+    }
+
+    /**
+     * Listens for changes to the volume.
+     * @hide
+     */
+    public static abstract class Callback {
+        public abstract void onVolumeChanged(VolumeProvider volumeProvider);
+    }
+}
diff --git a/android/media/VolumeShaper.java b/android/media/VolumeShaper.java
new file mode 100644
index 00000000..30687065
--- /dev/null
+++ b/android/media/VolumeShaper.java
@@ -0,0 +1,1420 @@
+/*
+ * Copyright 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media;
+
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.os.Parcel;
+import android.os.Parcelable;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.AutoCloseable;
+import java.lang.ref.WeakReference;
+import java.util.Arrays;
+import java.util.Objects;
+
+/**
+ * The {@code VolumeShaper} class is used to automatically control audio volume during media
+ * playback, allowing simple implementation of transition effects and ducking.
+ * It is created from implementations of {@code VolumeAutomation},
+ * such as {@code MediaPlayer} and {@code AudioTrack} (referred to as "players" below),
+ * by {@link MediaPlayer#createVolumeShaper} or {@link AudioTrack#createVolumeShaper}.
+ *
+ * A {@code VolumeShaper} is intended for short volume changes.
+ * If the audio output sink changes during
+ * a {@code VolumeShaper} transition, the precise curve position may be lost, and the
+ * {@code VolumeShaper} may advance to the end of the curve for the new audio output sink.
+ *
+ * The {@code VolumeShaper} appears as an additional scaling on the audio output,
+ * and adjusts independently of track or stream volume controls.
+ */
+public final class VolumeShaper implements AutoCloseable {
+ /* member variables */
+ private int mId;
+ private final WeakReference<PlayerBase> mWeakPlayerBase;
+
+    /**
+     * Package-private: created via {@code VolumeAutomation.createVolumeShaper}.
+     * Sends the configuration deferred; it takes effect on a PLAY operation.
+     */
+    /* package */ VolumeShaper(
+            @NonNull Configuration configuration, @NonNull PlayerBase playerBase) {
+        // Hold the player weakly so a leaked shaper cannot keep it alive.
+        mWeakPlayerBase = new WeakReference<PlayerBase>(playerBase);
+        mId = applyPlayer(configuration, new Operation.Builder().defer().build());
+    }
+
+    /** Returns the native shaper id; updated by {@link #replace}. */
+    /* package */ int getId() {
+        return mId;
+    }
+
+ /**
+ * Applies the {@link VolumeShaper.Operation} to the {@code VolumeShaper}.
+ *
+ * Applying {@link VolumeShaper.Operation#PLAY} after {@code PLAY}
+ * or {@link VolumeShaper.Operation#REVERSE} after
+ * {@code REVERSE} has no effect.
+ *
+ * Applying {@link VolumeShaper.Operation#PLAY} when the player
+ * hasn't started will synchronously start the {@code VolumeShaper} when
+ * playback begins.
+ *
+ * @param operation the {@code operation} to apply.
+ * @throws IllegalStateException if the player is uninitialized or if there
+ * is a critical failure. In that case, the {@code VolumeShaper} should be
+ * recreated.
+ */
+    public void apply(@NonNull Operation operation) {
+        // An id-only Configuration addresses the existing native shaper.
+        /* void */ applyPlayer(new VolumeShaper.Configuration(mId), operation);
+    }
+
+ /**
+ * Replaces the current {@code VolumeShaper}
+ * {@code configuration} with a new {@code configuration}.
+ *
+ * This allows the user to change the volume shape
+ * while the existing {@code VolumeShaper} is in effect.
+ *
+ * The effect of {@code replace()} is similar to an atomic close of
+ * the existing {@code VolumeShaper} and creation of a new {@code VolumeShaper}.
+ *
+ * If the {@code operation} is {@link VolumeShaper.Operation#PLAY} then the
+ * new curve starts immediately.
+ *
+ * If the {@code operation} is
+ * {@link VolumeShaper.Operation#REVERSE}, then the new curve will
+ * be delayed until {@code PLAY} is applied.
+ *
+ * @param configuration the new {@code configuration} to use.
+ * @param operation the {@code operation} to apply to the {@code VolumeShaper}
+ * @param join if true, match the start volume of the
+ * new {@code configuration} to the current volume of the existing
+ * {@code VolumeShaper}, to avoid discontinuity.
+ * @throws IllegalStateException if the player is uninitialized or if there
+ * is a critical failure. In that case, the {@code VolumeShaper} should be
+ * recreated.
+ */
+    public void replace(
+            @NonNull Configuration configuration, @NonNull Operation operation, boolean join) {
+        // The replace operation swaps in the new configuration; the returned
+        // id identifies the replacement shaper.
+        mId = applyPlayer(
+                configuration,
+                new Operation.Builder(operation).replace(mId, join).build());
+    }
+
+ /**
+ * Returns the current volume scale attributable to the {@code VolumeShaper}.
+ *
+ * This is the last volume from the {@code VolumeShaper} used for the player,
+ * or the initial volume if the {@code VolumeShaper} hasn't been started with
+ * {@link VolumeShaper.Operation#PLAY}.
+ *
+ * @return the volume, linearly represented as a value between 0.f and 1.f.
+ * @throws IllegalStateException if the player is uninitialized or if there
+ * is a critical failure. In that case, the {@code VolumeShaper} should be
+ * recreated.
+ */
+    public float getVolume() {
+        // Query the player for the last applied shaper volume.
+        return getStatePlayer(mId).getVolume();
+    }
+
+ /**
+ * Releases the {@code VolumeShaper} object; any volume scale due to the
+ * {@code VolumeShaper} is removed after closing.
+ *
+ * If the volume does not reach 1.f when the {@code VolumeShaper} is closed
+ * (or finalized), there may be an abrupt change of volume.
+ *
+ * {@code close()} may be safely called after a prior {@code close()}.
+ * This class implements the Java {@code AutoClosable} interface and
+ * may be used with try-with-resources.
+ */
+    @Override
+    public void close() {
+        try {
+            // Terminate the native shaper; ignore failures from an already
+            // closed shaper or a released player (close is idempotent).
+            /* void */ applyPlayer(
+                    new VolumeShaper.Configuration(mId),
+                    new Operation.Builder().terminate().build());
+        } catch (IllegalStateException ise) {
+            ; // ok
+        }
+        if (mWeakPlayerBase != null) {
+            mWeakPlayerBase.clear();
+        }
+    }
+
+    @Override
+    protected void finalize() {
+        // NOTE(review): does not chain to super.finalize(); presumably
+        // intentional since Object.finalize() is a no-op -- confirm.
+        close(); // ensure we remove the native VolumeShaper
+    }
+
+    /**
+     * Internal call to apply the {@code configuration} and {@code operation} to the player.
+     * Returns a valid shaper id or throws the appropriate exception.
+     * @param configuration the configuration to send (may be id-only).
+     * @param operation the operation to apply.
+     * @return id a non-negative shaper id.
+     * @throws IllegalStateException if the player has been deallocated or is uninitialized.
+     * @throws IllegalArgumentException if the native layer rejects the
+     *         configuration or operation.
+     */
+    private int applyPlayer(
+            @NonNull VolumeShaper.Configuration configuration,
+            @NonNull VolumeShaper.Operation operation) {
+        final int id;
+        if (mWeakPlayerBase != null) {
+            PlayerBase player = mWeakPlayerBase.get();
+            if (player == null) {
+                throw new IllegalStateException("player deallocated");
+            }
+            id = player.playerApplyVolumeShaper(configuration, operation);
+        } else {
+            throw new IllegalStateException("uninitialized shaper");
+        }
+        if (id < 0) {
+            // TODO - get INVALID_OPERATION from platform.
+            final int VOLUME_SHAPER_INVALID_OPERATION = -38; // must match with platform
+            // Due to RPC handling, we translate integer codes to exceptions right before
+            // delivering to the user.
+            if (id == VOLUME_SHAPER_INVALID_OPERATION) {
+                throw new IllegalStateException("player or VolumeShaper deallocated");
+            } else {
+                throw new IllegalArgumentException("invalid configuration or operation: " + id);
+            }
+        }
+        return id;
+    }
+
+    /**
+     * Internal call to retrieve the current {@code VolumeShaper} state.
+     * @param id the native shaper id to query.
+     * @return the current {@code VolumeShaper.State}
+     * @throws IllegalStateException if the player has been deallocated, is
+     *         uninitialized, or no shaper with {@code id} can be found.
+     */
+    private @NonNull VolumeShaper.State getStatePlayer(int id) {
+        final VolumeShaper.State state;
+        if (mWeakPlayerBase != null) {
+            PlayerBase player = mWeakPlayerBase.get();
+            if (player == null) {
+                throw new IllegalStateException("player deallocated");
+            }
+            state = player.playerGetVolumeShaperState(id);
+        } else {
+            throw new IllegalStateException("uninitialized shaper");
+        }
+        if (state == null) {
+            throw new IllegalStateException("shaper cannot be found");
+        }
+        return state;
+    }
+
+ /**
+ * The {@code VolumeShaper.Configuration} class contains curve
+ * and duration information.
+ * It is constructed by the {@link VolumeShaper.Configuration.Builder}.
+ * <p>
+ * A {@code VolumeShaper.Configuration} is used by
+ * {@link VolumeAutomation#createVolumeShaper(Configuration)
+ * VolumeAutomation.createVolumeShaper(Configuration)} to create
+ * a {@code VolumeShaper} and
+ * by {@link VolumeShaper#replace(Configuration, Operation, boolean)
+ * VolumeShaper.replace(Configuration, Operation, boolean)}
+ * to replace an existing {@code configuration}.
+ * <p>
+ * The {@link AudioTrack} and {@link MediaPlayer} classes implement
+ * the {@link VolumeAutomation} interface.
+ */
+ public static final class Configuration implements Parcelable {
+ private static final int MAXIMUM_CURVE_POINTS = 16;
+
+        /**
+         * Returns the maximum number of curve points allowed for
+         * {@link VolumeShaper.Configuration.Builder#setCurve(float[], float[])}.
+         */
+        public static int getMaximumCurvePoints() {
+            return MAXIMUM_CURVE_POINTS;
+        }
+
+ // These values must match the native VolumeShaper::Configuration::Type
+ /** @hide */
+ @IntDef({
+ TYPE_ID,
+ TYPE_SCALE,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface Type {}
+
+ /**
+ * Specifies a {@link VolumeShaper} handle created by {@link #VolumeShaper(int)}
+ * from an id returned by {@code setVolumeShaper()}.
+ * The type, curve, etc. may not be queried from
+ * a {@code VolumeShaper} object of this type;
+ * the handle is used to identify and change the operation of
+ * an existing {@code VolumeShaper} sent to the player.
+ */
+ /* package */ static final int TYPE_ID = 0;
+
+ /**
+ * Specifies a {@link VolumeShaper} to be used
+ * as an additional scale to the current volume.
+ * This is created by the {@link VolumeShaper.Builder}.
+ */
+ /* package */ static final int TYPE_SCALE = 1;
+
+ // These values must match the native InterpolatorType enumeration.
+ /** @hide */
+ @IntDef({
+ INTERPOLATOR_TYPE_STEP,
+ INTERPOLATOR_TYPE_LINEAR,
+ INTERPOLATOR_TYPE_CUBIC,
+ INTERPOLATOR_TYPE_CUBIC_MONOTONIC,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface InterpolatorType {}
+
+ /**
+ * Stepwise volume curve.
+ */
+ public static final int INTERPOLATOR_TYPE_STEP = 0;
+
+ /**
+ * Linear interpolated volume curve.
+ */
+ public static final int INTERPOLATOR_TYPE_LINEAR = 1;
+
+ /**
+ * Cubic interpolated volume curve.
+ * This is default if unspecified.
+ */
+ public static final int INTERPOLATOR_TYPE_CUBIC = 2;
+
+ /**
+ * Cubic interpolated volume curve
+ * that preserves local monotonicity.
+ * So long as the control points are locally monotonic,
+ * the curve interpolation between those points are monotonic.
+ * This is useful for cubic spline interpolated
+ * volume ramps and ducks.
+ */
+ public static final int INTERPOLATOR_TYPE_CUBIC_MONOTONIC = 3;
+
+ // These values must match the native VolumeShaper::Configuration::InterpolatorType
+ /** @hide */
+ @IntDef({
+ OPTION_FLAG_VOLUME_IN_DBFS,
+ OPTION_FLAG_CLOCK_TIME,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface OptionFlag {}
+
+ /**
+ * @hide
+ * Use a dB full scale volume range for the volume curve.
+ *<p>
+ * The volume scale is typically from 0.f to 1.f on a linear scale;
+ * this option changes to -inf to 0.f on a db full scale,
+ * where 0.f is equivalent to a scale of 1.f.
+ */
+ public static final int OPTION_FLAG_VOLUME_IN_DBFS = (1 << 0);
+
+ /**
+ * @hide
+ * Use clock time instead of media time.
+ *<p>
+ * The default implementation of {@code VolumeShaper} is to apply
+ * volume changes by the media time of the player.
+ * Hence, the {@code VolumeShaper} will speed or slow down to
+ * match player changes of playback rate, pause, or resume.
+ *<p>
+ * The {@code OPTION_FLAG_CLOCK_TIME} option allows the {@code VolumeShaper}
+ * progress to be determined by clock time instead of media time.
+ */
+ public static final int OPTION_FLAG_CLOCK_TIME = (1 << 1);
+
+ private static final int OPTION_FLAG_PUBLIC_ALL =
+ OPTION_FLAG_VOLUME_IN_DBFS | OPTION_FLAG_CLOCK_TIME;
+
+ /**
+ * A one second linear ramp from silence to full volume.
+ * Use {@link VolumeShaper.Builder#reflectTimes()}
+ * or {@link VolumeShaper.Builder#invertVolumes()} to generate
+ * the matching linear duck.
+ */
+ public static final Configuration LINEAR_RAMP = new VolumeShaper.Configuration.Builder()
+ .setInterpolatorType(INTERPOLATOR_TYPE_LINEAR)
+ .setCurve(new float[] {0.f, 1.f} /* times */,
+ new float[] {0.f, 1.f} /* volumes */)
+ .setDuration(1000)
+ .build();
+
+ /**
+ * A one second cubic ramp from silence to full volume.
+ * Use {@link VolumeShaper.Builder#reflectTimes()}
+ * or {@link VolumeShaper.Builder#invertVolumes()} to generate
+ * the matching cubic duck.
+ */
+ public static final Configuration CUBIC_RAMP = new VolumeShaper.Configuration.Builder()
+ .setInterpolatorType(INTERPOLATOR_TYPE_CUBIC)
+ .setCurve(new float[] {0.f, 1.f} /* times */,
+ new float[] {0.f, 1.f} /* volumes */)
+ .setDuration(1000)
+ .build();
+
+ /**
+ * A one second sine curve
+ * from silence to full volume for energy preserving cross fades.
+ * Use {@link VolumeShaper.Builder#reflectTimes()} to generate
+ * the matching cosine duck.
+ */
+ public static final Configuration SINE_RAMP;
+
+ /**
+ * A one second sine-squared s-curve ramp
+ * from silence to full volume.
+ * Use {@link VolumeShaper.Builder#reflectTimes()}
+ * or {@link VolumeShaper.Builder#invertVolumes()} to generate
+ * the matching sine-squared s-curve duck.
+ */
+ public static final Configuration SCURVE_RAMP;
+
+        static {
+            final int POINTS = MAXIMUM_CURVE_POINTS;
+            final float times[] = new float[POINTS];
+            final float sines[] = new float[POINTS];
+            final float scurve[] = new float[POINTS];
+            // Sample a quarter-period sine (and its square, an s-curve) at
+            // POINTS evenly spaced times in [0, 1].
+            for (int i = 0; i < POINTS; ++i) {
+                times[i] = (float)i / (POINTS - 1);
+                final float sine = (float)Math.sin(times[i] * Math.PI / 2.);
+                sines[i] = sine;
+                scurve[i] = sine * sine;
+            }
+            SINE_RAMP = new VolumeShaper.Configuration.Builder()
+                    .setInterpolatorType(INTERPOLATOR_TYPE_CUBIC)
+                    .setCurve(times, sines)
+                    .setDuration(1000)
+                    .build();
+            SCURVE_RAMP = new VolumeShaper.Configuration.Builder()
+                    .setInterpolatorType(INTERPOLATOR_TYPE_CUBIC)
+                    .setCurve(times, scurve)
+                    .setDuration(1000)
+                    .build();
+        }
+
+ /*
+ * member variables - these are all final
+ */
+
+ // type of VolumeShaper
+ private final int mType;
+
+ // valid when mType is TYPE_ID
+ private final int mId;
+
+ // valid when mType is TYPE_SCALE
+ private final int mOptionFlags;
+ private final double mDurationMs;
+ private final int mInterpolatorType;
+ private final float[] mTimes;
+ private final float[] mVolumes;
+
+        @Override
+        public String toString() {
+            // TYPE_ID configurations carry no curve data, so print only type/id.
+            return "VolumeShaper.Configuration{"
+                    + "mType = " + mType
+                    + ", mId = " + mId
+                    + (mType == TYPE_ID
+                            ? "}"
+                            : ", mOptionFlags = 0x" + Integer.toHexString(mOptionFlags).toUpperCase()
+                            + ", mDurationMs = " + mDurationMs
+                            + ", mInterpolatorType = " + mInterpolatorType
+                            + ", mTimes[] = " + Arrays.toString(mTimes)
+                            + ", mVolumes[] = " + Arrays.toString(mVolumes)
+                            + "}");
+        }
+
+        @Override
+        public int hashCode() {
+            // Mirrors equals(): TYPE_ID hashes only type and id.
+            return mType == TYPE_ID
+                    ? Objects.hash(mType, mId)
+                    : Objects.hash(mType, mId,
+                            mOptionFlags, mDurationMs, mInterpolatorType,
+                            Arrays.hashCode(mTimes), Arrays.hashCode(mVolumes));
+        }
+
+        @Override
+        public boolean equals(Object o) {
+            if (!(o instanceof Configuration)) return false;
+            if (o == this) return true;
+            final Configuration other = (Configuration) o;
+            // Note that exact floating point equality may not be guaranteed
+            // for a theoretically idempotent operation; for example,
+            // there are many cases where a + b - b != a.
+            return mType == other.mType
+                    && mId == other.mId
+                    && (mType == TYPE_ID
+                            || (mOptionFlags == other.mOptionFlags
+                                    && mDurationMs == other.mDurationMs
+                                    && mInterpolatorType == other.mInterpolatorType
+                                    && Arrays.equals(mTimes, other.mTimes)
+                                    && Arrays.equals(mVolumes, other.mVolumes)));
+        }
+
+        @Override
+        public int describeContents() {
+            // No special objects (e.g. file descriptors) in the marshalled form.
+            return 0;
+        }
+
+        @Override
+        public void writeToParcel(Parcel dest, int flags) {
+            // this needs to match the native VolumeShaper.Configuration parceling
+            dest.writeInt(mType);
+            dest.writeInt(mId);
+            if (mType != TYPE_ID) {
+                dest.writeInt(mOptionFlags);
+                dest.writeDouble(mDurationMs);
+                // this needs to match the native Interpolator parceling
+                dest.writeInt(mInterpolatorType);
+                dest.writeFloat(0.f); // first slope (specifying for native side)
+                dest.writeFloat(0.f); // last slope (specifying for native side)
+                // mTimes and mVolumes should have the same length.
+                dest.writeInt(mTimes.length);
+                // Interleave (time, volume) pairs.
+                for (int i = 0; i < mTimes.length; ++i) {
+                    dest.writeFloat(mTimes[i]);
+                    dest.writeFloat(mVolumes[i]);
+                }
+            }
+        }
+
// Reconstructs a Configuration from a Parcel; reads must occur in the exact
// order written by writeToParcel above.
public static final Parcelable.Creator<VolumeShaper.Configuration> CREATOR
        = new Parcelable.Creator<VolumeShaper.Configuration>() {
    @Override
    public VolumeShaper.Configuration createFromParcel(Parcel p) {
        // this needs to match the native VolumeShaper.Configuration parceling
        final int type = p.readInt();
        final int id = p.readInt();
        if (type == TYPE_ID) {
            // Id-only configuration: no curve data follows in the parcel.
            return new VolumeShaper.Configuration(id);
        } else {
            final int optionFlags = p.readInt();
            final double durationMs = p.readDouble();
            // this needs to match the native Interpolator parceling
            final int interpolatorType = p.readInt();
            // The slopes are consumed to advance the parcel position but are
            // not used on the Java side.
            final float firstSlope = p.readFloat(); // ignored on the Java side
            final float lastSlope = p.readFloat(); // ignored on the Java side
            final int length = p.readInt();
            final float[] times = new float[length];
            final float[] volumes = new float[length];
            // Points were written interleaved as (time, volume) pairs.
            for (int i = 0; i < length; ++i) {
                times[i] = p.readFloat();
                volumes[i] = p.readFloat();
            }

            return new VolumeShaper.Configuration(
                    type,
                    id,
                    optionFlags,
                    durationMs,
                    interpolatorType,
                    times,
                    volumes);
        }
    }

    @Override
    public VolumeShaper.Configuration[] newArray(int size) {
        return new VolumeShaper.Configuration[size];
    }
};
+
/**
 * @hide
 * Constructs a {@code VolumeShaper} configuration that merely wraps an id.
 *
 * This is an opaque handle for controlling a {@code VolumeShaper} that has
 * already been sent to a player. The {@code id} is returned from the
 * initial {@code setVolumeShaper()} call on success.
 *
 * These configurations are for native use only,
 * they are never returned directly to the user.
 *
 * @param id the non-negative shaper id to wrap.
 * @throws IllegalArgumentException if id is negative.
 */
public Configuration(int id) {
    if (id < 0) {
        throw new IllegalArgumentException("negative id " + id);
    }
    mId = id;
    mType = TYPE_ID;
    // An id-only configuration carries no curve data; the remaining
    // fields are zeroed / nulled out.
    mDurationMs = 0;
    mInterpolatorType = 0;
    mOptionFlags = 0;
    mTimes = null;
    mVolumes = null;
}
+
/**
 * Direct constructor for VolumeShaper.
 * Use the Builder instead.
 *
 * Note: the arrays are stored without copying; the Builder is expected
 * to have cloned them already (see comment below).
 */
private Configuration(@Type int type,
        int id,
        @OptionFlag int optionFlags,
        double durationMs,
        @InterpolatorType int interpolatorType,
        @NonNull float[] times,
        @NonNull float[] volumes) {
    mType = type;
    mId = id;
    mOptionFlags = optionFlags;
    mDurationMs = durationMs;
    mInterpolatorType = interpolatorType;
    // Builder should have cloned these arrays already.
    mTimes = times;
    mVolumes = volumes;
}
+
/**
 * @hide
 * Returns the {@code VolumeShaper} type.
 * @return the type, e.g. {@code TYPE_ID} or {@code TYPE_SCALE}.
 */
public @Type int getType() {
    return mType;
}
+
/**
 * @hide
 * Returns the {@code VolumeShaper} id.
 * @return the id, or -1 if the system is expected to assign one
 *         (see {@code Builder#setId}).
 */
public int getId() {
    return mId;
}
+
/**
 * Returns the interpolator type.
 * @return one of the {@code INTERPOLATOR_TYPE_*} constants.
 */
public @InterpolatorType int getInterpolatorType() {
    return mInterpolatorType;
}
+
/**
 * @hide
 * Returns the option flags, masked down to the publicly visible subset.
 * Internal-only flag bits are stripped; see {@code getAllOptionFlags}.
 */
public @OptionFlag int getOptionFlags() {
    return mOptionFlags & OPTION_FLAG_PUBLIC_ALL;
}
+
// Returns the raw option flags, including non-public bits (unmasked).
/* package */ @OptionFlag int getAllOptionFlags() {
    return mOptionFlags;
}
+
/**
 * Returns the duration of the volume shape in milliseconds.
 * @return the duration, truncated to a whole number of milliseconds.
 */
public long getDuration() {
    // casting is safe here as the duration was set as a long in the Builder
    return (long) mDurationMs;
}
+
+ /**
+ * Returns the times (x) coordinate array of the volume curve points.
+ */
+ public float[] getTimes() {
+ return mTimes;
+ }
+
+ /**
+ * Returns the volumes (y) coordinate array of the volume curve points.
+ */
+ public float[] getVolumes() {
+ return mVolumes;
+ }
+
+ /**
+ * Checks the validity of times and volumes point representation.
+ *
+ * {@code times[]} and {@code volumes[]} are two arrays representing points
+ * for the volume curve.
+ *
+ * Note that {@code times[]} and {@code volumes[]} are explicitly checked against
+ * null here to provide the proper error string - those are legitimate
+ * arguments to this method.
+ *
+ * @param times the x coordinates for the points,
+ * must be between 0.f and 1.f and be monotonic.
+ * @param volumes the y coordinates for the points,
+ * must be between 0.f and 1.f for linear and
+ * must be no greater than 0.f for log (dBFS).
+ * @param log set to true if the scale is logarithmic.
+ * @return null if no error, or the reason in a {@code String} for an error.
+ */
+ private static @Nullable String checkCurveForErrors(
+ @Nullable float[] times, @Nullable float[] volumes, boolean log) {
+ if (times == null) {
+ return "times array must be non-null";
+ } else if (volumes == null) {
+ return "volumes array must be non-null";
+ } else if (times.length != volumes.length) {
+ return "array length must match";
+ } else if (times.length < 2) {
+ return "array length must be at least 2";
+ } else if (times.length > MAXIMUM_CURVE_POINTS) {
+ return "array length must be no larger than " + MAXIMUM_CURVE_POINTS;
+ } else if (times[0] != 0.f) {
+ return "times must start at 0.f";
+ } else if (times[times.length - 1] != 1.f) {
+ return "times must end at 1.f";
+ }
+
+ // validate points along the curve
+ for (int i = 1; i < times.length; ++i) {
+ if (!(times[i] > times[i - 1]) /* handle nan */) {
+ return "times not monotonic increasing, check index " + i;
+ }
+ }
+ if (log) {
+ for (int i = 0; i < volumes.length; ++i) {
+ if (!(volumes[i] <= 0.f) /* handle nan */) {
+ return "volumes for log scale cannot be positive, "
+ + "check index " + i;
+ }
+ }
+ } else {
+ for (int i = 0; i < volumes.length; ++i) {
+ if (!(volumes[i] >= 0.f) || !(volumes[i] <= 1.f) /* handle nan */) {
+ return "volumes for linear scale must be between 0.f and 1.f, "
+ + "check index " + i;
+ }
+ }
+ }
+ return null; // no errors
+ }
+
+ private static void checkCurveForErrorsAndThrowException(
+ @Nullable float[] times, @Nullable float[] volumes, boolean log, boolean ise) {
+ final String error = checkCurveForErrors(times, volumes, log);
+ if (error != null) {
+ if (ise) {
+ throw new IllegalStateException(error);
+ } else {
+ throw new IllegalArgumentException(error);
+ }
+ }
+ }
+
+ private static void checkValidVolumeAndThrowException(float volume, boolean log) {
+ if (log) {
+ if (!(volume <= 0.f) /* handle nan */) {
+ throw new IllegalArgumentException("dbfs volume must be 0.f or less");
+ }
+ } else {
+ if (!(volume >= 0.f) || !(volume <= 1.f) /* handle nan */) {
+ throw new IllegalArgumentException("volume must be >= 0.f and <= 1.f");
+ }
+ }
+ }
+
+ private static void clampVolume(float[] volumes, boolean log) {
+ if (log) {
+ for (int i = 0; i < volumes.length; ++i) {
+ if (!(volumes[i] <= 0.f) /* handle nan */) {
+ volumes[i] = 0.f;
+ }
+ }
+ } else {
+ for (int i = 0; i < volumes.length; ++i) {
+ if (!(volumes[i] >= 0.f) /* handle nan */) {
+ volumes[i] = 0.f;
+ } else if (!(volumes[i] <= 1.f)) {
+ volumes[i] = 1.f;
+ }
+ }
+ }
+ }
+
+ /**
+ * Builder class for a {@link VolumeShaper.Configuration} object.
+ * <p> Here is an example where {@code Builder} is used to define the
+ * {@link VolumeShaper.Configuration}.
+ *
+ * <pre class="prettyprint">
+ * VolumeShaper.Configuration LINEAR_RAMP =
+ * new VolumeShaper.Configuration.Builder()
+ * .setInterpolatorType(VolumeShaper.Configuration.INTERPOLATOR_TYPE_LINEAR)
+ * .setCurve(new float[] { 0.f, 1.f }, // times
+ * new float[] { 0.f, 1.f }) // volumes
+ * .setDuration(1000)
+ * .build();
+ * </pre>
+ * <p>
+ */
+ public static final class Builder {
+ private int mType = TYPE_SCALE;
+ private int mId = -1; // invalid
+ private int mInterpolatorType = INTERPOLATOR_TYPE_CUBIC;
+ private int mOptionFlags = OPTION_FLAG_CLOCK_TIME;
+ private double mDurationMs = 1000.;
+ private float[] mTimes = null;
+ private float[] mVolumes = null;
+
+ /**
+ * Constructs a new {@code Builder} with the defaults.
+ */
+ public Builder() {
+ }
+
+ /**
+ * Constructs a new {@code Builder} with settings
+ * copied from a given {@code VolumeShaper.Configuration}.
+ * @param configuration prototypical configuration
+ * which will be reused in the new {@code Builder}.
+ */
+ public Builder(@NonNull Configuration configuration) {
+ mType = configuration.getType();
+ mId = configuration.getId();
+ mOptionFlags = configuration.getAllOptionFlags();
+ mInterpolatorType = configuration.getInterpolatorType();
+ mDurationMs = configuration.getDuration();
+ mTimes = configuration.getTimes().clone();
+ mVolumes = configuration.getVolumes().clone();
+ }
+
+ /**
+ * @hide
+ * Set the {@code id} for system defined shapers.
+ * @param id the {@code id} to set. If non-negative, then it is used.
+ * If -1, then the system is expected to assign one.
+ * @return the same {@code Builder} instance.
+ * @throws IllegalArgumentException if {@code id} < -1.
+ */
+ public @NonNull Builder setId(int id) {
+ if (id < -1) {
+ throw new IllegalArgumentException("invalid id: " + id);
+ }
+ mId = id;
+ return this;
+ }
+
+ /**
+ * Sets the interpolator type.
+ *
+ * If omitted the default interpolator type is {@link #INTERPOLATOR_TYPE_CUBIC}.
+ *
+ * @param interpolatorType method of interpolation used for the volume curve.
+ * One of {@link #INTERPOLATOR_TYPE_STEP},
+ * {@link #INTERPOLATOR_TYPE_LINEAR},
+ * {@link #INTERPOLATOR_TYPE_CUBIC},
+ * {@link #INTERPOLATOR_TYPE_CUBIC_MONOTONIC}.
+ * @return the same {@code Builder} instance.
+ * @throws IllegalArgumentException if {@code interpolatorType} is not valid.
+ */
+ public @NonNull Builder setInterpolatorType(@InterpolatorType int interpolatorType) {
+ switch (interpolatorType) {
+ case INTERPOLATOR_TYPE_STEP:
+ case INTERPOLATOR_TYPE_LINEAR:
+ case INTERPOLATOR_TYPE_CUBIC:
+ case INTERPOLATOR_TYPE_CUBIC_MONOTONIC:
+ mInterpolatorType = interpolatorType;
+ break;
+ default:
+ throw new IllegalArgumentException("invalid interpolatorType: "
+ + interpolatorType);
+ }
+ return this;
+ }
+
+ /**
+ * @hide
+ * Sets the optional flags
+ *
+ * If omitted, flags are 0. If {@link #OPTION_FLAG_VOLUME_IN_DBFS} has
+ * changed the volume curve needs to be set again as the acceptable
+ * volume domain has changed.
+ *
+ * @param optionFlags new value to replace the old {@code optionFlags}.
+ * @return the same {@code Builder} instance.
+ * @throws IllegalArgumentException if flag is not recognized.
+ */
+ public @NonNull Builder setOptionFlags(@OptionFlag int optionFlags) {
+ if ((optionFlags & ~OPTION_FLAG_PUBLIC_ALL) != 0) {
+ throw new IllegalArgumentException("invalid bits in flag: " + optionFlags);
+ }
+ mOptionFlags = mOptionFlags & ~OPTION_FLAG_PUBLIC_ALL | optionFlags;
+ return this;
+ }
+
+ /**
+ * Sets the {@code VolumeShaper} duration in milliseconds.
+ *
+ * If omitted, the default duration is 1 second.
+ *
+ * @param durationMillis
+ * @return the same {@code Builder} instance.
+ * @throws IllegalArgumentException if {@code durationMillis}
+ * is not strictly positive.
+ */
+ public @NonNull Builder setDuration(long durationMillis) {
+ if (durationMillis <= 0) {
+ throw new IllegalArgumentException(
+ "duration: " + durationMillis + " not positive");
+ }
+ mDurationMs = (double) durationMillis;
+ return this;
+ }
+
+ /**
+ * Sets the volume curve.
+ *
+ * The volume curve is represented by a set of control points given by
+ * two float arrays of equal length,
+ * one representing the time (x) coordinates
+ * and one corresponding to the volume (y) coordinates.
+ * The length must be at least 2
+ * and no greater than {@link VolumeShaper.Configuration#getMaximumCurvePoints()}.
+ * <p>
+ * The volume curve is normalized as follows:
+ * time (x) coordinates should be monotonically increasing, from 0.f to 1.f;
+ * volume (y) coordinates must be within 0.f to 1.f.
+ * <p>
+ * The time scale is set by {@link #setDuration}.
+ * <p>
+ * @param times an array of float values representing
+ * the time line of the volume curve.
+ * @param volumes an array of float values representing
+ * the amplitude of the volume curve.
+ * @return the same {@code Builder} instance.
+ * @throws IllegalArgumentException if {@code times} or {@code volumes} is invalid.
+ */
+
+ /* Note: volume (y) coordinates must be non-positive for log scaling,
+ * if {@link VolumeShaper.Configuration#OPTION_FLAG_VOLUME_IN_DBFS} is set.
+ */
+
+ public @NonNull Builder setCurve(@NonNull float[] times, @NonNull float[] volumes) {
+ final boolean log = (mOptionFlags & OPTION_FLAG_VOLUME_IN_DBFS) != 0;
+ checkCurveForErrorsAndThrowException(times, volumes, log, false /* ise */);
+ mTimes = times.clone();
+ mVolumes = volumes.clone();
+ return this;
+ }
+
+ /**
+ * Reflects the volume curve so that
+ * the shaper changes volume from the end
+ * to the start.
+ *
+ * @return the same {@code Builder} instance.
+ * @throws IllegalStateException if curve has not been set.
+ */
+ public @NonNull Builder reflectTimes() {
+ final boolean log = (mOptionFlags & OPTION_FLAG_VOLUME_IN_DBFS) != 0;
+ checkCurveForErrorsAndThrowException(mTimes, mVolumes, log, true /* ise */);
+ int i;
+ for (i = 0; i < mTimes.length / 2; ++i) {
+ float temp = mTimes[i];
+ mTimes[i] = 1.f - mTimes[mTimes.length - 1 - i];
+ mTimes[mTimes.length - 1 - i] = 1.f - temp;
+ temp = mVolumes[i];
+ mVolumes[i] = mVolumes[mVolumes.length - 1 - i];
+ mVolumes[mVolumes.length - 1 - i] = temp;
+ }
+ if ((mTimes.length & 1) != 0) {
+ mTimes[i] = 1.f - mTimes[i];
+ }
+ return this;
+ }
+
+ /**
+ * Inverts the volume curve so that the max volume
+ * becomes the min volume and vice versa.
+ *
+ * @return the same {@code Builder} instance.
+ * @throws IllegalStateException if curve has not been set.
+ */
+ public @NonNull Builder invertVolumes() {
+ final boolean log = (mOptionFlags & OPTION_FLAG_VOLUME_IN_DBFS) != 0;
+ checkCurveForErrorsAndThrowException(mTimes, mVolumes, log, true /* ise */);
+ float min = mVolumes[0];
+ float max = mVolumes[0];
+ for (int i = 1; i < mVolumes.length; ++i) {
+ if (mVolumes[i] < min) {
+ min = mVolumes[i];
+ } else if (mVolumes[i] > max) {
+ max = mVolumes[i];
+ }
+ }
+
+ final float maxmin = max + min;
+ for (int i = 0; i < mVolumes.length; ++i) {
+ mVolumes[i] = maxmin - mVolumes[i];
+ }
+ return this;
+ }
+
+ /**
+ * Scale the curve end volume to a target value.
+ *
+ * Keeps the start volume the same.
+ * This works best if the volume curve is monotonic.
+ *
+ * @param volume the target end volume to use.
+ * @return the same {@code Builder} instance.
+ * @throws IllegalArgumentException if {@code volume} is not valid.
+ * @throws IllegalStateException if curve has not been set.
+ */
+ public @NonNull Builder scaleToEndVolume(float volume) {
+ final boolean log = (mOptionFlags & OPTION_FLAG_VOLUME_IN_DBFS) != 0;
+ checkCurveForErrorsAndThrowException(mTimes, mVolumes, log, true /* ise */);
+ checkValidVolumeAndThrowException(volume, log);
+ final float startVolume = mVolumes[0];
+ final float endVolume = mVolumes[mVolumes.length - 1];
+ if (endVolume == startVolume) {
+ // match with linear ramp
+ final float offset = volume - startVolume;
+ for (int i = 0; i < mVolumes.length; ++i) {
+ mVolumes[i] = mVolumes[i] + offset * mTimes[i];
+ }
+ } else {
+ // scale
+ final float scale = (volume - startVolume) / (endVolume - startVolume);
+ for (int i = 0; i < mVolumes.length; ++i) {
+ mVolumes[i] = scale * (mVolumes[i] - startVolume) + startVolume;
+ }
+ }
+ clampVolume(mVolumes, log);
+ return this;
+ }
+
+ /**
+ * Scale the curve start volume to a target value.
+ *
+ * Keeps the end volume the same.
+ * This works best if the volume curve is monotonic.
+ *
+ * @param volume the target start volume to use.
+ * @return the same {@code Builder} instance.
+ * @throws IllegalArgumentException if {@code volume} is not valid.
+ * @throws IllegalStateException if curve has not been set.
+ */
+ public @NonNull Builder scaleToStartVolume(float volume) {
+ final boolean log = (mOptionFlags & OPTION_FLAG_VOLUME_IN_DBFS) != 0;
+ checkCurveForErrorsAndThrowException(mTimes, mVolumes, log, true /* ise */);
+ checkValidVolumeAndThrowException(volume, log);
+ final float startVolume = mVolumes[0];
+ final float endVolume = mVolumes[mVolumes.length - 1];
+ if (endVolume == startVolume) {
+ // match with linear ramp
+ final float offset = volume - startVolume;
+ for (int i = 0; i < mVolumes.length; ++i) {
+ mVolumes[i] = mVolumes[i] + offset * (1.f - mTimes[i]);
+ }
+ } else {
+ final float scale = (volume - endVolume) / (startVolume - endVolume);
+ for (int i = 0; i < mVolumes.length; ++i) {
+ mVolumes[i] = scale * (mVolumes[i] - endVolume) + endVolume;
+ }
+ }
+ clampVolume(mVolumes, log);
+ return this;
+ }
+
+ /**
+ * Builds a new {@link VolumeShaper} object.
+ *
+ * @return a new {@link VolumeShaper} object.
+ * @throws IllegalStateException if curve is not properly set.
+ */
+ public @NonNull Configuration build() {
+ final boolean log = (mOptionFlags & OPTION_FLAG_VOLUME_IN_DBFS) != 0;
+ checkCurveForErrorsAndThrowException(mTimes, mVolumes, log, true /* ise */);
+ return new Configuration(mType, mId, mOptionFlags, mDurationMs,
+ mInterpolatorType, mTimes, mVolumes);
+ }
+ } // Configuration.Builder
+ } // Configuration
+
/**
 * The {@code VolumeShaper.Operation} class is used to specify operations
 * to the {@code VolumeShaper} that affect the volume change.
 */
public static final class Operation implements Parcelable {
    /**
     * Forward playback from current volume time position.
     * At the end of the {@code VolumeShaper} curve,
     * the last volume value persists.
     */
    public static final Operation PLAY =
            new VolumeShaper.Operation.Builder()
                .build();

    /**
     * Reverse playback from current volume time position.
     * When the position reaches the start of the {@code VolumeShaper} curve,
     * the first volume value persists.
     */
    public static final Operation REVERSE =
            new VolumeShaper.Operation.Builder()
                .reverse()
                .build();

    // No user serviceable parts below.

    // These flags must match the native VolumeShaper::Operation::Flag
    /** @hide */
    @IntDef({
        FLAG_NONE,
        FLAG_REVERSE,
        FLAG_TERMINATE,
        FLAG_JOIN,
        FLAG_DEFER,
        })
    @Retention(RetentionPolicy.SOURCE)
    public @interface Flag {}

    /**
     * No special {@code VolumeShaper} operation.
     */
    private static final int FLAG_NONE = 0;

    /**
     * Reverse the {@code VolumeShaper} progress.
     *
     * Reverses the {@code VolumeShaper} curve from its current
     * position. If the {@code VolumeShaper} curve has not started,
     * it automatically is considered finished.
     */
    private static final int FLAG_REVERSE = 1 << 0;

    /**
     * Terminate the existing {@code VolumeShaper}.
     * This flag is generally used by itself;
     * it takes precedence over all other flags.
     */
    private static final int FLAG_TERMINATE = 1 << 1;

    /**
     * Attempt to join as best as possible to the previous {@code VolumeShaper}.
     * This requires the previous {@code VolumeShaper} to be active and
     * {@link #setReplaceId} to be set.
     */
    private static final int FLAG_JOIN = 1 << 2;

    /**
     * Defer playback until next operation is sent. This is used
     * when starting a {@code VolumeShaper} effect.
     */
    private static final int FLAG_DEFER = 1 << 3;

    /**
     * Use the id specified in the configuration, creating
     * {@code VolumeShaper} as needed; the configuration should be
     * TYPE_SCALE.
     */
    private static final int FLAG_CREATE_IF_NEEDED = 1 << 4;

    // Only these flag bits may be set through the public setFlags() path.
    private static final int FLAG_PUBLIC_ALL = FLAG_REVERSE | FLAG_TERMINATE;

    // ORed FLAG_* bits describing this operation.
    private final int mFlags;
    // Id of a previous VolumeShaper to replace, or -1 for none (see Builder#replace).
    private final int mReplaceId;
    // Position on the normalized curve to apply, or Float.NaN to ignore
    // (see Builder#setXOffset).
    private final float mXOffset;

    @Override
    public String toString() {
        return "VolumeShaper.Operation{"
                + "mFlags = 0x" + Integer.toHexString(mFlags).toUpperCase()
                + ", mReplaceId = " + mReplaceId
                + ", mXOffset = " + mXOffset
                + "}";
    }

    @Override
    public int hashCode() {
        return Objects.hash(mFlags, mReplaceId, mXOffset);
    }

    @Override
    public boolean equals(Object o) {
        if (!(o instanceof Operation)) return false;
        if (o == this) return true;
        final Operation other = (Operation) o;

        // Float.compare handles NaN xOffsets consistently.
        return mFlags == other.mFlags
                && mReplaceId == other.mReplaceId
                && Float.compare(mXOffset, other.mXOffset) == 0;
    }

    @Override
    public int describeContents() {
        // No special objects (e.g. file descriptors) are marshalled.
        return 0;
    }

    @Override
    public void writeToParcel(Parcel dest, int flags) {
        // this needs to match the native VolumeShaper.Operation parceling
        dest.writeInt(mFlags);
        dest.writeInt(mReplaceId);
        dest.writeFloat(mXOffset);
    }

    // Reconstructs an Operation from a Parcel; reads must occur in the
    // exact order written by writeToParcel above.
    public static final Parcelable.Creator<VolumeShaper.Operation> CREATOR
            = new Parcelable.Creator<VolumeShaper.Operation>() {
        @Override
        public VolumeShaper.Operation createFromParcel(Parcel p) {
            // this needs to match the native VolumeShaper.Operation parceling
            final int flags = p.readInt();
            final int replaceId = p.readInt();
            final float xOffset = p.readFloat();

            return new VolumeShaper.Operation(
                    flags,
                    replaceId,
                    xOffset);
        }

        @Override
        public VolumeShaper.Operation[] newArray(int size) {
            return new VolumeShaper.Operation[size];
        }
    };

    // Direct construction is private; use the Builder.
    private Operation(@Flag int flags, int replaceId, float xOffset) {
        mFlags = flags;
        mReplaceId = replaceId;
        mXOffset = xOffset;
    }

    /**
     * @hide
     * {@code Builder} class for {@link VolumeShaper.Operation} object.
     *
     * Not for public use.
     */
    public static final class Builder {
        int mFlags;
        int mReplaceId;
        float mXOffset;

        /**
         * Constructs a new {@code Builder} with the defaults.
         */
        public Builder() {
            mFlags = 0;
            mReplaceId = -1;       // no previous shaper to replace
            mXOffset = Float.NaN;  // NaN means "leave the position unchanged"
        }

        /**
         * Constructs a new {@code Builder} from a given {@code VolumeShaper.Operation}
         * @param operation the {@code VolumeShaper.operation} whose data will be
         *        reused in the new {@code Builder}.
         */
        public Builder(@NonNull VolumeShaper.Operation operation) {
            mReplaceId = operation.mReplaceId;
            mFlags = operation.mFlags;
            mXOffset = operation.mXOffset;
        }

        /**
         * Replaces the previous {@code VolumeShaper} specified by {@code id}.
         *
         * The {@code VolumeShaper} specified by the {@code id} is removed
         * if it exists. The configuration should be TYPE_SCALE.
         *
         * @param id the {@code id} of the previous {@code VolumeShaper}.
         * @param join if true, match the volume of the previous
         *        shaper to the start volume of the new {@code VolumeShaper}.
         * @return the same {@code Builder} instance.
         */
        public @NonNull Builder replace(int id, boolean join) {
            mReplaceId = id;
            if (join) {
                mFlags |= FLAG_JOIN;
            } else {
                mFlags &= ~FLAG_JOIN;
            }
            return this;
        }

        /**
         * Defers all operations.
         * @return the same {@code Builder} instance.
         */
        public @NonNull Builder defer() {
            mFlags |= FLAG_DEFER;
            return this;
        }

        /**
         * Terminates the {@code VolumeShaper}.
         *
         * Do not call directly, use {@link VolumeShaper#close()}.
         * @return the same {@code Builder} instance.
         */
        public @NonNull Builder terminate() {
            mFlags |= FLAG_TERMINATE;
            return this;
        }

        /**
         * Reverses direction.
         * @return the same {@code Builder} instance.
         */
        public @NonNull Builder reverse() {
            // XOR deliberately toggles the current direction rather than
            // forcing the reverse flag on.
            mFlags ^= FLAG_REVERSE;
            return this;
        }

        /**
         * Use the id specified in the configuration, creating
         * {@code VolumeShaper} only as needed; the configuration should be
         * TYPE_SCALE.
         *
         * If the {@code VolumeShaper} with the same id already exists
         * then the operation has no effect.
         *
         * @return the same {@code Builder} instance.
         */
        public @NonNull Builder createIfNeeded() {
            mFlags |= FLAG_CREATE_IF_NEEDED;
            return this;
        }

        /**
         * Sets the {@code xOffset} to use for the {@code VolumeShaper}.
         *
         * The {@code xOffset} is the position on the volume curve,
         * and setting takes effect when the {@code VolumeShaper} is used next.
         *
         * @param xOffset a value between (or equal to) 0.f and 1.f, or Float.NaN to ignore.
         * @return the same {@code Builder} instance.
         * @throws IllegalArgumentException if {@code xOffset} is not between 0.f and 1.f,
         *         or a Float.NaN.
         */
        public @NonNull Builder setXOffset(float xOffset) {
            // Note: -0.f == 0.f, so this is equivalent to (xOffset < 0.f);
            // NaN fails both comparisons and is accepted (see below).
            if (xOffset < -0.f) {
                throw new IllegalArgumentException("Negative xOffset not allowed");
            } else if (xOffset > 1.f) {
                throw new IllegalArgumentException("xOffset > 1.f not allowed");
            }
            // Float.NaN passes through
            mXOffset = xOffset;
            return this;
        }

        /**
         * Sets the operation flag. Do not call this directly but one of the
         * other builder methods.
         *
         * @param flags new value for {@code flags}, consisting of ORed flags.
         * @return the same {@code Builder} instance.
         * @throws IllegalArgumentException if {@code flags} contains invalid set bits.
         */
        private @NonNull Builder setFlags(@Flag int flags) {
            if ((flags & ~FLAG_PUBLIC_ALL) != 0) {
                throw new IllegalArgumentException("flag has unknown bits set: " + flags);
            }
            // Replace only the public bits; internal flag bits are preserved.
            mFlags = mFlags & ~FLAG_PUBLIC_ALL | flags;
            return this;
        }

        /**
         * Builds a new {@link VolumeShaper.Operation} object.
         *
         * @return a new {@code VolumeShaper.Operation} object
         */
        public @NonNull Operation build() {
            return new Operation(mFlags, mReplaceId, mXOffset);
        }
    } // Operation.Builder
} // Operation
+
+ /**
+ * @hide
+ * {@code VolumeShaper.State} represents the current progress
+ * of the {@code VolumeShaper}.
+ *
+ * Not for public use.
+ */
+ public static final class State implements Parcelable {
+ private float mVolume;
+ private float mXOffset;
+
+ @Override
+ public String toString() {
+ return "VolumeShaper.State{"
+ + "mVolume = " + mVolume
+ + ", mXOffset = " + mXOffset
+ + "}";
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(mVolume, mXOffset);
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (!(o instanceof State)) return false;
+ if (o == this) return true;
+ final State other = (State) o;
+ return mVolume == other.mVolume
+ && mXOffset == other.mXOffset;
+ }
+
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ @Override
+ public void writeToParcel(Parcel dest, int flags) {
+ dest.writeFloat(mVolume);
+ dest.writeFloat(mXOffset);
+ }
+
+ public static final Parcelable.Creator<VolumeShaper.State> CREATOR
+ = new Parcelable.Creator<VolumeShaper.State>() {
+ @Override
+ public VolumeShaper.State createFromParcel(Parcel p) {
+ return new VolumeShaper.State(
+ p.readFloat() // volume
+ , p.readFloat()); // xOffset
+ }
+
+ @Override
+ public VolumeShaper.State[] newArray(int size) {
+ return new VolumeShaper.State[size];
+ }
+ };
+
+ /* package */ State(float volume, float xOffset) {
+ mVolume = volume;
+ mXOffset = xOffset;
+ }
+
+ /**
+ * Gets the volume of the {@link VolumeShaper.State}.
+ * @return linear volume between 0.f and 1.f.
+ */
+ public float getVolume() {
+ return mVolume;
+ }
+
+ /**
+ * Gets the {@code xOffset} position on the normalized curve
+ * of the {@link VolumeShaper.State}.
+ * @return the curve x position between 0.f and 1.f.
+ */
+ public float getXOffset() {
+ return mXOffset;
+ }
+ } // State
+}
diff --git a/android/media/WebVttRenderer.java b/android/media/WebVttRenderer.java
new file mode 100644
index 00000000..91c53fac
--- /dev/null
+++ b/android/media/WebVttRenderer.java
@@ -0,0 +1,1866 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media;
+
+import android.content.Context;
+import android.text.Layout.Alignment;
+import android.text.SpannableStringBuilder;
+import android.util.ArrayMap;
+import android.util.AttributeSet;
+import android.util.Log;
+import android.view.Gravity;
+import android.view.View;
+import android.view.ViewGroup;
+import android.view.accessibility.CaptioningManager;
+import android.view.accessibility.CaptioningManager.CaptionStyle;
+import android.view.accessibility.CaptioningManager.CaptioningChangeListener;
+import android.widget.LinearLayout;
+
+import com.android.internal.widget.SubtitleView;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Vector;
+
+/** @hide */
+public class WebVttRenderer extends SubtitleController.Renderer {
+ private final Context mContext;
+
+ private WebVttRenderingWidget mRenderingWidget;
+
+ public WebVttRenderer(Context context) {
+ mContext = context;
+ }
+
+ @Override
+ public boolean supports(MediaFormat format) {
+ if (format.containsKey(MediaFormat.KEY_MIME)) {
+ return format.getString(MediaFormat.KEY_MIME).equals("text/vtt");
+ }
+ return false;
+ }
+
+ @Override
+ public SubtitleTrack createTrack(MediaFormat format) {
+ if (mRenderingWidget == null) {
+ mRenderingWidget = new WebVttRenderingWidget(mContext);
+ }
+
+ return new WebVttTrack(mRenderingWidget, format);
+ }
+}
+
+/** @hide */
+class TextTrackCueSpan {
+ long mTimestampMs;
+ boolean mEnabled;
+ String mText;
+ TextTrackCueSpan(String text, long timestamp) {
+ mTimestampMs = timestamp;
+ mText = text;
+ // spans with timestamp will be enabled by Cue.onTime
+ mEnabled = (mTimestampMs < 0);
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (!(o instanceof TextTrackCueSpan)) {
+ return false;
+ }
+ TextTrackCueSpan span = (TextTrackCueSpan) o;
+ return mTimestampMs == span.mTimestampMs &&
+ mText.equals(span.mText);
+ }
+}
+
+/**
+ * @hide
+ *
+ * Extract all text without style, but with timestamp spans.
+ */
+class UnstyledTextExtractor implements Tokenizer.OnTokenListener {
+ StringBuilder mLine = new StringBuilder();
+ Vector<TextTrackCueSpan[]> mLines = new Vector<TextTrackCueSpan[]>();
+ Vector<TextTrackCueSpan> mCurrentLine = new Vector<TextTrackCueSpan>();
+ long mLastTimestamp;
+
+ UnstyledTextExtractor() {
+ init();
+ }
+
+ private void init() {
+ mLine.delete(0, mLine.length());
+ mLines.clear();
+ mCurrentLine.clear();
+ mLastTimestamp = -1;
+ }
+
+ @Override
+ public void onData(String s) {
+ mLine.append(s);
+ }
+
+ @Override
+ public void onStart(String tag, String[] classes, String annotation) { }
+
+ @Override
+ public void onEnd(String tag) { }
+
+ @Override
+ public void onTimeStamp(long timestampMs) {
+ // finish any prior span
+ if (mLine.length() > 0 && timestampMs != mLastTimestamp) {
+ mCurrentLine.add(
+ new TextTrackCueSpan(mLine.toString(), mLastTimestamp));
+ mLine.delete(0, mLine.length());
+ }
+ mLastTimestamp = timestampMs;
+ }
+
+ @Override
+ public void onLineEnd() {
+ // finish any pending span
+ if (mLine.length() > 0) {
+ mCurrentLine.add(
+ new TextTrackCueSpan(mLine.toString(), mLastTimestamp));
+ mLine.delete(0, mLine.length());
+ }
+
+ TextTrackCueSpan[] spans = new TextTrackCueSpan[mCurrentLine.size()];
+ mCurrentLine.toArray(spans);
+ mCurrentLine.clear();
+ mLines.add(spans);
+ }
+
+ public TextTrackCueSpan[][] getText() {
+ // for politeness, finish last cue-line if it ends abruptly
+ if (mLine.length() > 0 || mCurrentLine.size() > 0) {
+ onLineEnd();
+ }
+ TextTrackCueSpan[][] lines = new TextTrackCueSpan[mLines.size()][];
+ mLines.toArray(lines);
+ init();
+ return lines;
+ }
+}
+
/**
 * @hide
 *
 * Tokenizer tokenizes the WebVTT Cue Text into tags and data.
 *
 * Tokenization alternates between two phases that share the input line and
 * the consumed-character count: {@link DataTokenizer} handles plain text and
 * HTML-style character escapes, {@link TagTokenizer} handles everything
 * between '&lt;' and '&gt;'. Tokens are reported via {@link OnTokenListener}.
 */
class Tokenizer {
    private static final String TAG = "Tokenizer";
    private TokenizerPhase mPhase;          // current phase (data or tag)
    private TokenizerPhase mDataTokenizer;
    private TokenizerPhase mTagTokenizer;

    private OnTokenListener mListener;
    private String mLine;       // line currently being tokenized
    private int mHandledLen;    // number of characters of mLine consumed so far

    interface TokenizerPhase {
        // Resets the phase's internal state and returns itself for chaining.
        TokenizerPhase start();
        // Consumes characters from mLine starting at mHandledLen; may switch mPhase.
        void tokenize();
    }

    class DataTokenizer implements TokenizerPhase {
        // includes both WebVTT data && escape state
        private StringBuilder mData;

        public TokenizerPhase start() {
            mData = new StringBuilder();
            return this;
        }

        /**
         * If {@code escape} occurs at {@code pos}, copies the text before it,
         * appends {@code replacement}, and advances mHandledLen past the escape.
         * NOTE(review): the local assignment to {@code pos} has no effect at the
         * call site (pass-by-value); the caller's loop index continues unchanged,
         * which is harmless because the skipped region contains no '&' or '<'.
         */
        private boolean replaceEscape(String escape, String replacement, int pos) {
            if (mLine.startsWith(escape, pos)) {
                mData.append(mLine.substring(mHandledLen, pos));
                mData.append(replacement);
                mHandledLen = pos + escape.length();
                pos = mHandledLen - 1;
                return true;
            }
            return false;
        }

        @Override
        public void tokenize() {
            int end = mLine.length();
            for (int pos = mHandledLen; pos < mLine.length(); pos++) {
                if (mLine.charAt(pos) == '&') {
                    // Try each supported character escape in turn.
                    if (replaceEscape("&amp;", "&", pos) ||
                            replaceEscape("&lt;", "<", pos) ||
                            replaceEscape("&gt;", ">", pos) ||
                            replaceEscape("&lrm;", "\u200e", pos) ||
                            replaceEscape("&rlm;", "\u200f", pos) ||
                            replaceEscape("&nbsp;", "\u00a0", pos)) {
                        continue;
                    }
                } else if (mLine.charAt(pos) == '<') {
                    // A tag begins: hand the rest of the line to the tag phase.
                    end = pos;
                    mPhase = mTagTokenizer.start();
                    break;
                }
            }
            mData.append(mLine.substring(mHandledLen, end));
            // yield mData
            mListener.onData(mData.toString());
            mData.delete(0, mData.length());
            mHandledLen = end;
        }
    }

    class TagTokenizer implements TokenizerPhase {
        private boolean mAtAnnotation;   // past the tag name, collecting annotation
        private String mName, mAnnotation;

        public TokenizerPhase start() {
            mName = mAnnotation = "";
            mAtAnnotation = false;
            return this;
        }

        @Override
        public void tokenize() {
            if (!mAtAnnotation)
                mHandledLen++;   // skip the opening '<'
            if (mHandledLen < mLine.length()) {
                String[] parts;
                /**
                 * Collect annotations and end-tags to closing >. Collect tag
                 * name to closing bracket or next white-space.
                 */
                if (mAtAnnotation || mLine.charAt(mHandledLen) == '/') {
                    parts = mLine.substring(mHandledLen).split(">");
                } else {
                    parts = mLine.substring(mHandledLen).split("[\t\f >]");
                }
                String part = mLine.substring(
                        mHandledLen, mHandledLen + parts[0].length());
                mHandledLen += parts[0].length();

                if (mAtAnnotation) {
                    mAnnotation += " " + part;
                } else {
                    mName = part;
                }
            }

            // Anything further on this (or a later) line is annotation text.
            mAtAnnotation = true;

            if (mHandledLen < mLine.length() && mLine.charAt(mHandledLen) == '>') {
                // Tag is complete: emit it and return to the data phase.
                yield_tag();
                mPhase = mDataTokenizer.start();
                mHandledLen++;
            }
        }

        /** Classifies the finished tag as end-tag, timestamp, or start-tag. */
        private void yield_tag() {
            if (mName.startsWith("/")) {
                mListener.onEnd(mName.substring(1));
            } else if (mName.length() > 0 && Character.isDigit(mName.charAt(0))) {
                // timestamp
                try {
                    long timestampMs = WebVttParser.parseTimestampMs(mName);
                    mListener.onTimeStamp(timestampMs);
                } catch (NumberFormatException e) {
                    Log.d(TAG, "invalid timestamp tag: <" + mName + ">");
                }
            } else {
                // Start tag: normalize annotation whitespace, split classes
                // off the tag name ("name.class1.class2").
                mAnnotation = mAnnotation.replaceAll("\\s+", " ");
                if (mAnnotation.startsWith(" ")) {
                    mAnnotation = mAnnotation.substring(1);
                }
                if (mAnnotation.endsWith(" ")) {
                    mAnnotation = mAnnotation.substring(0, mAnnotation.length() - 1);
                }

                String[] classes = null;
                int dotAt = mName.indexOf('.');
                if (dotAt >= 0) {
                    classes = mName.substring(dotAt + 1).split("\\.");
                    mName = mName.substring(0, dotAt);
                }
                mListener.onStart(mName, classes, mAnnotation);
            }
        }
    }

    Tokenizer(OnTokenListener listener) {
        mDataTokenizer = new DataTokenizer();
        mTagTokenizer = new TagTokenizer();
        reset();
        mListener = listener;
    }

    /** Returns to the data phase; call before tokenizing a new cue. */
    void reset() {
        mPhase = mDataTokenizer.start();
    }

    /** Tokenizes one line of cue text, emitting tokens to the listener. */
    void tokenize(String s) {
        mHandledLen = 0;
        mLine = s;
        while (mHandledLen < mLine.length()) {
            mPhase.tokenize();
        }
        /* we are finished with a line unless we are in the middle of a tag */
        if (!(mPhase instanceof TagTokenizer)) {
            // yield END-OF-LINE
            mListener.onLineEnd();
        }
    }

    /** Receives the token stream produced by {@link #tokenize}. */
    interface OnTokenListener {
        void onData(String s);
        void onStart(String tag, String[] classes, String annotation);
        void onEnd(String tag);
        void onTimeStamp(long timestampMs);
        void onLineEnd();
    }
}
+
/**
 * Mutable holder for a WebVTT "Region:" metadata header: an identified,
 * anchored viewport area into which cues may be rendered.
 *
 * @hide
 */
class TextTrackRegion {
    final static int SCROLL_VALUE_NONE      = 300;
    final static int SCROLL_VALUE_SCROLL_UP = 301;

    String mId;
    float mWidth;
    int mLines;
    float mAnchorPointX, mAnchorPointY;
    float mViewportAnchorPointX, mViewportAnchorPointY;
    int mScrollValue;

    /** Creates a region with the WebVTT-specified defaults. */
    TextTrackRegion() {
        mId = "";
        mWidth = 100;
        mLines = 3;
        // Default anchor is the bottom-left corner (x=0%, y=100%) for both
        // the region and the viewport.
        mAnchorPointX = mViewportAnchorPointX = 0.f;
        mAnchorPointY = mViewportAnchorPointY = 100.f;
        mScrollValue = SCROLL_VALUE_NONE;
    }

    public String toString() {
        final String scroll =
                mScrollValue == SCROLL_VALUE_NONE ? "none" :
                mScrollValue == SCROLL_VALUE_SCROLL_UP ? "scroll_up" :
                "INVALID";
        return " {id:\"" + mId
                + "\", width:" + mWidth
                + ", lines:" + mLines
                + ", anchorPoint:(" + mAnchorPointX
                + ", " + mAnchorPointY
                + "), viewportAnchorPoints:" + mViewportAnchorPointX
                + ", " + mViewportAnchorPointY
                + "), scrollValue:" + scroll
                + "}";
    }
}
+
/**
 * A single parsed WebVTT cue: its timing window (inherited from
 * SubtitleTrack.Cue), display settings, raw text lines, and — once
 * tokenized — the per-line arrays of timestamped spans.
 *
 * @hide
 */
class TextTrackCue extends SubtitleTrack.Cue {
    final static int WRITING_DIRECTION_HORIZONTAL  = 100;
    final static int WRITING_DIRECTION_VERTICAL_RL = 101;
    final static int WRITING_DIRECTION_VERTICAL_LR = 102;

    final static int ALIGNMENT_MIDDLE = 200;
    final static int ALIGNMENT_START  = 201;
    final static int ALIGNMENT_END    = 202;
    final static int ALIGNMENT_LEFT   = 203;
    final static int ALIGNMENT_RIGHT  = 204;
    private static final String TAG = "TTCue";

    String  mId;
    boolean mPauseOnExit;
    int     mWritingDirection;
    String  mRegionId;          // id of the region this cue targets, "" if none
    boolean mSnapToLines;
    Integer mLinePosition;  // null means AUTO
    boolean mAutoLinePosition;
    int     mTextPosition;      // percentage, 0..100
    int     mSize;              // percentage of the viewport width, 0..100
    int     mAlignment;
    // Vector<String> mText;
    String[] mStrings;              // raw cue text lines, as parsed
    TextTrackCueSpan[][] mLines;    // tokenized spans, one array per line
    TextTrackRegion mRegion;        // resolved region, or null

    /** Creates a cue with the WebVTT-specified default settings. */
    TextTrackCue() {
        mId = "";
        mPauseOnExit = false;
        mWritingDirection = WRITING_DIRECTION_HORIZONTAL;
        mRegionId = "";
        mSnapToLines = true;
        mLinePosition = null /* AUTO */;
        mTextPosition = 50;
        mSize = 100;
        mAlignment = ALIGNMENT_MIDDLE;
        mLines = null;
        mRegion = null;
    }

    @Override
    public boolean equals(Object o) {
        if (!(o instanceof TextTrackCue)) {
            return false;
        }
        if (this == o) {
            return true;
        }

        // NOTE(review): mLines.length will NPE if either cue has not been
        // tokenized yet (mLines == null) — confirm callers only compare
        // fully-built cues.
        try {
            TextTrackCue cue = (TextTrackCue) o;
            boolean res = mId.equals(cue.mId) &&
                    mPauseOnExit == cue.mPauseOnExit &&
                    mWritingDirection == cue.mWritingDirection &&
                    mRegionId.equals(cue.mRegionId) &&
                    mSnapToLines == cue.mSnapToLines &&
                    mAutoLinePosition == cue.mAutoLinePosition &&
                    // line positions match if both auto, or both null/equal
                    (mAutoLinePosition ||
                            ((mLinePosition != null && mLinePosition.equals(cue.mLinePosition)) ||
                             (mLinePosition == null && cue.mLinePosition == null))) &&
                    mTextPosition == cue.mTextPosition &&
                    mSize == cue.mSize &&
                    mAlignment == cue.mAlignment &&
                    mLines.length == cue.mLines.length;
            if (res == true) {
                // Shallow field equality passed; compare line contents too.
                for (int line = 0; line < mLines.length; line++) {
                    if (!Arrays.equals(mLines[line], cue.mLines[line])) {
                        return false;
                    }
                }
            }
            return res;
        } catch(IncompatibleClassChangeError e) {
            // Defensive: treat binary-incompatible classes as unequal.
            return false;
        }
    }

    /** Appends mStrings in a bracketed, quoted debug form to builder. */
    public StringBuilder appendStringsToBuilder(StringBuilder builder) {
        if (mStrings == null) {
            builder.append("null");
        } else {
            builder.append("[");
            boolean first = true;
            for (String s: mStrings) {
                if (!first) {
                    builder.append(", ");
                }
                if (s == null) {
                    builder.append("null");
                } else {
                    builder.append("\"");
                    builder.append(s);
                    builder.append("\"");
                }
                first = false;
            }
            builder.append("]");
        }
        return builder;
    }

    /** Appends mLines in a debug form, re-inserting inner timestamp tags. */
    public StringBuilder appendLinesToBuilder(StringBuilder builder) {
        if (mLines == null) {
            builder.append("null");
        } else {
            builder.append("[");
            boolean first = true;
            for (TextTrackCueSpan[] spans: mLines) {
                if (!first) {
                    builder.append(", ");
                }
                if (spans == null) {
                    builder.append("null");
                } else {
                    builder.append("\"");
                    boolean innerFirst = true;
                    long lastTimestamp = -1;
                    for (TextTrackCueSpan span: spans) {
                        if (!innerFirst) {
                            builder.append(" ");
                        }
                        // Only print a <timestamp> marker when it changes.
                        if (span.mTimestampMs != lastTimestamp) {
                            builder.append("<")
                                    .append(WebVttParser.timeToString(
                                            span.mTimestampMs))
                                    .append(">");
                            lastTimestamp = span.mTimestampMs;
                        }
                        builder.append(span.mText);
                        innerFirst = false;
                    }
                    builder.append("\"");
                }
                first = false;
            }
            builder.append("]");
        }
        return builder;
    }

    public String toString() {
        StringBuilder res = new StringBuilder();

        res.append(WebVttParser.timeToString(mStartTimeMs))
                .append(" --> ").append(WebVttParser.timeToString(mEndTimeMs))
                .append(" {id:\"").append(mId)
                .append("\", pauseOnExit:").append(mPauseOnExit)
                .append(", direction:")
                .append(mWritingDirection == WRITING_DIRECTION_HORIZONTAL ? "horizontal" :
                        mWritingDirection == WRITING_DIRECTION_VERTICAL_LR ? "vertical_lr" :
                        mWritingDirection == WRITING_DIRECTION_VERTICAL_RL ? "vertical_rl" :
                        "INVALID")
                .append(", regionId:\"").append(mRegionId)
                .append("\", snapToLines:").append(mSnapToLines)
                .append(", linePosition:").append(mAutoLinePosition ? "auto" :
                        mLinePosition)
                .append(", textPosition:").append(mTextPosition)
                .append(", size:").append(mSize)
                .append(", alignment:")
                .append(mAlignment == ALIGNMENT_END ? "end" :
                        mAlignment == ALIGNMENT_LEFT ? "left" :
                        mAlignment == ALIGNMENT_MIDDLE ? "middle" :
                        mAlignment == ALIGNMENT_RIGHT ? "right" :
                        mAlignment == ALIGNMENT_START ? "start" : "INVALID")
                .append(", text:");
        appendStringsToBuilder(res).append("}");
        return res.toString();
    }

    @Override
    public int hashCode() {
        // Derived from toString(), which covers the same settings equals()
        // compares (text via mStrings rather than mLines).
        return toString().hashCode();
    }

    /**
     * Enables/disables the cue's inner-timestamped spans for the given
     * playback time. A span becomes visible once timeMs reaches its stamp.
     */
    @Override
    public void onTime(long timeMs) {
        for (TextTrackCueSpan[] line: mLines) {
            for (TextTrackCueSpan span: line) {
                span.mEnabled = timeMs >= span.mTimestampMs;
            }
        }
    }
}
+
+/**
+ * Supporting July 10 2013 draft version
+ *
+ * @hide
+ */
+class WebVttParser {
+ private static final String TAG = "WebVttParser";
+ private Phase mPhase;
+ private TextTrackCue mCue;
+ private Vector<String> mCueTexts;
+ private WebVttCueListener mListener;
+ private String mBuffer;
+
+ WebVttParser(WebVttCueListener listener) {
+ mPhase = mParseStart;
+ mBuffer = ""; /* mBuffer contains up to 1 incomplete line */
+ mListener = listener;
+ mCueTexts = new Vector<String>();
+ }
+
+ /* parsePercentageString */
+ public static float parseFloatPercentage(String s)
+ throws NumberFormatException {
+ if (!s.endsWith("%")) {
+ throw new NumberFormatException("does not end in %");
+ }
+ s = s.substring(0, s.length() - 1);
+ // parseFloat allows an exponent or a sign
+ if (s.matches(".*[^0-9.].*")) {
+ throw new NumberFormatException("contains an invalid character");
+ }
+
+ try {
+ float value = Float.parseFloat(s);
+ if (value < 0.0f || value > 100.0f) {
+ throw new NumberFormatException("is out of range");
+ }
+ return value;
+ } catch (NumberFormatException e) {
+ throw new NumberFormatException("is not a number");
+ }
+ }
+
+ public static int parseIntPercentage(String s) throws NumberFormatException {
+ if (!s.endsWith("%")) {
+ throw new NumberFormatException("does not end in %");
+ }
+ s = s.substring(0, s.length() - 1);
+ // parseInt allows "-0" that returns 0, so check for non-digits
+ if (s.matches(".*[^0-9].*")) {
+ throw new NumberFormatException("contains an invalid character");
+ }
+
+ try {
+ int value = Integer.parseInt(s);
+ if (value < 0 || value > 100) {
+ throw new NumberFormatException("is out of range");
+ }
+ return value;
+ } catch (NumberFormatException e) {
+ throw new NumberFormatException("is not a number");
+ }
+ }
+
+ public static long parseTimestampMs(String s) throws NumberFormatException {
+ if (!s.matches("(\\d+:)?[0-5]\\d:[0-5]\\d\\.\\d{3}")) {
+ throw new NumberFormatException("has invalid format");
+ }
+
+ String[] parts = s.split("\\.", 2);
+ long value = 0;
+ for (String group: parts[0].split(":")) {
+ value = value * 60 + Long.parseLong(group);
+ }
+ return value * 1000 + Long.parseLong(parts[1]);
+ }
+
+ public static String timeToString(long timeMs) {
+ return String.format("%d:%02d:%02d.%03d",
+ timeMs / 3600000, (timeMs / 60000) % 60,
+ (timeMs / 1000) % 60, timeMs % 1000);
+ }
+
+ public void parse(String s) {
+ boolean trailingCR = false;
+ mBuffer = (mBuffer + s.replace("\0", "\ufffd")).replace("\r\n", "\n");
+
+ /* keep trailing '\r' in case matching '\n' arrives in next packet */
+ if (mBuffer.endsWith("\r")) {
+ trailingCR = true;
+ mBuffer = mBuffer.substring(0, mBuffer.length() - 1);
+ }
+
+ String[] lines = mBuffer.split("[\r\n]");
+ for (int i = 0; i < lines.length - 1; i++) {
+ mPhase.parse(lines[i]);
+ }
+
+ mBuffer = lines[lines.length - 1];
+ if (trailingCR)
+ mBuffer += "\r";
+ }
+
+ public void eos() {
+ if (mBuffer.endsWith("\r")) {
+ mBuffer = mBuffer.substring(0, mBuffer.length() - 1);
+ }
+
+ mPhase.parse(mBuffer);
+ mBuffer = "";
+
+ yieldCue();
+ mPhase = mParseStart;
+ }
+
+ public void yieldCue() {
+ if (mCue != null && mCueTexts.size() > 0) {
+ mCue.mStrings = new String[mCueTexts.size()];
+ mCueTexts.toArray(mCue.mStrings);
+ mCueTexts.clear();
+ mListener.onCueParsed(mCue);
+ }
+ mCue = null;
+ }
+
+ interface Phase {
+ void parse(String line);
+ }
+
+ final private Phase mSkipRest = new Phase() {
+ @Override
+ public void parse(String line) { }
+ };
+
+ final private Phase mParseStart = new Phase() { // 5-9
+ @Override
+ public void parse(String line) {
+ if (line.startsWith("\ufeff")) {
+ line = line.substring(1);
+ }
+ if (!line.equals("WEBVTT") &&
+ !line.startsWith("WEBVTT ") &&
+ !line.startsWith("WEBVTT\t")) {
+ log_warning("Not a WEBVTT header", line);
+ mPhase = mSkipRest;
+ } else {
+ mPhase = mParseHeader;
+ }
+ }
+ };
+
+ final private Phase mParseHeader = new Phase() { // 10-13
+ TextTrackRegion parseRegion(String s) {
+ TextTrackRegion region = new TextTrackRegion();
+ for (String setting: s.split(" +")) {
+ int equalAt = setting.indexOf('=');
+ if (equalAt <= 0 || equalAt == setting.length() - 1) {
+ continue;
+ }
+
+ String name = setting.substring(0, equalAt);
+ String value = setting.substring(equalAt + 1);
+ if (name.equals("id")) {
+ region.mId = value;
+ } else if (name.equals("width")) {
+ try {
+ region.mWidth = parseFloatPercentage(value);
+ } catch (NumberFormatException e) {
+ log_warning("region setting", name,
+ "has invalid value", e.getMessage(), value);
+ }
+ } else if (name.equals("lines")) {
+ if (value.matches(".*[^0-9].*")) {
+ log_warning("lines", name, "contains an invalid character", value);
+ } else {
+ try {
+ region.mLines = Integer.parseInt(value);
+ assert(region.mLines >= 0); // lines contains only digits
+ } catch (NumberFormatException e) {
+ log_warning("region setting", name, "is not numeric", value);
+ }
+ }
+ } else if (name.equals("regionanchor") ||
+ name.equals("viewportanchor")) {
+ int commaAt = value.indexOf(",");
+ if (commaAt < 0) {
+ log_warning("region setting", name, "contains no comma", value);
+ continue;
+ }
+
+ String anchorX = value.substring(0, commaAt);
+ String anchorY = value.substring(commaAt + 1);
+ float x, y;
+
+ try {
+ x = parseFloatPercentage(anchorX);
+ } catch (NumberFormatException e) {
+ log_warning("region setting", name,
+ "has invalid x component", e.getMessage(), anchorX);
+ continue;
+ }
+ try {
+ y = parseFloatPercentage(anchorY);
+ } catch (NumberFormatException e) {
+ log_warning("region setting", name,
+ "has invalid y component", e.getMessage(), anchorY);
+ continue;
+ }
+
+ if (name.charAt(0) == 'r') {
+ region.mAnchorPointX = x;
+ region.mAnchorPointY = y;
+ } else {
+ region.mViewportAnchorPointX = x;
+ region.mViewportAnchorPointY = y;
+ }
+ } else if (name.equals("scroll")) {
+ if (value.equals("up")) {
+ region.mScrollValue =
+ TextTrackRegion.SCROLL_VALUE_SCROLL_UP;
+ } else {
+ log_warning("region setting", name, "has invalid value", value);
+ }
+ }
+ }
+ return region;
+ }
+
+ @Override
+ public void parse(String line) {
+ if (line.length() == 0) {
+ mPhase = mParseCueId;
+ } else if (line.contains("-->")) {
+ mPhase = mParseCueTime;
+ mPhase.parse(line);
+ } else {
+ int colonAt = line.indexOf(':');
+ if (colonAt <= 0 || colonAt >= line.length() - 1) {
+ log_warning("meta data header has invalid format", line);
+ }
+ String name = line.substring(0, colonAt);
+ String value = line.substring(colonAt + 1);
+
+ if (name.equals("Region")) {
+ TextTrackRegion region = parseRegion(value);
+ mListener.onRegionParsed(region);
+ }
+ }
+ }
+ };
+
+ final private Phase mParseCueId = new Phase() {
+ @Override
+ public void parse(String line) {
+ if (line.length() == 0) {
+ return;
+ }
+
+ assert(mCue == null);
+
+ if (line.equals("NOTE") || line.startsWith("NOTE ")) {
+ mPhase = mParseCueText;
+ }
+
+ mCue = new TextTrackCue();
+ mCueTexts.clear();
+
+ mPhase = mParseCueTime;
+ if (line.contains("-->")) {
+ mPhase.parse(line);
+ } else {
+ mCue.mId = line;
+ }
+ }
+ };
+
+ final private Phase mParseCueTime = new Phase() {
+ @Override
+ public void parse(String line) {
+ int arrowAt = line.indexOf("-->");
+ if (arrowAt < 0) {
+ mCue = null;
+ mPhase = mParseCueId;
+ return;
+ }
+
+ String start = line.substring(0, arrowAt).trim();
+ // convert only initial and first other white-space to space
+ String rest = line.substring(arrowAt + 3)
+ .replaceFirst("^\\s+", "").replaceFirst("\\s+", " ");
+ int spaceAt = rest.indexOf(' ');
+ String end = spaceAt > 0 ? rest.substring(0, spaceAt) : rest;
+ rest = spaceAt > 0 ? rest.substring(spaceAt + 1) : "";
+
+ mCue.mStartTimeMs = parseTimestampMs(start);
+ mCue.mEndTimeMs = parseTimestampMs(end);
+ for (String setting: rest.split(" +")) {
+ int colonAt = setting.indexOf(':');
+ if (colonAt <= 0 || colonAt == setting.length() - 1) {
+ continue;
+ }
+ String name = setting.substring(0, colonAt);
+ String value = setting.substring(colonAt + 1);
+
+ if (name.equals("region")) {
+ mCue.mRegionId = value;
+ } else if (name.equals("vertical")) {
+ if (value.equals("rl")) {
+ mCue.mWritingDirection =
+ TextTrackCue.WRITING_DIRECTION_VERTICAL_RL;
+ } else if (value.equals("lr")) {
+ mCue.mWritingDirection =
+ TextTrackCue.WRITING_DIRECTION_VERTICAL_LR;
+ } else {
+ log_warning("cue setting", name, "has invalid value", value);
+ }
+ } else if (name.equals("line")) {
+ try {
+ /* TRICKY: we know that there are no spaces in value */
+ assert(value.indexOf(' ') < 0);
+ if (value.endsWith("%")) {
+ mCue.mSnapToLines = false;
+ mCue.mLinePosition = parseIntPercentage(value);
+ } else if (value.matches(".*[^0-9].*")) {
+ log_warning("cue setting", name,
+ "contains an invalid character", value);
+ } else {
+ mCue.mSnapToLines = true;
+ mCue.mLinePosition = Integer.parseInt(value);
+ }
+ } catch (NumberFormatException e) {
+ log_warning("cue setting", name,
+ "is not numeric or percentage", value);
+ }
+ // TODO: add support for optional alignment value [,start|middle|end]
+ } else if (name.equals("position")) {
+ try {
+ mCue.mTextPosition = parseIntPercentage(value);
+ } catch (NumberFormatException e) {
+ log_warning("cue setting", name,
+ "is not numeric or percentage", value);
+ }
+ } else if (name.equals("size")) {
+ try {
+ mCue.mSize = parseIntPercentage(value);
+ } catch (NumberFormatException e) {
+ log_warning("cue setting", name,
+ "is not numeric or percentage", value);
+ }
+ } else if (name.equals("align")) {
+ if (value.equals("start")) {
+ mCue.mAlignment = TextTrackCue.ALIGNMENT_START;
+ } else if (value.equals("middle")) {
+ mCue.mAlignment = TextTrackCue.ALIGNMENT_MIDDLE;
+ } else if (value.equals("end")) {
+ mCue.mAlignment = TextTrackCue.ALIGNMENT_END;
+ } else if (value.equals("left")) {
+ mCue.mAlignment = TextTrackCue.ALIGNMENT_LEFT;
+ } else if (value.equals("right")) {
+ mCue.mAlignment = TextTrackCue.ALIGNMENT_RIGHT;
+ } else {
+ log_warning("cue setting", name, "has invalid value", value);
+ continue;
+ }
+ }
+ }
+
+ if (mCue.mLinePosition != null ||
+ mCue.mSize != 100 ||
+ (mCue.mWritingDirection !=
+ TextTrackCue.WRITING_DIRECTION_HORIZONTAL)) {
+ mCue.mRegionId = "";
+ }
+
+ mPhase = mParseCueText;
+ }
+ };
+
+ /* also used for notes */
+ final private Phase mParseCueText = new Phase() {
+ @Override
+ public void parse(String line) {
+ if (line.length() == 0) {
+ yieldCue();
+ mPhase = mParseCueId;
+ return;
+ } else if (mCue != null) {
+ mCueTexts.add(line);
+ }
+ }
+ };
+
+ private void log_warning(
+ String nameType, String name, String message,
+ String subMessage, String value) {
+ Log.w(this.getClass().getName(), nameType + " '" + name + "' " +
+ message + " ('" + value + "' " + subMessage + ")");
+ }
+
+ private void log_warning(
+ String nameType, String name, String message, String value) {
+ Log.w(this.getClass().getName(), nameType + " '" + name + "' " +
+ message + " ('" + value + "')");
+ }
+
+ private void log_warning(String message, String value) {
+ Log.w(this.getClass().getName(), message + " ('" + value + "')");
+ }
+}
+
/**
 * Callback interface through which {@link WebVttParser} delivers
 * parsing results.
 *
 * @hide
 */
interface WebVttCueListener {
    /** Called when a complete cue (id, timings, settings, text) is parsed. */
    void onCueParsed(TextTrackCue cue);

    /** Called when a "Region:" metadata header is parsed. */
    void onRegionParsed(TextTrackRegion region);
}
+
/**
 * SubtitleTrack implementation for WebVTT data. Raw UTF-8 packets are fed
 * to a {@link WebVttParser}; parsed cues are tokenized into unstyled,
 * timestamped spans and registered with the base class. All parser state is
 * guarded by synchronizing on {@code mParser}.
 *
 * @hide
 */
class WebVttTrack extends SubtitleTrack implements WebVttCueListener {
    private static final String TAG = "WebVttTrack";

    private final WebVttParser mParser = new WebVttParser(this);
    private final UnstyledTextExtractor mExtractor =
            new UnstyledTextExtractor();
    private final Tokenizer mTokenizer = new Tokenizer(mExtractor);
    // scratch list of a cue's inner timestamps, reused across cues
    private final Vector<Long> mTimestamps = new Vector<Long>();
    private final WebVttRenderingWidget mRenderingWidget;

    // regions parsed during the current run, keyed by region id
    private final Map<String, TextTrackRegion> mRegions =
            new HashMap<String, TextTrackRegion>();
    // id of the data run currently being parsed; null when idle
    private Long mCurrentRunID;

    WebVttTrack(WebVttRenderingWidget renderingWidget, MediaFormat format) {
        super(format);

        mRenderingWidget = renderingWidget;
    }

    @Override
    public WebVttRenderingWidget getRenderingWidget() {
        return mRenderingWidget;
    }

    /**
     * Feeds one packet of subtitle data into the parser.
     * Rejects interleaved runs: a new run id while another run is in
     * progress raises IllegalStateException.
     */
    @Override
    public void onData(byte[] data, boolean eos, long runID) {
        try {
            String str = new String(data, "UTF-8");

            // implement intermixing restriction for WebVTT only for now
            synchronized(mParser) {
                if (mCurrentRunID != null && runID != mCurrentRunID) {
                    throw new IllegalStateException(
                            "Run #" + mCurrentRunID +
                            " in progress.  Cannot process run #" + runID);
                }
                mCurrentRunID = runID;
                mParser.parse(str);
                if (eos) {
                    // finalize the run: flush parser, drop run-scoped regions
                    finishedRun(runID);
                    mParser.eos();
                    mRegions.clear();
                    mCurrentRunID = null;
                }
            }
        } catch (java.io.UnsupportedEncodingException e) {
            Log.w(TAG, "subtitle data is not UTF-8 encoded: " + e);
        }
    }

    /**
     * Resolves the cue's region, tokenizes its text into spans, records its
     * inner timestamps, and registers it with the base class.
     */
    @Override
    public void onCueParsed(TextTrackCue cue) {
        synchronized (mParser) {
            // resolve region
            if (cue.mRegionId.length() != 0) {
                cue.mRegion = mRegions.get(cue.mRegionId);
            }

            if (DEBUG) Log.v(TAG, "adding cue " + cue);

            // tokenize text track string-lines into lines of spans
            mTokenizer.reset();
            for (String s: cue.mStrings) {
                mTokenizer.tokenize(s);
            }
            cue.mLines = mExtractor.getText();
            if (DEBUG) Log.v(TAG, cue.appendLinesToBuilder(
                    cue.appendStringsToBuilder(
                            new StringBuilder()).append(" simplified to: "))
                    .toString());

            // extract inner timestamps
            for (TextTrackCueSpan[] line: cue.mLines) {
                for (TextTrackCueSpan span: line) {
                    // only timestamps strictly inside the cue window matter
                    if (span.mTimestampMs > cue.mStartTimeMs &&
                            span.mTimestampMs < cue.mEndTimeMs &&
                            !mTimestamps.contains(span.mTimestampMs)) {
                        mTimestamps.add(span.mTimestampMs);
                    }
                }
            }

            if (mTimestamps.size() > 0) {
                cue.mInnerTimesMs = new long[mTimestamps.size()];
                for (int ix=0; ix < mTimestamps.size(); ++ix) {
                    cue.mInnerTimesMs[ix] = mTimestamps.get(ix);
                }
                mTimestamps.clear();
            } else {
                cue.mInnerTimesMs = null;
            }

            cue.mRunID = mCurrentRunID;
        }

        // addCue is called outside the lock
        addCue(cue);
    }

    @Override
    public void onRegionParsed(TextTrackRegion region) {
        synchronized(mParser) {
            mRegions.put(region.mId, region);
        }
    }

    /** Pushes the set of currently-active cues to the rendering widget. */
    @Override
    public void updateView(Vector<SubtitleTrack.Cue> activeCues) {
        if (!mVisible) {
            // don't keep the state if we are not visible
            return;
        }

        if (DEBUG && mTimeProvider != null) {
            try {
                Log.d(TAG, "at " +
                        (mTimeProvider.getCurrentTimeUs(false, true) / 1000) +
                        " ms the active cues are:");
            } catch (IllegalStateException e) {
                Log.d(TAG, "at (illegal state) the active cues are:");
            }
        }

        if (mRenderingWidget != null) {
            mRenderingWidget.setActiveCues(activeCues);
        }
    }
}
+
+/**
+ * Widget capable of rendering WebVTT captions.
+ *
+ * @hide
+ */
+class WebVttRenderingWidget extends ViewGroup implements SubtitleTrack.RenderingWidget {
+ private static final boolean DEBUG = false;
+
+ private static final CaptionStyle DEFAULT_CAPTION_STYLE = CaptionStyle.DEFAULT;
+
+ private static final int DEBUG_REGION_BACKGROUND = 0x800000FF;
+ private static final int DEBUG_CUE_BACKGROUND = 0x80FF0000;
+
+ /** WebVtt specifies line height as 5.3% of the viewport height. */
+ private static final float LINE_HEIGHT_RATIO = 0.0533f;
+
+ /** Map of active regions, used to determine enter/exit. */
+ private final ArrayMap<TextTrackRegion, RegionLayout> mRegionBoxes =
+ new ArrayMap<TextTrackRegion, RegionLayout>();
+
+ /** Map of active cues, used to determine enter/exit. */
+ private final ArrayMap<TextTrackCue, CueLayout> mCueBoxes =
+ new ArrayMap<TextTrackCue, CueLayout>();
+
+ /** Captioning manager, used to obtain and track caption properties. */
+ private final CaptioningManager mManager;
+
+ /** Callback for rendering changes. */
+ private OnChangedListener mListener;
+
+ /** Current caption style. */
+ private CaptionStyle mCaptionStyle;
+
+ /** Current font size, computed from font scaling factor and height. */
+ private float mFontSize;
+
+ /** Whether a caption style change listener is registered. */
+ private boolean mHasChangeListener;
+
    public WebVttRenderingWidget(Context context) {
        this(context, null);
    }

    public WebVttRenderingWidget(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public WebVttRenderingWidget(Context context, AttributeSet attrs, int defStyleAttr) {
        this(context, attrs, defStyleAttr, 0);
    }

    public WebVttRenderingWidget(
            Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
        super(context, attrs, defStyleAttr, defStyleRes);

        // Cannot render text over video when layer type is hardware.
        setLayerType(View.LAYER_TYPE_SOFTWARE, null);

        mManager = (CaptioningManager) context.getSystemService(Context.CAPTIONING_SERVICE);
        mCaptionStyle = mManager.getUserStyle();
        // NOTE(review): getHeight() is typically 0 during construction, so the
        // initial font size is 0 until manageChangeListener() recomputes it
        // after attach — confirm this is the intended behavior.
        mFontSize = mManager.getFontScale() * getHeight() * LINE_HEIGHT_RATIO;
    }
+
    /**
     * Forces an exact measure/layout pass at the given dimensions; this
     * widget is sized manually by its owner rather than by a parent layout.
     */
    @Override
    public void setSize(int width, int height) {
        final int widthSpec = MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY);
        final int heightSpec = MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY);

        measure(widthSpec, heightSpec);
        layout(0, 0, width, height);
    }
+
    @Override
    public void onAttachedToWindow() {
        super.onAttachedToWindow();

        // Attachment state affects whether caption-style callbacks are needed.
        manageChangeListener();
    }

    @Override
    public void onDetachedFromWindow() {
        super.onDetachedFromWindow();

        // Unregister the caption-style listener while detached.
        manageChangeListener();
    }
+
    /** Sets the single listener notified whenever rendered content changes. */
    @Override
    public void setOnChangedListener(OnChangedListener listener) {
        mListener = listener;
    }
+
+ @Override
+ public void setVisible(boolean visible) {
+ if (visible) {
+ setVisibility(View.VISIBLE);
+ } else {
+ setVisibility(View.GONE);
+ }
+
+ manageChangeListener();
+ }
+
    /**
     * Manages whether this renderer is listening for caption style changes.
     * <p>
     * A listener is registered only while the widget is attached to a window
     * and visible; registering also re-syncs the current style and font size
     * from the captioning manager.
     */
    private void manageChangeListener() {
        final boolean needsListener = isAttachedToWindow() && getVisibility() == View.VISIBLE;
        if (mHasChangeListener != needsListener) {
            mHasChangeListener = needsListener;

            if (needsListener) {
                mManager.addCaptioningChangeListener(mCaptioningListener);

                // Style may have changed while we were not listening.
                final CaptionStyle captionStyle = mManager.getUserStyle();
                final float fontSize = mManager.getFontScale() * getHeight() * LINE_HEIGHT_RATIO;
                setCaptionStyle(captionStyle, fontSize);
            } else {
                mManager.removeCaptioningChangeListener(mCaptioningListener);
            }
        }
    }
+
    /**
     * Sets the currently active cues, updating the backing view hierarchy.
     * <p>
     * Cues that belong to a {@link TextTrackRegion} are added to (or updated
     * in) that region's box; all other cues get stand-alone cue boxes. Boxes
     * whose cues are no longer active are pruned, then the widget is
     * re-measured and re-laid out at its current size and the change
     * listener, if any, is notified.
     *
     * @param activeCues the currently active cues
     */
    public void setActiveCues(Vector<SubtitleTrack.Cue> activeCues) {
        final Context context = getContext();
        final CaptionStyle captionStyle = mCaptionStyle;
        final float fontSize = mFontSize;

        // Mark all existing boxes inactive; put()/update() below re-activate
        // the ones that are still present.
        prepForPrune();

        // Ensure we have all necessary cue and region boxes.
        final int count = activeCues.size();
        for (int i = 0; i < count; i++) {
            final TextTrackCue cue = (TextTrackCue) activeCues.get(i);
            final TextTrackRegion region = cue.mRegion;
            if (region != null) {
                RegionLayout regionBox = mRegionBoxes.get(region);
                if (regionBox == null) {
                    regionBox = new RegionLayout(context, region, captionStyle, fontSize);
                    mRegionBoxes.put(region, regionBox);
                    addView(regionBox, LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
                }
                regionBox.put(cue);
            } else {
                CueLayout cueBox = mCueBoxes.get(cue);
                if (cueBox == null) {
                    cueBox = new CueLayout(context, cue, captionStyle, fontSize);
                    mCueBoxes.put(cue, cueBox);
                    addView(cueBox, LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
                }
                cueBox.update();
                cueBox.setOrder(i);
            }
        }

        // Remove boxes that were not re-activated above.
        prune();

        // Force measurement and layout.
        final int width = getWidth();
        final int height = getHeight();
        setSize(width, height);

        if (mListener != null) {
            mListener.onChanged(this);
        }
    }
+
+ private void setCaptionStyle(CaptionStyle captionStyle, float fontSize) {
+ captionStyle = DEFAULT_CAPTION_STYLE.applyStyle(captionStyle);
+ mCaptionStyle = captionStyle;
+ mFontSize = fontSize;
+
+ final int cueCount = mCueBoxes.size();
+ for (int i = 0; i < cueCount; i++) {
+ final CueLayout cueBox = mCueBoxes.valueAt(i);
+ cueBox.setCaptionStyle(captionStyle, fontSize);
+ }
+
+ final int regionCount = mRegionBoxes.size();
+ for (int i = 0; i < regionCount; i++) {
+ final RegionLayout regionBox = mRegionBoxes.valueAt(i);
+ regionBox.setCaptionStyle(captionStyle, fontSize);
+ }
+ }
+
+ /**
+ * Remove inactive cues and regions.
+ */
+ private void prune() {
+ int regionCount = mRegionBoxes.size();
+ for (int i = 0; i < regionCount; i++) {
+ final RegionLayout regionBox = mRegionBoxes.valueAt(i);
+ if (regionBox.prune()) {
+ removeView(regionBox);
+ mRegionBoxes.removeAt(i);
+ regionCount--;
+ i--;
+ }
+ }
+
+ int cueCount = mCueBoxes.size();
+ for (int i = 0; i < cueCount; i++) {
+ final CueLayout cueBox = mCueBoxes.valueAt(i);
+ if (!cueBox.isActive()) {
+ removeView(cueBox);
+ mCueBoxes.removeAt(i);
+ cueCount--;
+ i--;
+ }
+ }
+ }
+
+ /**
+ * Reset active cues and regions.
+ */
+ private void prepForPrune() {
+ final int regionCount = mRegionBoxes.size();
+ for (int i = 0; i < regionCount; i++) {
+ final RegionLayout regionBox = mRegionBoxes.valueAt(i);
+ regionBox.prepForPrune();
+ }
+
+ final int cueCount = mCueBoxes.size();
+ for (int i = 0; i < cueCount; i++) {
+ final CueLayout cueBox = mCueBoxes.valueAt(i);
+ cueBox.prepForPrune();
+ }
+ }
+
+ @Override
+ protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
+ super.onMeasure(widthMeasureSpec, heightMeasureSpec);
+
+ final int regionCount = mRegionBoxes.size();
+ for (int i = 0; i < regionCount; i++) {
+ final RegionLayout regionBox = mRegionBoxes.valueAt(i);
+ regionBox.measureForParent(widthMeasureSpec, heightMeasureSpec);
+ }
+
+ final int cueCount = mCueBoxes.size();
+ for (int i = 0; i < cueCount; i++) {
+ final CueLayout cueBox = mCueBoxes.valueAt(i);
+ cueBox.measureForParent(widthMeasureSpec, heightMeasureSpec);
+ }
+ }
+
    @Override
    protected void onLayout(boolean changed, int l, int t, int r, int b) {
        final int viewportWidth = r - l;
        final int viewportHeight = b - t;

        // Font size scales with the viewport height, so it is recomputed on
        // every layout pass.
        setCaptionStyle(mCaptionStyle,
                mManager.getFontScale() * LINE_HEIGHT_RATIO * viewportHeight);

        final int regionCount = mRegionBoxes.size();
        for (int i = 0; i < regionCount; i++) {
            final RegionLayout regionBox = mRegionBoxes.valueAt(i);
            layoutRegion(viewportWidth, viewportHeight, regionBox);
        }

        final int cueCount = mCueBoxes.size();
        for (int i = 0; i < cueCount; i++) {
            final CueLayout cueBox = mCueBoxes.valueAt(i);
            layoutCue(viewportWidth, viewportHeight, cueBox);
        }
    }
+
+ /**
+ * Lays out a region within the viewport. The region handles layout for
+ * contained cues.
+ */
+ private void layoutRegion(
+ int viewportWidth, int viewportHeight,
+ RegionLayout regionBox) {
+ final TextTrackRegion region = regionBox.getRegion();
+ final int regionHeight = regionBox.getMeasuredHeight();
+ final int regionWidth = regionBox.getMeasuredWidth();
+
+ // TODO: Account for region anchor point.
+ final float x = region.mViewportAnchorPointX;
+ final float y = region.mViewportAnchorPointY;
+ final int left = (int) (x * (viewportWidth - regionWidth) / 100);
+ final int top = (int) (y * (viewportHeight - regionHeight) / 100);
+
+ regionBox.layout(left, top, left + regionWidth, top + regionHeight);
+ }
+
    /**
     * Lays out a cue within the viewport.
     * <p>
     * Horizontal position and width are computed as percentages of the
     * viewport width, adjusted for alignment, layout direction, and (when
     * snap-to-lines is set) title-safe padding, then converted to pixels.
     */
    private void layoutCue(
            int viewportWidth, int viewportHeight, CueLayout cueBox) {
        final TextTrackCue cue = cueBox.getCue();
        final int direction = getLayoutDirection();
        final int absAlignment = resolveCueAlignment(direction, cue.mAlignment);
        final boolean cueSnapToLines = cue.mSnapToLines;

        // Cue width as a percentage of the viewport width.
        int size = 100 * cueBox.getMeasuredWidth() / viewportWidth;

        // Determine raw x-position.
        int xPosition;
        switch (absAlignment) {
            case TextTrackCue.ALIGNMENT_LEFT:
                xPosition = cue.mTextPosition;
                break;
            case TextTrackCue.ALIGNMENT_RIGHT:
                xPosition = cue.mTextPosition - size;
                break;
            case TextTrackCue.ALIGNMENT_MIDDLE:
            default:
                xPosition = cue.mTextPosition - size / 2;
                break;
        }

        // Adjust x-position for layout.
        if (direction == LAYOUT_DIRECTION_RTL) {
            xPosition = 100 - xPosition;
        }

        // If the text track cue snap-to-lines flag is set, adjust
        // x-position and size for padding. This is equivalent to placing the
        // cue within the title-safe area.
        // NOTE(review): shifting right by the full paddingLeft (rather than
        // clamping the left edge to it) looks suspicious — confirm against
        // the WebVTT rendering rules before changing.
        if (cueSnapToLines) {
            final int paddingLeft = 100 * getPaddingLeft() / viewportWidth;
            final int paddingRight = 100 * getPaddingRight() / viewportWidth;
            if (xPosition < paddingLeft && xPosition + size > paddingLeft) {
                xPosition += paddingLeft;
                size -= paddingLeft;
            }
            final float rightEdge = 100 - paddingRight;
            if (xPosition < rightEdge && xPosition + size > rightEdge) {
                size -= paddingRight;
            }
        }

        // Compute absolute left position and width.
        final int left = xPosition * viewportWidth / 100;
        final int width = size * viewportWidth / 100;

        // Determine initial y-position.
        final int yPosition = calculateLinePosition(cueBox);

        // Compute absolute final top position and height.
        final int height = cueBox.getMeasuredHeight();
        final int top;
        if (yPosition < 0) {
            // Negative positions stack cues up from the bottom of the viewport.
            // TODO: This needs to use the actual height of prior boxes.
            top = viewportHeight + yPosition * height;
        } else {
            top = yPosition * (viewportHeight - height) / 100;
        }

        // Layout cue in final position.
        cueBox.layout(left, top, left + width, top + height);
    }
+
+ /**
+ * Calculates the line position for a cue.
+ * <p>
+ * If the resulting position is negative, it represents a bottom-aligned
+ * position relative to the number of active cues. Otherwise, it represents
+ * a percentage [0-100] of the viewport height.
+ */
+ private int calculateLinePosition(CueLayout cueBox) {
+ final TextTrackCue cue = cueBox.getCue();
+ final Integer linePosition = cue.mLinePosition;
+ final boolean snapToLines = cue.mSnapToLines;
+ final boolean autoPosition = (linePosition == null);
+
+ if (!snapToLines && !autoPosition && (linePosition < 0 || linePosition > 100)) {
+ // Invalid line position defaults to 100.
+ return 100;
+ } else if (!autoPosition) {
+ // Use the valid, supplied line position.
+ return linePosition;
+ } else if (!snapToLines) {
+ // Automatic, non-snapped line position defaults to 100.
+ return 100;
+ } else {
+ // Automatic snapped line position uses active cue order.
+ return -(cueBox.mOrder + 1);
+ }
+ }
+
+ /**
+ * Resolves cue alignment according to the specified layout direction.
+ */
+ private static int resolveCueAlignment(int layoutDirection, int alignment) {
+ switch (alignment) {
+ case TextTrackCue.ALIGNMENT_START:
+ return layoutDirection == View.LAYOUT_DIRECTION_LTR ?
+ TextTrackCue.ALIGNMENT_LEFT : TextTrackCue.ALIGNMENT_RIGHT;
+ case TextTrackCue.ALIGNMENT_END:
+ return layoutDirection == View.LAYOUT_DIRECTION_LTR ?
+ TextTrackCue.ALIGNMENT_RIGHT : TextTrackCue.ALIGNMENT_LEFT;
+ }
+ return alignment;
+ }
+
    /** Receives system captioning changes and re-applies style and font size. */
    private final CaptioningChangeListener mCaptioningListener = new CaptioningChangeListener() {
        @Override
        public void onFontScaleChanged(float fontScale) {
            // Recompute the pixel font size from the new scale factor.
            final float fontSize = fontScale * getHeight() * LINE_HEIGHT_RATIO;
            setCaptionStyle(mCaptionStyle, fontSize);
        }

        @Override
        public void onUserStyleChanged(CaptionStyle userStyle) {
            setCaptionStyle(userStyle, mFontSize);
        }
    };
+
    /**
     * A text track region represents a portion of the video viewport and
     * provides a rendering area for text track cues.
     */
    private static class RegionLayout extends LinearLayout {
        // Cue boxes currently rendered inside this region, in insertion order.
        private final ArrayList<CueLayout> mRegionCueBoxes = new ArrayList<CueLayout>();
        // Region data backing this layout.
        private final TextTrackRegion mRegion;

        // Current caption style and font size, propagated to child cue boxes.
        private CaptionStyle mCaptionStyle;
        private float mFontSize;

        public RegionLayout(Context context, TextTrackRegion region, CaptionStyle captionStyle,
                float fontSize) {
            super(context);

            mRegion = region;
            mCaptionStyle = captionStyle;
            mFontSize = fontSize;

            // TODO: Add support for vertical text
            setOrientation(VERTICAL);

            if (DEBUG) {
                setBackgroundColor(DEBUG_REGION_BACKGROUND);
            } else {
                setBackgroundColor(captionStyle.windowColor);
            }
        }

        /** Applies a caption style and font size to this region and its cues. */
        public void setCaptionStyle(CaptionStyle captionStyle, float fontSize) {
            mCaptionStyle = captionStyle;
            mFontSize = fontSize;

            final int cueCount = mRegionCueBoxes.size();
            for (int i = 0; i < cueCount; i++) {
                final CueLayout cueBox = mRegionCueBoxes.get(i);
                cueBox.setCaptionStyle(captionStyle, fontSize);
            }

            setBackgroundColor(captionStyle.windowColor);
        }

        /**
         * Performs the parent's measurement responsibilities, then
         * automatically performs its own measurement.
         */
        public void measureForParent(int widthMeasureSpec, int heightMeasureSpec) {
            final TextTrackRegion region = mRegion;
            final int specWidth = MeasureSpec.getSize(widthMeasureSpec);
            final int specHeight = MeasureSpec.getSize(heightMeasureSpec);
            // Region width is expressed as a percentage of the viewport width.
            final int width = (int) region.mWidth;

            // Determine the absolute maximum region size as the requested size.
            final int size = width * specWidth / 100;

            widthMeasureSpec = MeasureSpec.makeMeasureSpec(size, MeasureSpec.AT_MOST);
            heightMeasureSpec = MeasureSpec.makeMeasureSpec(specHeight, MeasureSpec.AT_MOST);
            measure(widthMeasureSpec, heightMeasureSpec);
        }

        /**
         * Prepares this region for pruning by setting all tracks as inactive.
         * <p>
         * Tracks that are added or updated using {@link #put(TextTrackCue)}
         * after calling this method will be marked as active.
         */
        public void prepForPrune() {
            final int cueCount = mRegionCueBoxes.size();
            for (int i = 0; i < cueCount; i++) {
                final CueLayout cueBox = mRegionCueBoxes.get(i);
                cueBox.prepForPrune();
            }
        }

        /**
         * Adds a {@link TextTrackCue} to this region. If the track had already
         * been added, updates its active state.
         *
         * @param cue the cue to add or update
         */
        public void put(TextTrackCue cue) {
            final int cueCount = mRegionCueBoxes.size();
            for (int i = 0; i < cueCount; i++) {
                final CueLayout cueBox = mRegionCueBoxes.get(i);
                if (cueBox.getCue() == cue) {
                    cueBox.update();
                    return;
                }
            }

            final CueLayout cueBox = new CueLayout(getContext(), cue, mCaptionStyle, mFontSize);
            mRegionCueBoxes.add(cueBox);
            addView(cueBox, LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);

            // Evict the oldest child view when the region exceeds its line
            // limit. NOTE(review): only the child view is removed here, not
            // the matching mRegionCueBoxes entry — confirm this is intended.
            if (getChildCount() > mRegion.mLines) {
                removeViewAt(0);
            }
        }

        /**
         * Remove all inactive tracks from this region.
         *
         * @return true if this region is empty and should be pruned
         */
        public boolean prune() {
            int cueCount = mRegionCueBoxes.size();
            for (int i = 0; i < cueCount; i++) {
                final CueLayout cueBox = mRegionCueBoxes.get(i);
                if (!cueBox.isActive()) {
                    mRegionCueBoxes.remove(i);
                    removeView(cueBox);
                    cueCount--;
                    i--;
                }
            }

            return mRegionCueBoxes.isEmpty();
        }

        /**
         * @return the region data backing this layout
         */
        public TextTrackRegion getRegion() {
            return mRegion;
        }
    }
+
    /**
     * A text track cue is the unit of time-sensitive data in a text track,
     * corresponding for instance for subtitles and captions to the text that
     * appears at a particular time and disappears at another time.
     * <p>
     * A single cue may contain multiple {@link SpanLayout}s, each representing a
     * single line of text.
     */
    private static class CueLayout extends LinearLayout {
        /** Cue data backing this layout. */
        public final TextTrackCue mCue;

        // Current caption style and font size applied to child line views.
        private CaptionStyle mCaptionStyle;
        private float mFontSize;

        // Whether this cue is active; cleared by prepForPrune(), set by update().
        private boolean mActive;
        // Position of this cue within the list of active cues.
        private int mOrder;

        public CueLayout(
                Context context, TextTrackCue cue, CaptionStyle captionStyle, float fontSize) {
            super(context);

            mCue = cue;
            mCaptionStyle = captionStyle;
            mFontSize = fontSize;

            // TODO: Add support for vertical text.
            // Horizontally-written cues stack their lines vertically.
            final boolean horizontal = cue.mWritingDirection
                    == TextTrackCue.WRITING_DIRECTION_HORIZONTAL;
            setOrientation(horizontal ? VERTICAL : HORIZONTAL);

            switch (cue.mAlignment) {
                case TextTrackCue.ALIGNMENT_END:
                    setGravity(Gravity.END);
                    break;
                case TextTrackCue.ALIGNMENT_LEFT:
                    setGravity(Gravity.LEFT);
                    break;
                case TextTrackCue.ALIGNMENT_MIDDLE:
                    setGravity(horizontal
                            ? Gravity.CENTER_HORIZONTAL : Gravity.CENTER_VERTICAL);
                    break;
                case TextTrackCue.ALIGNMENT_RIGHT:
                    setGravity(Gravity.RIGHT);
                    break;
                case TextTrackCue.ALIGNMENT_START:
                    setGravity(Gravity.START);
                    break;
            }

            if (DEBUG) {
                setBackgroundColor(DEBUG_CUE_BACKGROUND);
            }

            update();
        }

        /** Applies a caption style and font size to every line in this cue. */
        public void setCaptionStyle(CaptionStyle style, float fontSize) {
            mCaptionStyle = style;
            mFontSize = fontSize;

            final int n = getChildCount();
            for (int i = 0; i < n; i++) {
                final View child = getChildAt(i);
                if (child instanceof SpanLayout) {
                    ((SpanLayout) child).setCaptionStyle(style, fontSize);
                }
            }
        }

        /** Marks this cue inactive pending a subsequent {@link #update()}. */
        public void prepForPrune() {
            mActive = false;
        }

        /**
         * Marks this cue active and rebuilds its line views from the cue's
         * current lines and alignment.
         */
        public void update() {
            mActive = true;

            removeAllViews();

            final int cueAlignment = resolveCueAlignment(getLayoutDirection(), mCue.mAlignment);
            final Alignment alignment;
            switch (cueAlignment) {
                case TextTrackCue.ALIGNMENT_LEFT:
                    alignment = Alignment.ALIGN_LEFT;
                    break;
                case TextTrackCue.ALIGNMENT_RIGHT:
                    alignment = Alignment.ALIGN_RIGHT;
                    break;
                case TextTrackCue.ALIGNMENT_MIDDLE:
                default:
                    alignment = Alignment.ALIGN_CENTER;
            }

            final CaptionStyle captionStyle = mCaptionStyle;
            final float fontSize = mFontSize;
            final TextTrackCueSpan[][] lines = mCue.mLines;
            final int lineCount = lines.length;
            for (int i = 0; i < lineCount; i++) {
                final SpanLayout lineBox = new SpanLayout(getContext(), lines[i]);
                lineBox.setAlignment(alignment);
                lineBox.setCaptionStyle(captionStyle, fontSize);

                addView(lineBox, LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
            }
        }

        // NOTE(review): redundant override that delegates directly to the
        // superclass; kept for behavioral parity.
        @Override
        protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
            super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        }

        /**
         * Performs the parent's measurement responsibilities, then
         * automatically performs its own measurement.
         */
        public void measureForParent(int widthMeasureSpec, int heightMeasureSpec) {
            final TextTrackCue cue = mCue;
            final int specWidth = MeasureSpec.getSize(widthMeasureSpec);
            final int specHeight = MeasureSpec.getSize(heightMeasureSpec);
            final int direction = getLayoutDirection();
            final int absAlignment = resolveCueAlignment(direction, cue.mAlignment);

            // Determine the maximum size of cue based on its starting position
            // and the direction in which it grows.
            final int maximumSize;
            switch (absAlignment) {
                case TextTrackCue.ALIGNMENT_LEFT:
                    maximumSize = 100 - cue.mTextPosition;
                    break;
                case TextTrackCue.ALIGNMENT_RIGHT:
                    maximumSize = cue.mTextPosition;
                    break;
                case TextTrackCue.ALIGNMENT_MIDDLE:
                    if (cue.mTextPosition <= 50) {
                        maximumSize = cue.mTextPosition * 2;
                    } else {
                        maximumSize = (100 - cue.mTextPosition) * 2;
                    }
                    break;
                default:
                    maximumSize = 0;
            }

            // Determine absolute maximum cue size as the smaller of the
            // requested size and the maximum theoretical size.
            final int size = Math.min(cue.mSize, maximumSize) * specWidth / 100;
            widthMeasureSpec = MeasureSpec.makeMeasureSpec(size, MeasureSpec.AT_MOST);
            heightMeasureSpec = MeasureSpec.makeMeasureSpec(specHeight, MeasureSpec.AT_MOST);
            measure(widthMeasureSpec, heightMeasureSpec);
        }

        /**
         * Sets the order of this cue in the list of active cues.
         *
         * @param order the order of this cue in the list of active cues
         */
        public void setOrder(int order) {
            mOrder = order;
        }

        /**
         * @return whether this cue is marked as active
         */
        public boolean isActive() {
            return mActive;
        }

        /**
         * @return the cue data backing this layout
         */
        public TextTrackCue getCue() {
            return mCue;
        }
    }
+
+ /**
+ * A text track line represents a single line of text within a cue.
+ * <p>
+ * A single line may contain multiple spans, each representing a section of
+ * text that may be enabled or disabled at a particular time.
+ */
+ private static class SpanLayout extends SubtitleView {
+ private final SpannableStringBuilder mBuilder = new SpannableStringBuilder();
+ private final TextTrackCueSpan[] mSpans;
+
+ public SpanLayout(Context context, TextTrackCueSpan[] spans) {
+ super(context);
+
+ mSpans = spans;
+
+ update();
+ }
+
+ public void update() {
+ final SpannableStringBuilder builder = mBuilder;
+ final TextTrackCueSpan[] spans = mSpans;
+
+ builder.clear();
+ builder.clearSpans();
+
+ final int spanCount = spans.length;
+ for (int i = 0; i < spanCount; i++) {
+ final TextTrackCueSpan span = spans[i];
+ if (span.mEnabled) {
+ builder.append(spans[i].mText);
+ }
+ }
+
+ setText(builder);
+ }
+
+ public void setCaptionStyle(CaptionStyle captionStyle, float fontSize) {
+ setBackgroundColor(captionStyle.backgroundColor);
+ setForegroundColor(captionStyle.foregroundColor);
+ setEdgeColor(captionStyle.edgeColor);
+ setEdgeType(captionStyle.edgeType);
+ setTypeface(captionStyle.getTypeface());
+ setTextSize(fontSize);
+ }
+ }
+}
diff --git a/android/media/audiofx/AcousticEchoCanceler.java b/android/media/audiofx/AcousticEchoCanceler.java
new file mode 100644
index 00000000..3a44df4d
--- /dev/null
+++ b/android/media/audiofx/AcousticEchoCanceler.java
@@ -0,0 +1,96 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.audiofx;
+
+import android.util.Log;
+
+/**
+ * Acoustic Echo Canceler (AEC).
+ * <p>Acoustic Echo Canceler (AEC) is an audio pre-processor which removes the contribution of the
+ * signal received from the remote party from the captured audio signal.
+ * <p>AEC is used by voice communication applications (voice chat, video conferencing, SIP calls)
+ * where the presence of echo with significant delay in the signal received from the remote party
+ * is highly disturbing. AEC is often used in conjunction with noise suppression (NS).
+ * <p>An application creates an AcousticEchoCanceler object to instantiate and control an AEC
+ * engine in the audio capture path.
+ * <p>To attach the AcousticEchoCanceler to a particular {@link android.media.AudioRecord},
+ * specify the audio session ID of this AudioRecord when creating the AcousticEchoCanceler.
+ * The audio session is retrieved by calling
+ * {@link android.media.AudioRecord#getAudioSessionId()} on the AudioRecord instance.
+ * <p>On some devices, an AEC can be inserted by default in the capture path by the platform
+ * according to the {@link android.media.MediaRecorder.AudioSource} used. The application should
+ * call AcousticEchoCanceler.getEnable() after creating the AEC to check the default AEC activation
+ * state on a particular AudioRecord session.
+ * <p>See {@link android.media.audiofx.AudioEffect} class for more details on
+ * controlling audio effects.
+ */
+
+public class AcousticEchoCanceler extends AudioEffect {
+
+ private final static String TAG = "AcousticEchoCanceler";
+
+ /**
+ * Checks if the device implements acoustic echo cancellation.
+ * @return true if the device implements acoustic echo cancellation, false otherwise.
+ */
+ public static boolean isAvailable() {
+ return AudioEffect.isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_AEC);
+ }
+
+ /**
+ * Creates an AcousticEchoCanceler and attaches it to the AudioRecord on the audio
+ * session specified.
+ * @param audioSession system wide unique audio session identifier. The AcousticEchoCanceler
+ * will be applied to the AudioRecord with the same audio session.
+ * @return AcousticEchoCanceler created or null if the device does not implement AEC.
+ */
+ public static AcousticEchoCanceler create(int audioSession) {
+ AcousticEchoCanceler aec = null;
+ try {
+ aec = new AcousticEchoCanceler(audioSession);
+ } catch (IllegalArgumentException e) {
+ Log.w(TAG, "not implemented on this device"+ aec);
+ } catch (UnsupportedOperationException e) {
+ Log.w(TAG, "not enough resources");
+ } catch (RuntimeException e) {
+ Log.w(TAG, "not enough memory");
+ }
+ return aec;
+ }
+
+ /**
+ * Class constructor.
+ * <p> The constructor is not guarantied to succeed and throws the following exceptions:
+ * <ul>
+ * <li>IllegalArgumentException is thrown if the device does not implement an AEC</li>
+ * <li>UnsupportedOperationException is thrown is the resources allocated to audio
+ * pre-procesing are currently exceeded.</li>
+ * <li>RuntimeException is thrown if a memory allocation error occurs.</li>
+ * </ul>
+ *
+ * @param audioSession system wide unique audio session identifier. The AcousticEchoCanceler
+ * will be applied to the AudioRecord with the same audio session.
+ *
+ * @throws java.lang.IllegalArgumentException
+ * @throws java.lang.UnsupportedOperationException
+ * @throws java.lang.RuntimeException
+ */
+ private AcousticEchoCanceler(int audioSession)
+ throws IllegalArgumentException, UnsupportedOperationException, RuntimeException {
+ super(EFFECT_TYPE_AEC, EFFECT_TYPE_NULL, 0, audioSession);
+ }
+}
diff --git a/android/media/audiofx/AudioEffect.java b/android/media/audiofx/AudioEffect.java
new file mode 100644
index 00000000..7dbca3b9
--- /dev/null
+++ b/android/media/audiofx/AudioEffect.java
@@ -0,0 +1,1361 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.audiofx;
+
+import android.annotation.SdkConstant;
+import android.annotation.SdkConstant.SdkConstantType;
+import android.app.ActivityThread;
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Message;
+import android.util.Log;
+import java.lang.ref.WeakReference;
+import java.nio.ByteOrder;
+import java.nio.ByteBuffer;
+import java.util.UUID;
+
+/**
+ * AudioEffect is the base class for controlling audio effects provided by the android audio
+ * framework.
+ * <p>Applications should not use the AudioEffect class directly but one of its derived classes to
+ * control specific effects:
+ * <ul>
+ * <li> {@link android.media.audiofx.Equalizer}</li>
+ * <li> {@link android.media.audiofx.Virtualizer}</li>
+ * <li> {@link android.media.audiofx.BassBoost}</li>
+ * <li> {@link android.media.audiofx.PresetReverb}</li>
+ * <li> {@link android.media.audiofx.EnvironmentalReverb}</li>
+ * </ul>
+ * <p>To apply the audio effect to a specific AudioTrack or MediaPlayer instance,
+ * the application must specify the audio session ID of that instance when creating the AudioEffect.
+ * (see {@link android.media.MediaPlayer#getAudioSessionId()} for details on audio sessions).
+ * <p>NOTE: attaching insert effects (equalizer, bass boost, virtualizer) to the global audio output
+ * mix by use of session 0 is deprecated.
+ * <p>Creating an AudioEffect object will create the corresponding effect engine in the audio
+ * framework if no instance of the same effect type exists in the specified audio session.
+ * If one exists, this instance will be used.
+ * <p>The application creating the AudioEffect object (or a derived class) will either receive
+ * control of the effect engine or not depending on the priority parameter. If priority is higher
+ * than the priority used by the current effect engine owner, the control will be transfered to the
+ * new object. Otherwise control will remain with the previous object. In this case, the new
+ * application will be notified of changes in effect engine state or control ownership by the
+ * appropriate listener.
+ */
+
+public class AudioEffect {
+ static {
+ System.loadLibrary("audioeffect_jni");
+ native_init();
+ }
+
+ private final static String TAG = "AudioEffect-JAVA";
+
+ // effect type UUIDs are taken from hardware/libhardware/include/hardware/audio_effect.h
+
+ /**
+ * The following UUIDs define effect types corresponding to standard audio
+ * effects whose implementation and interface conform to the OpenSL ES
+ * specification. The definitions match the corresponding interface IDs in
+ * OpenSLES_IID.h
+ */
+ /**
+ * UUID for environmental reverberation effect
+ */
+ public static final UUID EFFECT_TYPE_ENV_REVERB = UUID
+ .fromString("c2e5d5f0-94bd-4763-9cac-4e234d06839e");
+ /**
+ * UUID for preset reverberation effect
+ */
+ public static final UUID EFFECT_TYPE_PRESET_REVERB = UUID
+ .fromString("47382d60-ddd8-11db-bf3a-0002a5d5c51b");
+ /**
+ * UUID for equalizer effect
+ */
+ public static final UUID EFFECT_TYPE_EQUALIZER = UUID
+ .fromString("0bed4300-ddd6-11db-8f34-0002a5d5c51b");
+ /**
+ * UUID for bass boost effect
+ */
+ public static final UUID EFFECT_TYPE_BASS_BOOST = UUID
+ .fromString("0634f220-ddd4-11db-a0fc-0002a5d5c51b");
+ /**
+ * UUID for virtualizer effect
+ */
+ public static final UUID EFFECT_TYPE_VIRTUALIZER = UUID
+ .fromString("37cc2c00-dddd-11db-8577-0002a5d5c51b");
+
+ /**
+ * UUIDs for effect types not covered by OpenSL ES.
+ */
+ /**
+ * UUID for Automatic Gain Control (AGC)
+ */
+ public static final UUID EFFECT_TYPE_AGC = UUID
+ .fromString("0a8abfe0-654c-11e0-ba26-0002a5d5c51b");
+
+ /**
+ * UUID for Acoustic Echo Canceler (AEC)
+ */
+ public static final UUID EFFECT_TYPE_AEC = UUID
+ .fromString("7b491460-8d4d-11e0-bd61-0002a5d5c51b");
+
+ /**
+ * UUID for Noise Suppressor (NS)
+ */
+ public static final UUID EFFECT_TYPE_NS = UUID
+ .fromString("58b4b260-8e06-11e0-aa8e-0002a5d5c51b");
+
+ /**
+ * UUID for Loudness Enhancer
+ */
+ public static final UUID EFFECT_TYPE_LOUDNESS_ENHANCER = UUID
+ .fromString("fe3199be-aed0-413f-87bb-11260eb63cf1");
+
+ /**
+     * Null effect UUID. Used when the UUID for the effect type is not specified.
+ * @hide
+ */
+ public static final UUID EFFECT_TYPE_NULL = UUID
+ .fromString("ec7178ec-e5e1-4432-a3f4-4657e6795210");
+
+ /**
+ * State of an AudioEffect object that was not successfully initialized upon
+ * creation
+ * @hide
+ */
+ public static final int STATE_UNINITIALIZED = 0;
+ /**
+ * State of an AudioEffect object that is ready to be used.
+ * @hide
+ */
+ public static final int STATE_INITIALIZED = 1;
+
+ // to keep in sync with
+ // frameworks/base/include/media/AudioEffect.h
+ /**
+ * Event id for engine control ownership change notification.
+ * @hide
+ */
+ public static final int NATIVE_EVENT_CONTROL_STATUS = 0;
+ /**
+ * Event id for engine state change notification.
+ * @hide
+ */
+ public static final int NATIVE_EVENT_ENABLED_STATUS = 1;
+ /**
+ * Event id for engine parameter change notification.
+ * @hide
+ */
+ public static final int NATIVE_EVENT_PARAMETER_CHANGED = 2;
+
+ /**
+ * Successful operation.
+ */
+ public static final int SUCCESS = 0;
+ /**
+ * Unspecified error.
+ */
+ public static final int ERROR = -1;
+ /**
+ * Internal operation status. Not returned by any method.
+ */
+ public static final int ALREADY_EXISTS = -2;
+ /**
+ * Operation failed due to bad object initialization.
+ */
+ public static final int ERROR_NO_INIT = -3;
+ /**
+ * Operation failed due to bad parameter value.
+ */
+ public static final int ERROR_BAD_VALUE = -4;
+ /**
+ * Operation failed because it was requested in wrong state.
+ */
+ public static final int ERROR_INVALID_OPERATION = -5;
+ /**
+ * Operation failed due to lack of memory.
+ */
+ public static final int ERROR_NO_MEMORY = -6;
+ /**
+ * Operation failed due to dead remote object.
+ */
+ public static final int ERROR_DEAD_OBJECT = -7;
+
    /**
     * The effect descriptor contains information on a particular effect implemented in the
     * audio framework:<br>
     * <ul>
     * <li>type: UUID identifying the effect type. May be one of:
     * {@link AudioEffect#EFFECT_TYPE_AEC}, {@link AudioEffect#EFFECT_TYPE_AGC},
     * {@link AudioEffect#EFFECT_TYPE_BASS_BOOST}, {@link AudioEffect#EFFECT_TYPE_ENV_REVERB},
     * {@link AudioEffect#EFFECT_TYPE_EQUALIZER}, {@link AudioEffect#EFFECT_TYPE_NS},
     * {@link AudioEffect#EFFECT_TYPE_PRESET_REVERB}, {@link AudioEffect#EFFECT_TYPE_VIRTUALIZER}.
     * </li>
     * <li>uuid: UUID for this particular implementation</li>
     * <li>connectMode: {@link #EFFECT_INSERT} or {@link #EFFECT_AUXILIARY}</li>
     * <li>name: human readable effect name</li>
     * <li>implementor: human readable effect implementor name</li>
     * </ul>
     * The method {@link #queryEffects()} returns an array of Descriptors to facilitate effects
     * enumeration.
     */
    public static class Descriptor {

        /** Default constructor; fields must be populated by the caller. */
        public Descriptor() {
        }

        /**
         * @param type UUID identifying the effect type. May be one of:
         * {@link AudioEffect#EFFECT_TYPE_AEC}, {@link AudioEffect#EFFECT_TYPE_AGC},
         * {@link AudioEffect#EFFECT_TYPE_BASS_BOOST}, {@link AudioEffect#EFFECT_TYPE_ENV_REVERB},
         * {@link AudioEffect#EFFECT_TYPE_EQUALIZER}, {@link AudioEffect#EFFECT_TYPE_NS},
         * {@link AudioEffect#EFFECT_TYPE_PRESET_REVERB},
         * {@link AudioEffect#EFFECT_TYPE_VIRTUALIZER}.
         * @param uuid UUID for this particular implementation
         * @param connectMode {@link #EFFECT_INSERT} or {@link #EFFECT_AUXILIARY}
         * @param name human readable effect name
         * @param implementor human readable effect implementor name
         *
         */
        public Descriptor(String type, String uuid, String connectMode,
                String name, String implementor) {
            this.type = UUID.fromString(type);
            this.uuid = UUID.fromString(uuid);
            this.connectMode = connectMode;
            this.name = name;
            this.implementor = implementor;
        }

        /**
         * Indicates the generic type of the effect (Equalizer, Bass boost ...).
         * One of {@link AudioEffect#EFFECT_TYPE_AEC},
         * {@link AudioEffect#EFFECT_TYPE_AGC}, {@link AudioEffect#EFFECT_TYPE_BASS_BOOST},
         * {@link AudioEffect#EFFECT_TYPE_ENV_REVERB}, {@link AudioEffect#EFFECT_TYPE_EQUALIZER},
         * {@link AudioEffect#EFFECT_TYPE_NS}, {@link AudioEffect#EFFECT_TYPE_PRESET_REVERB}
         * or {@link AudioEffect#EFFECT_TYPE_VIRTUALIZER}.<br>
         * For reverberation, bass boost, EQ and virtualizer, the UUID
         * corresponds to the OpenSL ES Interface ID.
         */
        public UUID type;
        /**
         * Indicates the particular implementation of the effect in that type. Several effects
         * can have the same type but this uuid is unique to a given implementation.
         */
        public UUID uuid;
        /**
         * Indicates if the effect is of insert category {@link #EFFECT_INSERT} or auxiliary
         * category {@link #EFFECT_AUXILIARY}.
         * Insert effects (typically an {@link Equalizer}) are applied
         * to the entire audio source and usually not shared by several sources. Auxiliary effects
         * (typically a reverberator) are applied to part of the signal (wet) and the effect output
         * is added to the original signal (dry).
         * Audio pre processing are applied to audio captured on a particular
         * {@link android.media.AudioRecord}.
         */
        public String connectMode;
        /**
         * Human readable effect name
         */
        public String name;
        /**
         * Human readable effect implementor name
         */
        public String implementor;
    };
+
/**
 * Effect connection mode is insert. Specifying an audio session ID when creating the effect
 * will insert this effect after all players in the same audio session.
 */
public static final String EFFECT_INSERT = "Insert";
/**
 * Effect connection mode is auxiliary.
 * <p>Auxiliary effects must be created on session 0 (global output mix). In order for a
 * MediaPlayer or AudioTrack to be fed into this effect, they must be explicitly attached to
 * this effect and a send level must be specified.
 * <p>Use the effect ID returned by {@link #getId()} to designate this particular effect when
 * attaching it to the MediaPlayer or AudioTrack.
 */
public static final String EFFECT_AUXILIARY = "Auxiliary";
/**
 * Effect connection mode is pre processing.
 * The audio pre processing effects are attached to an audio input (AudioRecord).
 * @hide
 */
public static final String EFFECT_PRE_PROCESSING = "Pre Processing";
+
// --------------------------------------------------------------------------
// Member variables
// --------------------
/**
 * Indicates the state of the AudioEffect instance
 */
private int mState = STATE_UNINITIALIZED;
/**
 * Lock to synchronize access to mState
 */
private final Object mStateLock = new Object();
/**
 * System wide unique effect ID
 */
private int mId;

// accessed by native methods; do not rename or remove without updating the
// JNI layer that reads/writes these handles.
private long mNativeAudioEffect;
private long mJniData;

/**
 * Effect descriptor
 */
private Descriptor mDescriptor;

/**
 * Listener for effect engine state change notifications.
 *
 * @see #setEnableStatusListener(OnEnableStatusChangeListener)
 */
private OnEnableStatusChangeListener mEnableStatusChangeListener = null;
/**
 * Listener for effect engine control ownership change notifications.
 *
 * @see #setControlStatusListener(OnControlStatusChangeListener)
 */
private OnControlStatusChangeListener mControlChangeStatusListener = null;
/**
 * Listener for effect engine parameter change notifications.
 *
 * @see #setParameterListener(OnParameterChangeListener)
 */
private OnParameterChangeListener mParameterChangeListener = null;
/**
 * Lock to protect listeners updates against event notifications
 * @hide
 */
public final Object mListenerLock = new Object();
/**
 * Handler for events coming from the native code
 * @hide
 */
public NativeEventHandler mNativeEventHandler = null;
+
// --------------------------------------------------------------------------
// Constructor, Finalize
// --------------------
/**
 * Class constructor.
 *
 * @param type type of effect engine created. See {@link #EFFECT_TYPE_ENV_REVERB},
 *            {@link #EFFECT_TYPE_EQUALIZER} ... Types corresponding to
 *            built-in effects are defined by AudioEffect class. Other types
 *            can be specified provided they correspond to an existing OpenSL
 *            ES interface ID and the corresponding effect is available on
 *            the platform. If an unspecified effect type is requested, the
 *            constructor will throw the IllegalArgumentException. This
 *            parameter can be set to {@link #EFFECT_TYPE_NULL} in which
 *            case only the uuid will be used to select the effect.
 * @param uuid unique identifier of a particular effect implementation.
 *            Must be specified if the caller wants to use a particular
 *            implementation of an effect type. This parameter can be set to
 *            {@link #EFFECT_TYPE_NULL} in which case only the type will
 *            be used to select the effect.
 * @param priority the priority level requested by the application for
 *            controlling the effect engine. As the same effect engine can
 *            be shared by several applications, this parameter indicates
 *            how much the requesting application needs control of effect
 *            parameters. The normal priority is 0, above normal is a
 *            positive number, below normal a negative number.
 * @param audioSession system wide unique audio session identifier.
 *            The effect will be attached to the MediaPlayer or AudioTrack in
 *            the same audio session.
 *
 * @throws java.lang.IllegalArgumentException
 * @throws java.lang.UnsupportedOperationException
 * @throws java.lang.RuntimeException
 * @hide
 */

public AudioEffect(UUID type, UUID uuid, int priority, int audioSession)
        throws IllegalArgumentException, UnsupportedOperationException,
        RuntimeException {
    // Out-parameters populated by the native layer on success.
    int[] id = new int[1];
    Descriptor[] desc = new Descriptor[1];
    // native initialization
    int initResult = native_setup(new WeakReference<AudioEffect>(this),
            type.toString(), uuid.toString(), priority, audioSession, id,
            desc, ActivityThread.currentOpPackageName());
    if (initResult != SUCCESS && initResult != ALREADY_EXISTS) {
        Log.e(TAG, "Error code " + initResult
                + " when initializing AudioEffect.");
        // Map well-known native status codes to the documented exceptions;
        // anything else becomes a generic RuntimeException.
        switch (initResult) {
            case ERROR_BAD_VALUE:
                throw (new IllegalArgumentException("Effect type: " + type
                        + " not supported."));
            case ERROR_INVALID_OPERATION:
                throw (new UnsupportedOperationException(
                        "Effect library not loaded"));
            default:
                throw (new RuntimeException(
                        "Cannot initialize effect engine for type: " + type
                        + " Error: " + initResult));
        }
    }
    mId = id[0];
    mDescriptor = desc[0];
    synchronized (mStateLock) {
        mState = STATE_INITIALIZED;
    }
}
+
/**
 * Releases the native AudioEffect resources. It is a good practice to
 * release the effect engine when not in use as control can be returned to
 * other applications or the native resources released.
 */
public void release() {
    synchronized (mStateLock) {
        // Release under the state lock so no state-checked operation can
        // race with the native teardown.
        native_release();
        mState = STATE_UNINITIALIZED;
    }
}
+
@Override
protected void finalize() {
    // Last-resort cleanup of the native resources if the application never
    // called release().
    // NOTE(review): does not chain to super.finalize(); fine while the direct
    // superclass finalizer is the default no-op — confirm the class hierarchy.
    native_finalize();
}
+
/**
 * Get the effect descriptor.
 *
 * @see android.media.audiofx.AudioEffect.Descriptor
 * @throws IllegalStateException if the effect is not in an initialized state
 */
public Descriptor getDescriptor() throws IllegalStateException {
    checkState("getDescriptor()");
    return mDescriptor;
}
+
+ // --------------------------------------------------------------------------
+ // Effects Enumeration
+ // --------------------
+
+ /**
+ * Query all effects available on the platform. Returns an array of
+ * {@link android.media.audiofx.AudioEffect.Descriptor} objects
+ *
+ * @throws IllegalStateException
+ */
+
+ static public Descriptor[] queryEffects() {
+ return (Descriptor[]) native_query_effects();
+ }
+
+ /**
+ * Query all audio pre-processing effects applied to the AudioRecord with the supplied
+ * audio session ID. Returns an array of {@link android.media.audiofx.AudioEffect.Descriptor}
+ * objects.
+ * @param audioSession system wide unique audio session identifier.
+ * @throws IllegalStateException
+ * @hide
+ */
+
+ static public Descriptor[] queryPreProcessings(int audioSession) {
+ return (Descriptor[]) native_query_pre_processing(audioSession);
+ }
+
+ /**
+ * Checks if the device implements the specified effect type.
+ * @param type the requested effect type.
+ * @return true if the device implements the specified effect type, false otherwise.
+ * @hide
+ */
+ public static boolean isEffectTypeAvailable(UUID type) {
+ AudioEffect.Descriptor[] desc = AudioEffect.queryEffects();
+ if (desc == null) {
+ return false;
+ }
+
+ for (int i = 0; i < desc.length; i++) {
+ if (desc[i].type.equals(type)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
// --------------------------------------------------------------------------
// Control methods
// --------------------

/**
 * Enable or disable the effect.
 * Creating an audio effect does not automatically apply this effect on the audio source. It
 * creates the resources necessary to process this effect but the audio signal is still bypassed
 * through the effect engine. Calling this method will make that the effect is actually applied
 * or not to the audio content being played in the corresponding audio session.
 *
 * @param enabled the requested enable state
 * @return {@link #SUCCESS} in case of success, {@link #ERROR_INVALID_OPERATION}
 *         or {@link #ERROR_DEAD_OBJECT} in case of failure.
 * @throws IllegalStateException if the effect is not in an initialized state
 */
public int setEnabled(boolean enabled) throws IllegalStateException {
    checkState("setEnabled()");
    return native_setEnabled(enabled);
}
+
/**
 * Set effect parameter. The setParameter method is provided in several
 * forms addressing most common parameter formats. This form is the most
 * generic one where the parameter and its value are both specified as an
 * array of bytes. The parameter and value type and length are therefore
 * totally free. For standard effect defined by OpenSL ES, the parameter
 * format and values must match the definitions in the corresponding OpenSL
 * ES interface.
 *
 * @param param the identifier of the parameter to set
 * @param value the new value for the specified parameter
 * @return {@link #SUCCESS} in case of success, {@link #ERROR_BAD_VALUE},
 *         {@link #ERROR_NO_MEMORY}, {@link #ERROR_INVALID_OPERATION} or
 *         {@link #ERROR_DEAD_OBJECT} in case of failure
 * @throws IllegalStateException if the effect is not in an initialized state
 * @hide
 */
public int setParameter(byte[] param, byte[] value)
        throws IllegalStateException {
    checkState("setParameter()");
    return native_setParameter(param.length, param, value.length, value);
}
+
+ /**
+ * Set effect parameter. The parameter and its value are integers.
+ *
+ * @see #setParameter(byte[], byte[])
+ * @hide
+ */
+ public int setParameter(int param, int value) throws IllegalStateException {
+ byte[] p = intToByteArray(param);
+ byte[] v = intToByteArray(value);
+ return setParameter(p, v);
+ }
+
+ /**
+ * Set effect parameter. The parameter is an integer and the value is a
+ * short integer.
+ *
+ * @see #setParameter(byte[], byte[])
+ * @hide
+ */
+ public int setParameter(int param, short value)
+ throws IllegalStateException {
+ byte[] p = intToByteArray(param);
+ byte[] v = shortToByteArray(value);
+ return setParameter(p, v);
+ }
+
+ /**
+ * Set effect parameter. The parameter is an integer and the value is an
+ * array of bytes.
+ *
+ * @see #setParameter(byte[], byte[])
+ * @hide
+ */
+ public int setParameter(int param, byte[] value)
+ throws IllegalStateException {
+ byte[] p = intToByteArray(param);
+ return setParameter(p, value);
+ }
+
+ /**
+ * Set effect parameter. The parameter is an array of 1 or 2 integers and
+ * the value is also an array of 1 or 2 integers
+ *
+ * @see #setParameter(byte[], byte[])
+ * @hide
+ */
+ public int setParameter(int[] param, int[] value)
+ throws IllegalStateException {
+ if (param.length > 2 || value.length > 2) {
+ return ERROR_BAD_VALUE;
+ }
+ byte[] p = intToByteArray(param[0]);
+ if (param.length > 1) {
+ byte[] p2 = intToByteArray(param[1]);
+ p = concatArrays(p, p2);
+ }
+ byte[] v = intToByteArray(value[0]);
+ if (value.length > 1) {
+ byte[] v2 = intToByteArray(value[1]);
+ v = concatArrays(v, v2);
+ }
+ return setParameter(p, v);
+ }
+
+ /**
+ * Set effect parameter. The parameter is an array of 1 or 2 integers and
+ * the value is an array of 1 or 2 short integers
+ *
+ * @see #setParameter(byte[], byte[])
+ * @hide
+ */
+ public int setParameter(int[] param, short[] value)
+ throws IllegalStateException {
+ if (param.length > 2 || value.length > 2) {
+ return ERROR_BAD_VALUE;
+ }
+ byte[] p = intToByteArray(param[0]);
+ if (param.length > 1) {
+ byte[] p2 = intToByteArray(param[1]);
+ p = concatArrays(p, p2);
+ }
+
+ byte[] v = shortToByteArray(value[0]);
+ if (value.length > 1) {
+ byte[] v2 = shortToByteArray(value[1]);
+ v = concatArrays(v, v2);
+ }
+ return setParameter(p, v);
+ }
+
+ /**
+ * Set effect parameter. The parameter is an array of 1 or 2 integers and
+ * the value is an array of bytes
+ *
+ * @see #setParameter(byte[], byte[])
+ * @hide
+ */
+ public int setParameter(int[] param, byte[] value)
+ throws IllegalStateException {
+ if (param.length > 2) {
+ return ERROR_BAD_VALUE;
+ }
+ byte[] p = intToByteArray(param[0]);
+ if (param.length > 1) {
+ byte[] p2 = intToByteArray(param[1]);
+ p = concatArrays(p, p2);
+ }
+ return setParameter(p, value);
+ }
+
/**
 * Get effect parameter. The getParameter method is provided in several
 * forms addressing most common parameter formats. This form is the most
 * generic one where the parameter and its value are both specified as an
 * array of bytes. The parameter and value type and length are therefore
 * totally free.
 *
 * @param param the identifier of the parameter to read
 * @param value output buffer that receives the parameter value; its length
 *            on input tells the engine the maximum payload size
 * @return the number of meaningful bytes in value array in case of success or
 *         {@link #ERROR_BAD_VALUE}, {@link #ERROR_NO_MEMORY}, {@link #ERROR_INVALID_OPERATION}
 *         or {@link #ERROR_DEAD_OBJECT} in case of failure.
 * @throws IllegalStateException if the effect is not in an initialized state
 * @hide
 */
public int getParameter(byte[] param, byte[] value)
        throws IllegalStateException {
    checkState("getParameter()");
    return native_getParameter(param.length, param, value.length, value);
}
+
+ /**
+ * Get effect parameter. The parameter is an integer and the value is an
+ * array of bytes.
+ *
+ * @see #getParameter(byte[], byte[])
+ * @hide
+ */
+ public int getParameter(int param, byte[] value)
+ throws IllegalStateException {
+ byte[] p = intToByteArray(param);
+
+ return getParameter(p, value);
+ }
+
+ /**
+ * Get effect parameter. The parameter is an integer and the value is an
+ * array of 1 or 2 integers
+ *
+ * @see #getParameter(byte[], byte[])
+ * In case of success, returns the number of meaningful integers in value array.
+ * @hide
+ */
+ public int getParameter(int param, int[] value)
+ throws IllegalStateException {
+ if (value.length > 2) {
+ return ERROR_BAD_VALUE;
+ }
+ byte[] p = intToByteArray(param);
+
+ byte[] v = new byte[value.length * 4];
+
+ int status = getParameter(p, v);
+
+ if (status == 4 || status == 8) {
+ value[0] = byteArrayToInt(v);
+ if (status == 8) {
+ value[1] = byteArrayToInt(v, 4);
+ }
+ status /= 4;
+ } else {
+ status = ERROR;
+ }
+ return status;
+ }
+
+ /**
+ * Get effect parameter. The parameter is an integer and the value is an
+ * array of 1 or 2 short integers
+ *
+ * @see #getParameter(byte[], byte[])
+ * In case of success, returns the number of meaningful short integers in value array.
+ * @hide
+ */
+ public int getParameter(int param, short[] value)
+ throws IllegalStateException {
+ if (value.length > 2) {
+ return ERROR_BAD_VALUE;
+ }
+ byte[] p = intToByteArray(param);
+
+ byte[] v = new byte[value.length * 2];
+
+ int status = getParameter(p, v);
+
+ if (status == 2 || status == 4) {
+ value[0] = byteArrayToShort(v);
+ if (status == 4) {
+ value[1] = byteArrayToShort(v, 2);
+ }
+ status /= 2;
+ } else {
+ status = ERROR;
+ }
+ return status;
+ }
+
+ /**
+ * Get effect parameter. The parameter is an array of 1 or 2 integers and
+ * the value is also an array of 1 or 2 integers
+ *
+ * @see #getParameter(byte[], byte[])
+ * In case of success, the returns the number of meaningful integers in value array.
+ * @hide
+ */
+ public int getParameter(int[] param, int[] value)
+ throws IllegalStateException {
+ if (param.length > 2 || value.length > 2) {
+ return ERROR_BAD_VALUE;
+ }
+ byte[] p = intToByteArray(param[0]);
+ if (param.length > 1) {
+ byte[] p2 = intToByteArray(param[1]);
+ p = concatArrays(p, p2);
+ }
+ byte[] v = new byte[value.length * 4];
+
+ int status = getParameter(p, v);
+
+ if (status == 4 || status == 8) {
+ value[0] = byteArrayToInt(v);
+ if (status == 8) {
+ value[1] = byteArrayToInt(v, 4);
+ }
+ status /= 4;
+ } else {
+ status = ERROR;
+ }
+ return status;
+ }
+
+ /**
+ * Get effect parameter. The parameter is an array of 1 or 2 integers and
+ * the value is an array of 1 or 2 short integers
+ *
+ * @see #getParameter(byte[], byte[])
+ * In case of success, returns the number of meaningful short integers in value array.
+ * @hide
+ */
+ public int getParameter(int[] param, short[] value)
+ throws IllegalStateException {
+ if (param.length > 2 || value.length > 2) {
+ return ERROR_BAD_VALUE;
+ }
+ byte[] p = intToByteArray(param[0]);
+ if (param.length > 1) {
+ byte[] p2 = intToByteArray(param[1]);
+ p = concatArrays(p, p2);
+ }
+ byte[] v = new byte[value.length * 2];
+
+ int status = getParameter(p, v);
+
+ if (status == 2 || status == 4) {
+ value[0] = byteArrayToShort(v);
+ if (status == 4) {
+ value[1] = byteArrayToShort(v, 2);
+ }
+ status /= 2;
+ } else {
+ status = ERROR;
+ }
+ return status;
+ }
+
+ /**
+ * Get effect parameter. The parameter is an array of 1 or 2 integers and
+ * the value is an array of bytes
+ *
+ * @see #getParameter(byte[], byte[])
+ * @hide
+ */
+ public int getParameter(int[] param, byte[] value)
+ throws IllegalStateException {
+ if (param.length > 2) {
+ return ERROR_BAD_VALUE;
+ }
+ byte[] p = intToByteArray(param[0]);
+ if (param.length > 1) {
+ byte[] p2 = intToByteArray(param[1]);
+ p = concatArrays(p, p2);
+ }
+
+ return getParameter(p, value);
+ }
+
/**
 * Send a command to the effect engine. This method is intended to send
 * proprietary commands to a particular effect implementation.
 * In case of success, returns the number of meaningful bytes in reply array.
 * In case of failure, the returned value is negative and implementation specific.
 *
 * @param cmdCode implementation-specific command code
 * @param command input payload for the command
 * @param reply output buffer receiving the reply; its length bounds the reply size
 * @throws IllegalStateException if the effect is not in an initialized state
 * @hide
 */
public int command(int cmdCode, byte[] command, byte[] reply)
        throws IllegalStateException {
    checkState("command()");
    return native_command(cmdCode, command.length, command, reply.length, reply);
}
+
// --------------------------------------------------------------------------
// Getters
// --------------------

/**
 * Returns effect unique identifier. This system wide unique identifier can
 * be used to attach this effect to a MediaPlayer or an AudioTrack when the
 * effect is an auxiliary effect (Reverb)
 *
 * @return the effect identifier.
 * @throws IllegalStateException if the effect is not in an initialized state
 */
public int getId() throws IllegalStateException {
    checkState("getId()");
    return mId;
}
+
/**
 * Returns effect enabled state
 *
 * @return true if the effect is enabled, false otherwise.
 * @throws IllegalStateException if the effect is not in an initialized state
 */
public boolean getEnabled() throws IllegalStateException {
    checkState("getEnabled()");
    return native_getEnabled();
}
+
/**
 * Checks if this AudioEffect object is controlling the effect engine.
 *
 * @return true if this instance has control of effect engine, false
 *         otherwise.
 * @throws IllegalStateException if the effect is not in an initialized state
 */
public boolean hasControl() throws IllegalStateException {
    checkState("hasControl()");
    return native_hasControl();
}
+
// --------------------------------------------------------------------------
// Initialization / configuration
// --------------------
/**
 * Sets the listener AudioEffect notifies when the effect engine is enabled
 * or disabled.
 *
 * @param listener the listener to set, or null to clear it
 */
public void setEnableStatusListener(OnEnableStatusChangeListener listener) {
    synchronized (mListenerLock) {
        mEnableStatusChangeListener = listener;
    }
    // Lazily create the handler the first time any non-null listener is set.
    if ((listener != null) && (mNativeEventHandler == null)) {
        createNativeEventHandler();
    }
}
+
/**
 * Sets the listener AudioEffect notifies when the effect engine control is
 * taken or returned.
 *
 * @param listener the listener to set, or null to clear it
 */
public void setControlStatusListener(OnControlStatusChangeListener listener) {
    synchronized (mListenerLock) {
        mControlChangeStatusListener = listener;
    }
    // Lazily create the handler the first time any non-null listener is set.
    if ((listener != null) && (mNativeEventHandler == null)) {
        createNativeEventHandler();
    }
}
+
/**
 * Sets the listener AudioEffect notifies when a parameter is changed.
 *
 * @param listener the listener to set, or null to clear it
 * @hide
 */
public void setParameterListener(OnParameterChangeListener listener) {
    synchronized (mListenerLock) {
        mParameterChangeListener = listener;
    }
    // Lazily create the handler the first time any non-null listener is set.
    if ((listener != null) && (mNativeEventHandler == null)) {
        createNativeEventHandler();
    }
}
+
+ // Convenience method for the creation of the native event handler
+ // It is called only when a non-null event listener is set.
+ // precondition:
+ // mNativeEventHandler is null
+ private void createNativeEventHandler() {
+ Looper looper;
+ if ((looper = Looper.myLooper()) != null) {
+ mNativeEventHandler = new NativeEventHandler(this, looper);
+ } else if ((looper = Looper.getMainLooper()) != null) {
+ mNativeEventHandler = new NativeEventHandler(this, looper);
+ } else {
+ mNativeEventHandler = null;
+ }
+ }
+
// ---------------------------------------------------------
// Interface definitions
// --------------------
/**
 * The OnEnableStatusChangeListener interface defines a method called by the AudioEffect
 * when the enabled state of the effect engine was changed by the controlling application.
 */
public interface OnEnableStatusChangeListener {
    /**
     * Called on the listener to notify it that the effect engine has been
     * enabled or disabled.
     * @param effect the effect on which the interface is registered.
     * @param enabled new effect state.
     */
    void onEnableStatusChange(AudioEffect effect, boolean enabled);
}

/**
 * The OnControlStatusChangeListener interface defines a method called by the AudioEffect
 * when the control of the effect engine is gained or lost by the application
 */
public interface OnControlStatusChangeListener {
    /**
     * Called on the listener to notify it that the effect engine control
     * has been taken or returned.
     * @param effect the effect on which the interface is registered.
     * @param controlGranted true if the application has been granted control of the effect
     * engine, false otherwise.
     */
    void onControlStatusChange(AudioEffect effect, boolean controlGranted);
}

/**
 * The OnParameterChangeListener interface defines a method called by the AudioEffect
 * when a parameter is changed in the effect engine by the controlling application.
 * @hide
 */
public interface OnParameterChangeListener {
    /**
     * Called on the listener to notify it that a parameter value has changed.
     * @param effect the effect on which the interface is registered.
     * @param status status of the set parameter operation.
     * @param param ID of the modified parameter.
     * @param value the new parameter value.
     */
    void onParameterChange(AudioEffect effect, int status, byte[] param,
            byte[] value);
}
+
+
// -------------------------------------------------------------------------
// Audio Effect Control panel intents
// -------------------------------------------------------------------------

/**
 * Intent to launch an audio effect control panel UI.
 * <p>The goal of this intent is to enable separate implementations of music/media player
 * applications and audio effect control application or services.
 * This will allow platform vendors to offer more advanced control options for standard effects
 * or control for platform specific effects.
 * <p>The intent carries a number of extras used by the player application to communicate
 * necessary pieces of information to the control panel application.
 * <p>The calling application must use the
 * {@link android.app.Activity#startActivityForResult(Intent, int)} method to launch the
 * control panel so that its package name is indicated and used by the control panel
 * application to keep track of changes for this particular application.
 * <p>The {@link #EXTRA_AUDIO_SESSION} extra will indicate an audio session to which the
 * audio effects should be applied. If no audio session is specified, either one of the
 * following will happen:
 * <p>- If an audio session was previously opened by the calling application with
 * {@link #ACTION_OPEN_AUDIO_EFFECT_CONTROL_SESSION} intent, the effect changes will
 * be applied to that session.
 * <p>- If no audio session is opened, the changes will be stored in the package specific
 * storage area and applied whenever a new audio session is opened by this application.
 * <p>The {@link #EXTRA_CONTENT_TYPE} extra will help the control panel application
 * customize both the UI layout and the default audio effect settings if none are already
 * stored for the calling application.
 */
@SdkConstant(SdkConstantType.ACTIVITY_INTENT_ACTION)
public static final String ACTION_DISPLAY_AUDIO_EFFECT_CONTROL_PANEL =
        "android.media.action.DISPLAY_AUDIO_EFFECT_CONTROL_PANEL";

/**
 * Intent to signal to the effect control application or service that a new audio session
 * is opened and requires audio effects to be applied.
 * <p>This is different from {@link #ACTION_DISPLAY_AUDIO_EFFECT_CONTROL_PANEL} in that no
 * UI should be displayed in this case. Music player applications can broadcast this intent
 * before starting playback to make sure that any audio effect settings previously selected
 * by the user are applied.
 * <p>The effect control application receiving this intent will look for previously stored
 * settings for the calling application, create all required audio effects and apply the
 * effect settings to the specified audio session.
 * <p>The calling package name is indicated by the {@link #EXTRA_PACKAGE_NAME} extra and the
 * audio session ID by the {@link #EXTRA_AUDIO_SESSION} extra. Both extras are mandatory.
 * <p>If no stored settings are found for the calling application, default settings for the
 * content type indicated by {@link #EXTRA_CONTENT_TYPE} will be applied. The default settings
 * for a given content type are platform specific.
 */
@SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
public static final String ACTION_OPEN_AUDIO_EFFECT_CONTROL_SESSION =
        "android.media.action.OPEN_AUDIO_EFFECT_CONTROL_SESSION";

/**
 * Intent to signal to the effect control application or service that an audio session
 * is closed and that effects should not be applied anymore.
 * <p>The effect control application receiving this intent will delete all effects on
 * this session and store current settings in package specific storage.
 * <p>The calling package name is indicated by the {@link #EXTRA_PACKAGE_NAME} extra and the
 * audio session ID by the {@link #EXTRA_AUDIO_SESSION} extra. Both extras are mandatory.
 * <p>It is good practice for applications to broadcast this intent when music playback stops
 * and/or when exiting to free system resources consumed by audio effect engines.
 */
@SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
public static final String ACTION_CLOSE_AUDIO_EFFECT_CONTROL_SESSION =
        "android.media.action.CLOSE_AUDIO_EFFECT_CONTROL_SESSION";

/**
 * Contains the ID of the audio session the effects should be applied to.
 * <p>This extra is for use with {@link #ACTION_DISPLAY_AUDIO_EFFECT_CONTROL_PANEL},
 * {@link #ACTION_OPEN_AUDIO_EFFECT_CONTROL_SESSION} and
 * {@link #ACTION_CLOSE_AUDIO_EFFECT_CONTROL_SESSION} intents.
 * <p>The extra value is of type int and is the audio session ID.
 * @see android.media.MediaPlayer#getAudioSessionId() for details on audio sessions.
 */
public static final String EXTRA_AUDIO_SESSION = "android.media.extra.AUDIO_SESSION";

/**
 * Contains the package name of the calling application.
 * <p>This extra is for use with {@link #ACTION_OPEN_AUDIO_EFFECT_CONTROL_SESSION} and
 * {@link #ACTION_CLOSE_AUDIO_EFFECT_CONTROL_SESSION} intents.
 * <p>The extra value is a string containing the full package name.
 */
public static final String EXTRA_PACKAGE_NAME = "android.media.extra.PACKAGE_NAME";

/**
 * Indicates which type of content is played by the application.
 * <p>This extra is for use with {@link #ACTION_DISPLAY_AUDIO_EFFECT_CONTROL_PANEL} and
 * {@link #ACTION_OPEN_AUDIO_EFFECT_CONTROL_SESSION} intents.
 * <p>This information is used by the effect control application to customize UI and select
 * appropriate default effect settings. The content type is one of the following:
 * <ul>
 *   <li>{@link #CONTENT_TYPE_MUSIC}</li>
 *   <li>{@link #CONTENT_TYPE_MOVIE}</li>
 *   <li>{@link #CONTENT_TYPE_GAME}</li>
 *   <li>{@link #CONTENT_TYPE_VOICE}</li>
 * </ul>
 * If omitted, the content type defaults to {@link #CONTENT_TYPE_MUSIC}.
 */
public static final String EXTRA_CONTENT_TYPE = "android.media.extra.CONTENT_TYPE";

/**
 * Value for {@link #EXTRA_CONTENT_TYPE} when the type of content played is music
 */
public static final int CONTENT_TYPE_MUSIC = 0;
/**
 * Value for {@link #EXTRA_CONTENT_TYPE} when the type of content played is video or movie
 */
public static final int CONTENT_TYPE_MOVIE = 1;
/**
 * Value for {@link #EXTRA_CONTENT_TYPE} when the type of content played is game audio
 */
public static final int CONTENT_TYPE_GAME = 2;
/**
 * Value for {@link #EXTRA_CONTENT_TYPE} when the type of content played is voice audio
 */
public static final int CONTENT_TYPE_VOICE = 3;
+
+
+ // ---------------------------------------------------------
+ // Inner classes
+ // --------------------
+    /**
+     * Helper class to handle the forwarding of native events to the appropriate
+     * listeners. Messages are delivered on the Looper supplied at construction
+     * time; the current listener reference is re-read under mListenerLock for
+     * every event, so concurrent listener (un)registration is observed safely.
+     */
+    private class NativeEventHandler extends Handler {
+        private AudioEffect mAudioEffect;
+
+        public NativeEventHandler(AudioEffect ae, Looper looper) {
+            super(looper);
+            mAudioEffect = ae;
+        }
+
+        @Override
+        public void handleMessage(Message msg) {
+            if (mAudioEffect == null) {
+                return;
+            }
+            switch (msg.what) {
+                case NATIVE_EVENT_ENABLED_STATUS:
+                    // msg.arg1 != 0 <=> effect is now enabled
+                    OnEnableStatusChangeListener enableStatusChangeListener = null;
+                    synchronized (mListenerLock) {
+                        enableStatusChangeListener = mAudioEffect.mEnableStatusChangeListener;
+                    }
+                    if (enableStatusChangeListener != null) {
+                        enableStatusChangeListener.onEnableStatusChange(
+                                mAudioEffect, (boolean) (msg.arg1 != 0));
+                    }
+                    break;
+                case NATIVE_EVENT_CONTROL_STATUS:
+                    // msg.arg1 != 0 <=> this instance now has control of the engine
+                    OnControlStatusChangeListener controlStatusChangeListener = null;
+                    synchronized (mListenerLock) {
+                        controlStatusChangeListener = mAudioEffect.mControlChangeStatusListener;
+                    }
+                    if (controlStatusChangeListener != null) {
+                        controlStatusChangeListener.onControlStatusChange(
+                                mAudioEffect, (boolean) (msg.arg1 != 0));
+                    }
+                    break;
+                case NATIVE_EVENT_PARAMETER_CHANGED:
+                    OnParameterChangeListener parameterChangeListener = null;
+                    synchronized (mListenerLock) {
+                        parameterChangeListener = mAudioEffect.mParameterChangeListener;
+                    }
+                    if (parameterChangeListener != null) {
+                        // arg1 contains offset of parameter value from start of
+                        // byte array
+                        int vOffset = msg.arg1;
+                        byte[] p = (byte[]) msg.obj;
+                        // See effect_param_t in EffectApi.h for psize and vsize
+                        // fields offsets: [0..3]=status, [4..7]=psize, [8..11]=vsize,
+                        // [12..12+psize)=param, [vOffset..vOffset+vsize)=value.
+                        int status = byteArrayToInt(p, 0);
+                        int psize = byteArrayToInt(p, 4);
+                        int vsize = byteArrayToInt(p, 8);
+                        byte[] param = new byte[psize];
+                        byte[] value = new byte[vsize];
+                        System.arraycopy(p, 12, param, 0, psize);
+                        System.arraycopy(p, vOffset, value, 0, vsize);
+
+                        parameterChangeListener.onParameterChange(mAudioEffect,
+                                status, param, value);
+                    }
+                    break;
+
+                default:
+                    Log.e(TAG, "handleMessage() Unknown event type: " + msg.what);
+                    break;
+            }
+        }
+    }
+
+    // ---------------------------------------------------------
+    // Java methods called from the native side
+    // --------------------
+    /**
+     * Dispatches an event from the native layer to mNativeEventHandler.
+     * The native side holds a WeakReference to the AudioEffect; if the Java
+     * object has already been garbage collected the event is dropped silently.
+     * Likewise, events are dropped when no handler was installed.
+     */
+    @SuppressWarnings("unused")
+    private static void postEventFromNative(Object effect_ref, int what,
+            int arg1, int arg2, Object obj) {
+        AudioEffect effect = (AudioEffect) ((WeakReference) effect_ref).get();
+        if (effect == null) {
+            return;
+        }
+        if (effect.mNativeEventHandler != null) {
+            Message m = effect.mNativeEventHandler.obtainMessage(what, arg1,
+                    arg2, obj);
+            effect.mNativeEventHandler.sendMessage(m);
+        }
+
+    }
+
+    // ---------------------------------------------------------
+    // Native methods called from the Java side
+    // --------------------
+
+    // One-time per-class native initialization.
+    private static native final void native_init();
+
+    // Creates the native effect engine for the given type/uuid/priority/session.
+    // NOTE(review): id and desc look like out-parameters filled by the native
+    // side — confirm against the JNI implementation.
+    private native final int native_setup(Object audioeffect_this, String type,
+            String uuid, int priority, int audioSession, int[] id, Object[] desc,
+            String opPackageName);
+
+    private native final void native_finalize();
+
+    private native final void native_release();
+
+    private native final int native_setEnabled(boolean enabled);
+
+    private native final boolean native_getEnabled();
+
+    private native final boolean native_hasControl();
+
+    // Parameter accessors; buffers and sizes follow the effect_param_t layout
+    // referenced in NativeEventHandler.
+    private native final int native_setParameter(int psize, byte[] param,
+            int vsize, byte[] value);
+
+    private native final int native_getParameter(int psize, byte[] param,
+            int vsize, byte[] value);
+
+    private native final int native_command(int cmdCode, int cmdSize,
+            byte[] cmdData, int repSize, byte[] repData);
+
+    private static native Object[] native_query_effects();
+
+    private static native Object[] native_query_pre_processing(int audioSession);
+
+    // ---------------------------------------------------------
+    // Utility methods
+    // ------------------
+
+    /**
+     * Verifies that this AudioEffect reached STATE_INITIALIZED before a
+     * state-sensitive operation proceeds.
+     * @param methodName name of the calling method, included in the exception message.
+     * @throws IllegalStateException if the effect is not initialized.
+     * @hide
+     */
+    public void checkState(String methodName) throws IllegalStateException {
+        synchronized (mStateLock) {
+            if (mState != STATE_INITIALIZED) {
+                throw (new IllegalStateException(methodName
+                        + " called on uninitialized AudioEffect."));
+            }
+        }
+    }
+
+    /**
+     * Maps an error status returned by the native layer to a Java exception:
+     * ERROR_BAD_VALUE to IllegalArgumentException, ERROR_INVALID_OPERATION to
+     * UnsupportedOperationException, and any other error status to a generic
+     * RuntimeException. Non-error statuses return silently.
+     * @param status status code as returned by set/get parameter operations.
+     * @hide
+     */
+    public void checkStatus(int status) {
+        if (isError(status)) {
+            switch (status) {
+                case AudioEffect.ERROR_BAD_VALUE:
+                    throw (new IllegalArgumentException(
+                            "AudioEffect: bad parameter value"));
+                case AudioEffect.ERROR_INVALID_OPERATION:
+                    throw (new UnsupportedOperationException(
+                            "AudioEffect: invalid parameter operation"));
+                default:
+                    throw (new RuntimeException("AudioEffect: set/get parameter error"));
+            }
+        }
+    }
+
+    /**
+     * Returns whether a status code produced by the set/get parameter and
+     * command methods denotes a failure. Negative values are errors.
+     * @hide
+     */
+    public static boolean isError(int status) {
+        return status < 0;
+    }
+
+    /**
+     * Interprets the first four bytes of {@code valueBuf} as a native-byte-order int.
+     * @hide
+     */
+    public static int byteArrayToInt(byte[] valueBuf) {
+        return byteArrayToInt(valueBuf, 0);
+    }
+
+    /**
+     * Reads a native-byte-order int from {@code valueBuf} starting at {@code offset}.
+     * @hide
+     */
+    public static int byteArrayToInt(byte[] valueBuf, int offset) {
+        return ByteBuffer.wrap(valueBuf).order(ByteOrder.nativeOrder()).getInt(offset);
+    }
+
+    /**
+     * Encodes {@code value} as four bytes in native byte order.
+     * @hide
+     */
+    public static byte[] intToByteArray(int value) {
+        return ByteBuffer.allocate(4).order(ByteOrder.nativeOrder()).putInt(value).array();
+    }
+
+    /**
+     * Interprets the first two bytes of {@code valueBuf} as a native-byte-order short.
+     * @hide
+     */
+    public static short byteArrayToShort(byte[] valueBuf) {
+        return byteArrayToShort(valueBuf, 0);
+    }
+
+    /**
+     * Reads a native-byte-order short from {@code valueBuf} starting at {@code offset}.
+     * @hide
+     */
+    public static short byteArrayToShort(byte[] valueBuf, int offset) {
+        return ByteBuffer.wrap(valueBuf).order(ByteOrder.nativeOrder()).getShort(offset);
+    }
+
+    /**
+     * Encodes {@code value} as two bytes in native byte order.
+     * @hide
+     */
+    public static byte[] shortToByteArray(short value) {
+        ByteBuffer converter = ByteBuffer.allocate(2);
+        converter.order(ByteOrder.nativeOrder());
+        // The parameter is already a short; the former redundant local
+        // ("short sValue = (short) value;") added nothing and was removed.
+        converter.putShort(value);
+        return converter.array();
+    }
+
+    /**
+     * Concatenates the given byte arrays, in argument order, into one new array.
+     * @hide
+     */
+    public static byte[] concatArrays(byte[]... arrays) {
+        int totalLength = 0;
+        for (byte[] chunk : arrays) {
+            totalLength += chunk.length;
+        }
+        byte[] result = new byte[totalLength];
+
+        int position = 0;
+        for (byte[] chunk : arrays) {
+            System.arraycopy(chunk, 0, result, position, chunk.length);
+            position += chunk.length;
+        }
+        return result;
+    }
+}
diff --git a/android/media/audiofx/AutomaticGainControl.java b/android/media/audiofx/AutomaticGainControl.java
new file mode 100644
index 00000000..a76b4de7
--- /dev/null
+++ b/android/media/audiofx/AutomaticGainControl.java
@@ -0,0 +1,96 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.audiofx;
+
+import android.util.Log;
+
+/**
+ * Automatic Gain Control (AGC).
+ * <p>Automatic Gain Control (AGC) is an audio pre-processor which automatically normalizes the
+ * output of the captured signal by boosting or lowering input from the microphone to match a preset
+ * level so that the output signal level is virtually constant.
+ * AGC can be used by applications where the input signal dynamic range is not important but where
+ * a constant strong capture level is desired.
+ * <p>An application creates a AutomaticGainControl object to instantiate and control an AGC
+ * engine in the audio framework.
+ * <p>To attach the AutomaticGainControl to a particular {@link android.media.AudioRecord},
+ * specify the audio session ID of this AudioRecord when creating the AutomaticGainControl.
+ * The audio session is retrieved by calling
+ * {@link android.media.AudioRecord#getAudioSessionId()} on the AudioRecord instance.
+ * <p>On some devices, an AGC can be inserted by default in the capture path by the platform
+ * according to the {@link android.media.MediaRecorder.AudioSource} used. The application should
+ * call AutomaticGainControl.getEnabled() after creating the AGC to check the default AGC activation
+ * state on a particular AudioRecord session.
+ * <p>See {@link android.media.audiofx.AudioEffect} class for more details on
+ * controlling audio effects.
+ */
+
+public class AutomaticGainControl extends AudioEffect {
+
+    private final static String TAG = "AutomaticGainControl";
+
+    /**
+     * Checks if the device implements automatic gain control.
+     * @return true if the device implements automatic gain control, false otherwise.
+     */
+    public static boolean isAvailable() {
+        return AudioEffect.isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_AGC);
+    }
+
+    /**
+     * Creates an AutomaticGainControl and attaches it to the AudioRecord on the audio
+     * session specified.
+     * @param audioSession system wide unique audio session identifier. The AutomaticGainControl
+     * will be applied to the AudioRecord with the same audio session.
+     * @return AutomaticGainControl created or null if the device does not implement AGC.
+     */
+    public static AutomaticGainControl create(int audioSession) {
+        AutomaticGainControl agc = null;
+        try {
+            agc = new AutomaticGainControl(audioSession);
+        } catch (IllegalArgumentException e) {
+            // Fix: "agc" is always null when the constructor throws, so the old
+            // message ("not implemented on this device " + agc) only ever
+            // appended "null".
+            Log.w(TAG, "not implemented on this device");
+        } catch (UnsupportedOperationException e) {
+            Log.w(TAG, "not enough resources");
+        } catch (RuntimeException e) {
+            Log.w(TAG, "not enough memory");
+        }
+        return agc;
+    }
+
+    /**
+     * Class constructor.
+     * <p> The constructor is not guaranteed to succeed and throws the following exceptions:
+     * <ul>
+     * <li>IllegalArgumentException is thrown if the device does not implement an AGC</li>
+     * <li>UnsupportedOperationException is thrown if the resources allocated to audio
+     * pre-processing are currently exceeded.</li>
+     * <li>RuntimeException is thrown if a memory allocation error occurs.</li>
+     * </ul>
+     *
+     * @param audioSession system wide unique audio session identifier. The AutomaticGainControl
+     * will be applied to the AudioRecord with the same audio session.
+     *
+     * @throws java.lang.IllegalArgumentException
+     * @throws java.lang.UnsupportedOperationException
+     * @throws java.lang.RuntimeException
+     */
+    private AutomaticGainControl(int audioSession)
+            throws IllegalArgumentException, UnsupportedOperationException, RuntimeException {
+        super(EFFECT_TYPE_AGC, EFFECT_TYPE_NULL, 0, audioSession);
+    }
+}
diff --git a/android/media/audiofx/BassBoost.java b/android/media/audiofx/BassBoost.java
new file mode 100644
index 00000000..a46cc223
--- /dev/null
+++ b/android/media/audiofx/BassBoost.java
@@ -0,0 +1,287 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.audiofx;
+
+import android.media.audiofx.AudioEffect;
+import android.util.Log;
+
+import java.util.StringTokenizer;
+
+
+/**
+ * Bass boost is an audio effect to boost or amplify low frequencies of the sound. It is comparable
+ * to a simple equalizer but limited to one band amplification in the low frequency range.
+ * <p>An application creates a BassBoost object to instantiate and control a bass boost engine in
+ * the audio framework.
+ * <p>The methods, parameter types and units exposed by the BassBoost implementation are directly
+ * mapping those defined by the OpenSL ES 1.0.1 Specification (http://www.khronos.org/opensles/)
+ * for the SLBassBoostItf interface. Please refer to this specification for more details.
+ * <p>To attach the BassBoost to a particular AudioTrack or MediaPlayer, specify the audio session
+ * ID of this AudioTrack or MediaPlayer when constructing the BassBoost.
+ * <p>NOTE: attaching a BassBoost to the global audio output mix by use of session 0 is deprecated.
+ * <p>See {@link android.media.MediaPlayer#getAudioSessionId()} for details on audio sessions.
+ * <p>See {@link android.media.audiofx.AudioEffect} class for more details on
+ * controlling audio effects.
+ */
+
+public class BassBoost extends AudioEffect {
+
+    private final static String TAG = "BassBoost";
+
+    // These constants must be synchronized with those in
+    // frameworks/base/include/media/EffectBassBoostApi.h
+    /**
+     * Is strength parameter supported by bass boost engine. Parameter ID for getParameter().
+     */
+    public static final int PARAM_STRENGTH_SUPPORTED = 0;
+    /**
+     * Bass boost effect strength. Parameter ID for
+     * {@link android.media.audiofx.BassBoost.OnParameterChangeListener}
+     */
+    public static final int PARAM_STRENGTH = 1;
+
+    /**
+     * Indicates if strength parameter is supported by the bass boost engine
+     */
+    private boolean mStrengthSupported = false;
+
+    /**
+     * Registered listener for parameter changes.
+     */
+    private OnParameterChangeListener mParamListener = null;
+
+    /**
+     * Listener used internally to receive raw parameter change events from the AudioEffect
+     * super class.
+     */
+    private BaseParameterListener mBaseParamListener = null;
+
+    /**
+     * Lock for access to mParamListener
+     */
+    private final Object mParamListenerLock = new Object();
+
+    /**
+     * Class constructor.
+     * @param priority the priority level requested by the application for controlling the BassBoost
+     * engine. As the same engine can be shared by several applications, this parameter indicates
+     * how much the requesting application needs control of effect parameters. The normal priority
+     * is 0, above normal is a positive number, below normal a negative number.
+     * @param audioSession system wide unique audio session identifier. The BassBoost will be
+     * attached to the MediaPlayer or AudioTrack in the same audio session.
+     *
+     * @throws java.lang.IllegalStateException
+     * @throws java.lang.IllegalArgumentException
+     * @throws java.lang.UnsupportedOperationException
+     * @throws java.lang.RuntimeException
+     */
+    public BassBoost(int priority, int audioSession)
+            throws IllegalStateException, IllegalArgumentException,
+            UnsupportedOperationException, RuntimeException {
+        super(EFFECT_TYPE_BASS_BOOST, EFFECT_TYPE_NULL, priority, audioSession);
+
+        if (audioSession == 0) {
+            Log.w(TAG, "WARNING: attaching a BassBoost to global output mix is deprecated!");
+        }
+
+        // Cache whether the engine supports a variable strength.
+        int[] value = new int[1];
+        checkStatus(getParameter(PARAM_STRENGTH_SUPPORTED, value));
+        mStrengthSupported = (value[0] != 0);
+    }
+
+    /**
+     * Indicates whether setting strength is supported. If this method returns false, only one
+     * strength is supported and the setStrength() method always rounds to that value.
+     * @return true if strength parameter is supported, false otherwise
+     */
+    public boolean getStrengthSupported() {
+        return mStrengthSupported;
+    }
+
+    /**
+     * Sets the strength of the bass boost effect. If the implementation does not support per mille
+     * accuracy for setting the strength, it is allowed to round the given strength to the nearest
+     * supported value. You can use the {@link #getRoundedStrength()} method to query the
+     * (possibly rounded) value that was actually set.
+     * @param strength strength of the effect. The valid range for strength is [0, 1000],
+     * where 0 per mille designates the mildest effect and 1000 per mille designates the strongest.
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public void setStrength(short strength)
+            throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        checkStatus(setParameter(PARAM_STRENGTH, strength));
+    }
+
+    /**
+     * Gets the current strength of the effect.
+     * @return the strength of the effect. The valid range for strength is [0, 1000], where 0 per
+     * mille designates the mildest effect and 1000 per mille the strongest
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public short getRoundedStrength()
+            throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        short[] value = new short[1];
+        checkStatus(getParameter(PARAM_STRENGTH, value));
+        return value[0];
+    }
+
+    /**
+     * The OnParameterChangeListener interface defines a method called by the BassBoost when a
+     * parameter value has changed.
+     */
+    public interface OnParameterChangeListener {
+        /**
+         * Method called when a parameter value has changed. The method is called only if the
+         * parameter was changed by another application having the control of the same
+         * BassBoost engine.
+         * @param effect the BassBoost on which the interface is registered.
+         * @param status status of the set parameter operation.
+         * @param param ID of the modified parameter. See {@link #PARAM_STRENGTH} ...
+         * @param value the new parameter value.
+         */
+        void onParameterChange(BassBoost effect, int status, int param, short value);
+    }
+
+    /**
+     * Listener used internally to receive unformatted parameter change events from AudioEffect
+     * super class.
+     */
+    private class BaseParameterListener implements AudioEffect.OnParameterChangeListener {
+        private BaseParameterListener() {
+
+        }
+        public void onParameterChange(AudioEffect effect, int status, byte[] param, byte[] value) {
+            OnParameterChangeListener l = null;
+
+            synchronized (mParamListenerLock) {
+                if (mParamListener != null) {
+                    l = mParamListener;
+                }
+            }
+            if (l != null) {
+                int p = -1;
+                short v = -1;
+
+                // Decode the raw 4-byte parameter ID and 2-byte value; forward
+                // only if both decoded successfully.
+                if (param.length == 4) {
+                    p = byteArrayToInt(param, 0);
+                }
+                if (value.length == 2) {
+                    v = byteArrayToShort(value, 0);
+                }
+                if (p != -1 && v != -1) {
+                    l.onParameterChange(BassBoost.this, status, p, v);
+                }
+            }
+        }
+    }
+
+    /**
+     * Registers an OnParameterChangeListener interface.
+     * @param listener OnParameterChangeListener interface registered
+     */
+    public void setParameterListener(OnParameterChangeListener listener) {
+        synchronized (mParamListenerLock) {
+            if (mParamListener == null) {
+                mParamListener = listener;
+                mBaseParamListener = new BaseParameterListener();
+                super.setParameterListener(mBaseParamListener);
+            }
+        }
+    }
+
+    /**
+     * The Settings class regroups all bass boost parameters. It is used in
+     * conjunction with getProperties() and setProperties() methods to backup and restore
+     * all parameters in a single call.
+     */
+    public static class Settings {
+        public short strength;
+
+        public Settings() {
+        }
+
+        /**
+         * Settings class constructor from a key=value; pairs formatted string. The string is
+         * typically returned by Settings.toString() method.
+         * @throws IllegalArgumentException if the string is not correctly formatted.
+         */
+        public Settings(String settings) {
+            StringTokenizer st = new StringTokenizer(settings, "=;");
+            // Fix: the token count was previously stored in a local and then
+            // ignored (countTokens() was called a second time); use the local.
+            int tokens = st.countTokens();
+            if (tokens != 3) {
+                throw new IllegalArgumentException("settings: " + settings);
+            }
+            String key = st.nextToken();
+            if (!key.equals("BassBoost")) {
+                throw new IllegalArgumentException(
+                        "invalid settings for BassBoost: " + key);
+            }
+            try {
+                key = st.nextToken();
+                if (!key.equals("strength")) {
+                    throw new IllegalArgumentException("invalid key name: " + key);
+                }
+                strength = Short.parseShort(st.nextToken());
+            } catch (NumberFormatException nfe) {
+                throw new IllegalArgumentException("invalid value for key: " + key);
+            }
+        }
+
+        @Override
+        public String toString() {
+            // Simplified from "new String(...)" — the wrapper added nothing.
+            return "BassBoost" +
+                    ";strength=" + Short.toString(strength);
+        }
+    };
+
+
+    /**
+     * Gets the bass boost properties. This method is useful when a snapshot of current
+     * bass boost settings must be saved by the application.
+     * @return a BassBoost.Settings object containing all current parameters values
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public BassBoost.Settings getProperties()
+            throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        Settings settings = new Settings();
+        short[] value = new short[1];
+        checkStatus(getParameter(PARAM_STRENGTH, value));
+        settings.strength = value[0];
+        return settings;
+    }
+
+    /**
+     * Sets the bass boost properties. This method is useful when bass boost settings have to
+     * be applied from a previous backup.
+     * @param settings a BassBoost.Settings object containing the properties to apply
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public void setProperties(BassBoost.Settings settings)
+            throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        checkStatus(setParameter(PARAM_STRENGTH, settings.strength));
+    }
+}
diff --git a/android/media/audiofx/EnvironmentalReverb.java b/android/media/audiofx/EnvironmentalReverb.java
new file mode 100644
index 00000000..ef1c4c3e
--- /dev/null
+++ b/android/media/audiofx/EnvironmentalReverb.java
@@ -0,0 +1,661 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.audiofx;
+
+import android.media.audiofx.AudioEffect;
+import java.util.StringTokenizer;
+
+/**
+ * A sound generated within a room travels in many directions. The listener first hears the direct
+ * sound from the source itself. Later, he or she hears discrete echoes caused by sound bouncing off
+ * nearby walls, the ceiling and the floor. As sound waves arrive after undergoing more and more
+ * reflections, individual reflections become indistinguishable and the listener hears continuous
+ * reverberation that decays over time.
+ * Reverb is vital for modeling a listener's environment. It can be used in music applications
+ * to simulate music being played back in various environments, or in games to immerse the
+ * listener within the game's environment.
+ * The EnvironmentalReverb class allows an application to control each reverb engine property in a
+ * global reverb environment and is more suitable for games. For basic control, more suitable for
+ * music applications, it is recommended to use the
+ * {@link android.media.audiofx.PresetReverb} class.
+ * <p>An application creates a EnvironmentalReverb object to instantiate and control a reverb engine
+ * in the audio framework.
+ * <p>The methods, parameter types and units exposed by the EnvironmentalReverb implementation are
+ * directly mapping those defined by the OpenSL ES 1.0.1 Specification
+ * (http://www.khronos.org/opensles/) for the SLEnvironmentalReverbItf interface.
+ * Please refer to this specification for more details.
+ * <p>The EnvironmentalReverb is an output mix auxiliary effect and should be created on
+ * Audio session 0. In order for a MediaPlayer or AudioTrack to be fed into this effect,
+ * they must be explicitly attached to it and a send level must be specified. Use the effect ID
+ * returned by getId() method to designate this particular effect when attaching it to the
+ * MediaPlayer or AudioTrack.
+ * <p>Creating a reverb on the output mix (audio session 0) requires permission
+ * {@link android.Manifest.permission#MODIFY_AUDIO_SETTINGS}
+ * <p>See {@link android.media.audiofx.AudioEffect} class for more details on controlling
+ * audio effects.
+ */
+
+public class EnvironmentalReverb extends AudioEffect {
+
+    private final static String TAG = "EnvironmentalReverb";
+
+    // These constants must be synchronized with those in
+    // frameworks/base/include/media/EffectEnvironmentalReverbApi.h
+
+    /**
+     * Room level. Parameter ID for OnParameterChangeListener
+     */
+    public static final int PARAM_ROOM_LEVEL = 0;
+    /**
+     * Room HF level. Parameter ID for OnParameterChangeListener
+     */
+    public static final int PARAM_ROOM_HF_LEVEL = 1;
+    /**
+     * Decay time. Parameter ID for OnParameterChangeListener
+     */
+    public static final int PARAM_DECAY_TIME = 2;
+    /**
+     * Decay HF ratio. Parameter ID for
+     * {@link android.media.audiofx.EnvironmentalReverb.OnParameterChangeListener}
+     */
+    public static final int PARAM_DECAY_HF_RATIO = 3;
+    /**
+     * Early reflections level. Parameter ID for OnParameterChangeListener
+     */
+    public static final int PARAM_REFLECTIONS_LEVEL = 4;
+    /**
+     * Early reflections delay. Parameter ID for OnParameterChangeListener
+     */
+    public static final int PARAM_REFLECTIONS_DELAY = 5;
+    /**
+     * Reverb level. Parameter ID for OnParameterChangeListener
+     */
+    public static final int PARAM_REVERB_LEVEL = 6;
+    /**
+     * Reverb delay. Parameter ID for OnParameterChangeListener
+     */
+    public static final int PARAM_REVERB_DELAY = 7;
+    /**
+     * Diffusion. Parameter ID for OnParameterChangeListener
+     */
+    public static final int PARAM_DIFFUSION = 8;
+    /**
+     * Density. Parameter ID for OnParameterChangeListener
+     */
+    public static final int PARAM_DENSITY = 9;
+
+    // Used by setProperties()/getProperties(); intentionally private — not part
+    // of the public parameter ID set above.
+    private static final int PARAM_PROPERTIES = 10;
+
+    /**
+     * Registered listener for parameter changes
+     */
+    private OnParameterChangeListener mParamListener = null;
+
+    /**
+     * Listener used internally to receive raw parameter change events from the AudioEffect super
+     * class
+     */
+    private BaseParameterListener mBaseParamListener = null;
+
+    /**
+     * Lock for access to mParamListener
+     */
+    private final Object mParamListenerLock = new Object();
+
+    /**
+     * Class constructor.
+     * @param priority the priority level requested by the application for controlling the
+     * EnvironmentalReverb engine. As the same engine can be shared by several applications, this
+     * parameter indicates how much the requesting application needs control of effect parameters.
+     * The normal priority is 0, above normal is a positive number, below normal a negative number.
+     * @param audioSession system wide unique audio session identifier. If audioSession
+     * is not 0, the EnvironmentalReverb will be attached to the MediaPlayer or AudioTrack in the
+     * same audio session. Otherwise, the EnvironmentalReverb will apply to the output mix.
+     * As the EnvironmentalReverb is an auxiliary effect it is recommended to instantiate it on
+     * audio session 0 and to attach it to the MediaPlayer auxiliary output.
+     *
+     * @throws java.lang.IllegalArgumentException
+     * @throws java.lang.UnsupportedOperationException
+     * @throws java.lang.RuntimeException
+     */
+    public EnvironmentalReverb(int priority, int audioSession)
+            throws IllegalArgumentException, UnsupportedOperationException, RuntimeException {
+        super(EFFECT_TYPE_ENV_REVERB, EFFECT_TYPE_NULL, priority, audioSession);
+    }
+
+    /**
+     * Sets the master volume level of the environmental reverb effect.
+     * @param room room level in millibels. The valid range is [-9000, 0].
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public void setRoomLevel(short room)
+            throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        // short parameters travel as 2 bytes in native byte order
+        byte[] param = shortToByteArray(room);
+        checkStatus(setParameter(PARAM_ROOM_LEVEL, param));
+    }
+
+    /**
+     * Gets the master volume level of the environmental reverb effect.
+     * @return the room level in millibels.
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public short getRoomLevel()
+            throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        byte[] param = new byte[2];
+        checkStatus(getParameter(PARAM_ROOM_LEVEL, param));
+        return byteArrayToShort(param);
+    }
+
+    /**
+     * Sets the volume level at 5 kHz relative to the volume level at low frequencies of the
+     * overall reverb effect.
+     * <p>This controls a low-pass filter that will reduce the level of the high-frequency.
+     * @param roomHF high frequency attenuation level in millibels. The valid range is [-9000, 0].
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public void setRoomHFLevel(short roomHF)
+            throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        // short parameters travel as 2 bytes in native byte order
+        byte[] param = shortToByteArray(roomHF);
+        checkStatus(setParameter(PARAM_ROOM_HF_LEVEL, param));
+    }
+
+    /**
+     * Gets the room HF level.
+     * @return the room HF level in millibels.
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public short getRoomHFLevel()
+            throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        byte[] param = new byte[2];
+        checkStatus(getParameter(PARAM_ROOM_HF_LEVEL, param));
+        return byteArrayToShort(param);
+    }
+
+    /**
+     * Sets the time taken for the level of reverberation to decay by 60 dB.
+     * @param decayTime decay time in milliseconds. The valid range is [100, 20000].
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public void setDecayTime(int decayTime)
+            throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        // int parameters travel as 4 bytes in native byte order
+        byte[] param = intToByteArray(decayTime);
+        checkStatus(setParameter(PARAM_DECAY_TIME, param));
+    }
+
+    /**
+     * Gets the decay time.
+     * @return the decay time in milliseconds.
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public int getDecayTime()
+            throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        byte[] param = new byte[4];
+        checkStatus(getParameter(PARAM_DECAY_TIME, param));
+        return byteArrayToInt(param);
+    }
+
+    /**
+     * Sets the ratio of high frequency decay time (at 5 kHz) relative to the decay time at low
+     * frequencies.
+     * @param decayHFRatio high frequency decay ratio using a permille scale. The valid range is
+     * [100, 2000]. A ratio of 1000 indicates that all frequencies decay at the same rate.
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public void setDecayHFRatio(short decayHFRatio)
+            throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        byte[] param = shortToByteArray(decayHFRatio);
+        checkStatus(setParameter(PARAM_DECAY_HF_RATIO, param));
+    }
+
+    /**
+     * Gets the ratio of high frequency decay time (at 5 kHz) relative to low frequencies.
+     * @return the decay HF ratio. See {@link #setDecayHFRatio(short)} for units.
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public short getDecayHFRatio()
+            throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        byte[] param = new byte[2];
+        checkStatus(getParameter(PARAM_DECAY_HF_RATIO, param));
+        return byteArrayToShort(param);
+    }
+
+    /**
+     * Sets the volume level of the early reflections.
+     * <p>This level is combined with the overall room level
+     * (set using {@link #setRoomLevel(short)}).
+     * @param reflectionsLevel reflection level in millibels. The valid range is [-9000, 1000].
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public void setReflectionsLevel(short reflectionsLevel)
+            throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        // short parameters travel as 2 bytes in native byte order
+        byte[] param = shortToByteArray(reflectionsLevel);
+        checkStatus(setParameter(PARAM_REFLECTIONS_LEVEL, param));
+    }
+
+    /**
+     * Gets the volume level of the early reflections.
+     * @return the early reflections level in millibels.
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public short getReflectionsLevel()
+            throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        byte[] param = new byte[2];
+        checkStatus(getParameter(PARAM_REFLECTIONS_LEVEL, param));
+        return byteArrayToShort(param);
+    }
+
+    /**
+     * Sets the delay time for the early reflections.
+     * <p>This is the time between when the direct path is heard and when the first
+     * reflection is heard.
+     * @param reflectionsDelay reflections delay in milliseconds. The valid range is [0, 300].
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public void setReflectionsDelay(int reflectionsDelay)
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        checkStatus(setParameter(PARAM_REFLECTIONS_DELAY, intToByteArray(reflectionsDelay)));
+    }
+
+    /**
+     * Gets the reflections delay.
+     * @return the early reflections delay in milliseconds.
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public int getReflectionsDelay()
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        byte[] value = new byte[4];
+        checkStatus(getParameter(PARAM_REFLECTIONS_DELAY, value));
+        return byteArrayToInt(value);
+    }
+
+    /**
+     * Sets the volume level of the late reverberation.
+     * <p>This level is combined with the overall room level
+     * (set using {@link #setRoomLevel(short)}).
+     * @param reverbLevel reverb level in millibels. The valid range is [-9000, 2000].
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public void setReverbLevel(short reverbLevel)
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        checkStatus(setParameter(PARAM_REVERB_LEVEL, shortToByteArray(reverbLevel)));
+    }
+
+    /**
+     * Gets the reverb level.
+     * @return the reverb level in millibels.
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public short getReverbLevel()
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        byte[] value = new byte[2];
+        checkStatus(getParameter(PARAM_REVERB_LEVEL, value));
+        return byteArrayToShort(value);
+    }
+
+    /**
+     * Sets the time between the first reflection and the reverberation.
+     * @param reverbDelay reverb delay in milliseconds. The valid range is [0, 100].
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public void setReverbDelay(int reverbDelay)
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        checkStatus(setParameter(PARAM_REVERB_DELAY, intToByteArray(reverbDelay)));
+    }
+
+    /**
+     * Gets the reverb delay.
+     * @return the reverb delay in milliseconds.
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public int getReverbDelay()
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        byte[] value = new byte[4];
+        checkStatus(getParameter(PARAM_REVERB_DELAY, value));
+        return byteArrayToInt(value);
+    }
+
+    /**
+     * Sets the echo density in the late reverberation decay.
+     * <p>The scale should approximately map linearly to the perceived change in reverberation.
+     * @param diffusion diffusion specified using a permille scale. The valid range is
+     * [0, 1000]. A value of 1000 o/oo indicates a smooth reverberation decay;
+     * values below this level give a more <i>grainy</i> character.
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public void setDiffusion(short diffusion)
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        checkStatus(setParameter(PARAM_DIFFUSION, shortToByteArray(diffusion)));
+    }
+
+    /**
+     * Gets the diffusion level.
+     * @return the diffusion level. See {@link #setDiffusion(short)} for units.
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public short getDiffusion()
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        byte[] value = new byte[2];
+        checkStatus(getParameter(PARAM_DIFFUSION, value));
+        return byteArrayToShort(value);
+    }
+
+
+    /**
+     * Controls the modal density of the late reverberation decay.
+     * <p>The scale should approximately map linearly to the perceived change in reverberation.
+     * A lower density creates a hollow sound that is useful for simulating small reverberation
+     * spaces such as bathrooms.
+     * @param density density specified using a permille scale. The valid range is [0, 1000].
+     * A value of 1000 o/oo indicates a natural sounding reverberation; values below this level
+     * produce a more colored effect.
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public void setDensity(short density)
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        checkStatus(setParameter(PARAM_DENSITY, shortToByteArray(density)));
+    }
+
+    /**
+     * Gets the density level.
+     * @return the density level. See {@link #setDensity(short)} for units.
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public short getDensity()
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        byte[] value = new byte[2];
+        checkStatus(getParameter(PARAM_DENSITY, value));
+        return byteArrayToShort(value);
+    }
+
+
+    /**
+     * The OnParameterChangeListener interface defines a method called by the EnvironmentalReverb
+     * when a parameter value has changed.
+     */
+    public interface OnParameterChangeListener {
+        /**
+         * Method called when a parameter value has changed. The method is called only if the
+         * parameter was changed by another application having the control of the same
+         * EnvironmentalReverb engine.
+         * @param effect the EnvironmentalReverb on which the interface is registered.
+         * @param status status of the set parameter operation.
+         * @param param ID of the modified parameter. See {@link #PARAM_ROOM_LEVEL} ...
+         * @param value the new parameter value. Values carried natively as 16-bit are
+         *     widened to int before delivery.
+         */
+        void onParameterChange(EnvironmentalReverb effect, int status, int param, int value);
+    }
+
+    /**
+     * Listener used internally to receive unformatted parameter change events from the
+     * AudioEffect super class.
+     */
+    private class BaseParameterListener implements AudioEffect.OnParameterChangeListener {
+        private BaseParameterListener() {
+        }
+
+        public void onParameterChange(AudioEffect effect, int status, byte[] param, byte[] value) {
+            OnParameterChangeListener listener;
+            synchronized (mParamListenerLock) {
+                listener = mParamListener;
+            }
+            if (listener == null) {
+                return;
+            }
+            // Decode the 32-bit parameter ID and the value, whose width (16 or 32 bit)
+            // depends on which parameter changed. -1 marks an unrecognized payload.
+            int p = (param.length == 4) ? byteArrayToInt(param, 0) : -1;
+            int v = -1;
+            if (value.length == 2) {
+                v = (int) byteArrayToShort(value, 0);
+            } else if (value.length == 4) {
+                v = byteArrayToInt(value, 0);
+            }
+            if (p != -1 && v != -1) {
+                listener.onParameterChange(EnvironmentalReverb.this, status, p, v);
+            }
+        }
+    }
+
+    /**
+     * Registers an OnParameterChangeListener interface.
+     * <p>Only the first registration takes effect; subsequent calls are ignored.
+     * @param listener OnParameterChangeListener interface registered
+     */
+    public void setParameterListener(OnParameterChangeListener listener) {
+        synchronized (mParamListenerLock) {
+            if (mParamListener != null) {
+                return;
+            }
+            mParamListener = listener;
+            mBaseParamListener = new BaseParameterListener();
+            super.setParameterListener(mBaseParamListener);
+        }
+    }
+
+    /**
+     * The Settings class regroups all environmental reverb parameters. It is used in
+     * conjunction with the getProperties() and setProperties() methods to backup and restore
+     * all parameters in a single call.
+     */
+    public static class Settings {
+        public short roomLevel;
+        public short roomHFLevel;
+        public int decayTime;
+        public short decayHFRatio;
+        public short reflectionsLevel;
+        public int reflectionsDelay;
+        public short reverbLevel;
+        public int reverbDelay;
+        public short diffusion;
+        public short density;
+
+        public Settings() {
+        }
+
+        /**
+         * Settings class constructor from a key=value; pairs formatted string. The string is
+         * typically returned by the Settings.toString() method.
+         * @throws IllegalArgumentException if the string is not correctly formatted.
+         */
+        public Settings(String settings) {
+            StringTokenizer st = new StringTokenizer(settings, "=;");
+            // Expected layout: the "EnvironmentalReverb" tag plus 10 key/value pairs
+            // = 21 tokens. (Previously countTokens() was computed twice and the first
+            // result stored in an unused local.)
+            if (st.countTokens() != 21) {
+                throw new IllegalArgumentException("settings: " + settings);
+            }
+            String key = st.nextToken();
+            if (!key.equals("EnvironmentalReverb")) {
+                throw new IllegalArgumentException(
+                        "invalid settings for EnvironmentalReverb: " + key);
+            }
+            roomLevel = nextShort(st, "roomLevel");
+            roomHFLevel = nextShort(st, "roomHFLevel");
+            decayTime = nextInt(st, "decayTime");
+            decayHFRatio = nextShort(st, "decayHFRatio");
+            reflectionsLevel = nextShort(st, "reflectionsLevel");
+            reflectionsDelay = nextInt(st, "reflectionsDelay");
+            reverbLevel = nextShort(st, "reverbLevel");
+            reverbDelay = nextInt(st, "reverbDelay");
+            diffusion = nextShort(st, "diffusion");
+            density = nextShort(st, "density");
+        }
+
+        // Consumes one "key=value" pair: verifies the key name, then parses a short value.
+        private static short nextShort(StringTokenizer st, String expectedKey) {
+            String key = st.nextToken();
+            if (!key.equals(expectedKey)) {
+                throw new IllegalArgumentException("invalid key name: " + key);
+            }
+            try {
+                return Short.parseShort(st.nextToken());
+            } catch (NumberFormatException nfe) {
+                throw new IllegalArgumentException("invalid value for key: " + key);
+            }
+        }
+
+        // Consumes one "key=value" pair: verifies the key name, then parses an int value.
+        private static int nextInt(StringTokenizer st, String expectedKey) {
+            String key = st.nextToken();
+            if (!key.equals(expectedKey)) {
+                throw new IllegalArgumentException("invalid key name: " + key);
+            }
+            try {
+                return Integer.parseInt(st.nextToken());
+            } catch (NumberFormatException nfe) {
+                throw new IllegalArgumentException("invalid value for key: " + key);
+            }
+        }
+
+        @Override
+        public String toString() {
+            // StringBuilder replaces the previous new String(a + b + ...) anti-pattern;
+            // the emitted text is byte-identical.
+            StringBuilder sb = new StringBuilder("EnvironmentalReverb");
+            sb.append(";roomLevel=").append(roomLevel);
+            sb.append(";roomHFLevel=").append(roomHFLevel);
+            sb.append(";decayTime=").append(decayTime);
+            sb.append(";decayHFRatio=").append(decayHFRatio);
+            sb.append(";reflectionsLevel=").append(reflectionsLevel);
+            sb.append(";reflectionsDelay=").append(reflectionsDelay);
+            sb.append(";reverbLevel=").append(reverbLevel);
+            sb.append(";reverbDelay=").append(reverbDelay);
+            sb.append(";diffusion=").append(diffusion);
+            sb.append(";density=").append(density);
+            return sb.toString();
+        }
+    };
+
+    // Keep this in sync with sizeof(s_reverb_settings) defined in
+    // frameworks/base/include/media/EffectEnvironmentalReverbApi.h
+    // (was "static private int": a mutable pseudo-constant; now final, canonical order).
+    private static final int PROPERTY_SIZE = 26;
+
+    /**
+     * Gets the environmental reverb properties. This method is useful when a snapshot of current
+     * reverb settings must be saved by the application.
+     * @return an EnvironmentalReverb.Settings object containing all current parameters values
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public EnvironmentalReverb.Settings getProperties()
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        byte[] param = new byte[PROPERTY_SIZE];
+        checkStatus(getParameter(PARAM_PROPERTIES, param));
+        Settings settings = new Settings();
+        // Byte offsets mirror the field order of the native s_reverb_settings struct:
+        // shorts are 2 bytes, ints 4 bytes.
+        settings.roomLevel = byteArrayToShort(param, 0);
+        settings.roomHFLevel = byteArrayToShort(param, 2);
+        settings.decayTime = byteArrayToInt(param, 4);
+        settings.decayHFRatio = byteArrayToShort(param, 8);
+        settings.reflectionsLevel = byteArrayToShort(param, 10);
+        settings.reflectionsDelay = byteArrayToInt(param, 12);
+        settings.reverbLevel = byteArrayToShort(param, 16);
+        settings.reverbDelay = byteArrayToInt(param, 18);
+        settings.diffusion = byteArrayToShort(param, 22);
+        settings.density = byteArrayToShort(param, 24);
+        return settings;
+    }
+
+    /**
+     * Sets the environmental reverb properties. This method is useful when reverb settings have to
+     * be applied from a previous backup.
+     * @param settings a EnvironmentalReverb.Settings object containing the properties to apply
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public void setProperties(EnvironmentalReverb.Settings settings)
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+
+        // Serialization order must match the offsets read back by getProperties()
+        // (i.e. the native s_reverb_settings field order).
+        byte[] param = concatArrays(shortToByteArray(settings.roomLevel),
+                shortToByteArray(settings.roomHFLevel),
+                intToByteArray(settings.decayTime),
+                shortToByteArray(settings.decayHFRatio),
+                shortToByteArray(settings.reflectionsLevel),
+                intToByteArray(settings.reflectionsDelay),
+                shortToByteArray(settings.reverbLevel),
+                intToByteArray(settings.reverbDelay),
+                shortToByteArray(settings.diffusion),
+                shortToByteArray(settings.density));
+
+        checkStatus(setParameter(PARAM_PROPERTIES, param));
+    }
+}
diff --git a/android/media/audiofx/Equalizer.java b/android/media/audiofx/Equalizer.java
new file mode 100644
index 00000000..7abada07
--- /dev/null
+++ b/android/media/audiofx/Equalizer.java
@@ -0,0 +1,559 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.audiofx;
+
+import android.media.audiofx.AudioEffect;
+import android.util.Log;
+
+import java.nio.charset.StandardCharsets;
+import java.util.StringTokenizer;
+
+
+/**
+ * An Equalizer is used to alter the frequency response of a particular music source or of the main
+ * output mix.
+ * <p>An application creates an Equalizer object to instantiate and control an Equalizer engine
+ * in the audio framework. The application can either simply use predefined presets or have a more
+ * precise control of the gain in each frequency band controlled by the equalizer.
+ * <p>The methods, parameter types and units exposed by the Equalizer implementation are directly
+ * mapping those defined by the OpenSL ES 1.0.1 Specification (http://www.khronos.org/opensles/)
+ * for the SLEqualizerItf interface. Please refer to this specification for more details.
+ * <p>To attach the Equalizer to a particular AudioTrack or MediaPlayer, specify the audio session
+ * ID of this AudioTrack or MediaPlayer when constructing the Equalizer.
+ * <p>NOTE: attaching an Equalizer to the global audio output mix by use of session 0 is deprecated.
+ * <p>See {@link android.media.MediaPlayer#getAudioSessionId()} for details on audio sessions.
+ * <p>See {@link android.media.audiofx.AudioEffect} class for more details on controlling audio
+ * effects.
+ */
+
+public class Equalizer extends AudioEffect {
+
+ private final static String TAG = "Equalizer";
+
+ // These constants must be synchronized with those in
+ // frameworks/base/include/media/EffectEqualizerApi.h
+ /**
+ * Number of bands. Parameter ID for OnParameterChangeListener
+ */
+ public static final int PARAM_NUM_BANDS = 0;
+ /**
+ * Band level range. Parameter ID for OnParameterChangeListener
+ */
+ public static final int PARAM_LEVEL_RANGE = 1;
+ /**
+ * Band level. Parameter ID for OnParameterChangeListener
+ */
+ public static final int PARAM_BAND_LEVEL = 2;
+ /**
+ * Band center frequency. Parameter ID for OnParameterChangeListener
+ */
+ public static final int PARAM_CENTER_FREQ = 3;
+ /**
+ * Band frequency range. Parameter ID for
+ * {@link android.media.audiofx.Equalizer.OnParameterChangeListener}
+ */
+ public static final int PARAM_BAND_FREQ_RANGE = 4;
+ /**
+ * Band for a given frequency. Parameter ID for OnParameterChangeListener
+ *
+ */
+ public static final int PARAM_GET_BAND = 5;
+ /**
+ * Current preset. Parameter ID for OnParameterChangeListener
+ */
+ public static final int PARAM_CURRENT_PRESET = 6;
+ /**
+ * Request number of presets. Parameter ID for OnParameterChangeListener
+ */
+ public static final int PARAM_GET_NUM_OF_PRESETS = 7;
+ /**
+ * Request preset name. Parameter ID for OnParameterChangeListener
+ */
+ public static final int PARAM_GET_PRESET_NAME = 8;
+ // used by setProperties()/getProperties
+ private static final int PARAM_PROPERTIES = 9;
+ /**
+ * Maximum size for preset name
+ */
+ public static final int PARAM_STRING_SIZE_MAX = 32;
+
+ /**
+ * Number of bands implemented by Equalizer engine
+ */
+ private short mNumBands = 0;
+
+ /**
+ * Number of presets implemented by Equalizer engine
+ */
+ private int mNumPresets;
+ /**
+ * Names of presets implemented by Equalizer engine
+ */
+ private String[] mPresetNames;
+
+ /**
+ * Registered listener for parameter changes.
+ */
+ private OnParameterChangeListener mParamListener = null;
+
+ /**
+     * Listener used internally to receive raw parameter change events from the AudioEffect super class
+ */
+ private BaseParameterListener mBaseParamListener = null;
+
+ /**
+ * Lock for access to mParamListener
+ */
+ private final Object mParamListenerLock = new Object();
+
+    /**
+     * Class constructor.
+     * @param priority the priority level requested by the application for controlling the Equalizer
+     * engine. As the same engine can be shared by several applications, this parameter indicates
+     * how much the requesting application needs control of effect parameters. The normal priority
+     * is 0, above normal is a positive number, below normal a negative number.
+     * @param audioSession system wide unique audio session identifier. The Equalizer will be
+     * attached to the MediaPlayer or AudioTrack in the same audio session.
+     *
+     * @throws java.lang.IllegalStateException
+     * @throws java.lang.IllegalArgumentException
+     * @throws java.lang.UnsupportedOperationException
+     * @throws java.lang.RuntimeException
+     */
+    public Equalizer(int priority, int audioSession)
+    throws IllegalStateException, IllegalArgumentException,
+            UnsupportedOperationException, RuntimeException {
+        super(EFFECT_TYPE_EQUALIZER, EFFECT_TYPE_NULL, priority, audioSession);
+
+        if (audioSession == 0) {
+            Log.w(TAG, "WARNING: attaching an Equalizer to global output mix is deprecated!");
+        }
+
+        // Populates and caches mNumBands.
+        getNumberOfBands();
+
+        mNumPresets = (int)getNumberOfPresets();
+
+        if (mNumPresets != 0) {
+            mPresetNames = new String[mNumPresets];
+            byte[] value = new byte[PARAM_STRING_SIZE_MAX];
+            int[] param = new int[2];
+            param[0] = PARAM_GET_PRESET_NAME;
+            for (int i = 0; i < mNumPresets; i++) {
+                param[1] = i;
+                checkStatus(getParameter(param, value));
+                // Find the NUL terminator of the C string returned by the engine.
+                // Bound the scan so a name that fills the whole buffer without a
+                // terminator cannot overrun the array (previously unbounded).
+                int length = 0;
+                while (length < value.length && value[length] != 0) length++;
+                // StandardCharsets.ISO_8859_1 cannot throw UnsupportedEncodingException,
+                // so the previous dead catch block is no longer needed.
+                mPresetNames[i] = new String(value, 0, length, StandardCharsets.ISO_8859_1);
+            }
+        }
+    }
+
+    /**
+     * Gets the number of frequency bands supported by the Equalizer engine.
+     * <p>The value is queried from the engine once and cached in {@code mNumBands}.
+     * @return the number of bands
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public short getNumberOfBands()
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        // Fast path: already cached (0 means "not yet queried").
+        if (mNumBands != 0) {
+            return mNumBands;
+        }
+        int[] param = new int[1];
+        param[0] = PARAM_NUM_BANDS;
+        short[] result = new short[1];
+        checkStatus(getParameter(param, result));
+        mNumBands = result[0];
+        return mNumBands;
+    }
+
+    /**
+     * Gets the level range for use by {@link #setBandLevel(short,short)}. The level is expressed
+     * in milliBel.
+     * @return a two-element short array: element 0 is the lower limit of the range,
+     * element 1 the upper limit.
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public short[] getBandLevelRange()
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        short[] range = new short[2];
+        checkStatus(getParameter(PARAM_LEVEL_RANGE, range));
+        return range;
+    }
+
+    /**
+     * Sets the given equalizer band to the given gain value.
+     * @param band frequency band that will have the new gain. The numbering of the bands starts
+     * from 0 and ends at (number of bands - 1).
+     * @param level new gain in millibels that will be set to the given band. getBandLevelRange()
+     * will define the maximum and minimum values.
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     * @see #getNumberOfBands()
+     */
+    public void setBandLevel(short band, short level)
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        // param = { parameter ID, band index }, value = { gain in millibels }.
+        checkStatus(setParameter(new int[] { PARAM_BAND_LEVEL, band },
+                new short[] { level }));
+    }
+
+    /**
+     * Gets the gain set for the given equalizer band.
+     * @param band frequency band whose gain is requested. The numbering of the bands starts
+     * from 0 and ends at (number of bands - 1).
+     * @return the gain in millibels of the given band.
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public short getBandLevel(short band)
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        int[] param = { PARAM_BAND_LEVEL, band };
+        short[] result = new short[1];
+        checkStatus(getParameter(param, result));
+        return result[0];
+    }
+
+
+    /**
+     * Gets the center frequency of the given band.
+     * @param band frequency band whose center frequency is requested. The numbering of the bands
+     * starts from 0 and ends at (number of bands - 1).
+     * @return the center frequency in milliHertz
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public int getCenterFreq(short band)
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        int[] param = { PARAM_CENTER_FREQ, band };
+        int[] result = new int[1];
+        checkStatus(getParameter(param, result));
+        return result[0];
+    }
+
+    /**
+     * Gets the frequency range of the given frequency band.
+     * @param band frequency band whose frequency range is requested. The numbering of the bands
+     * starts from 0 and ends at (number of bands - 1).
+     * @return a two-element int array with the range in milliHertz: element 0 is the
+     * lower limit of the range, element 1 the upper limit.
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public int[] getBandFreqRange(short band)
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        int[] param = { PARAM_BAND_FREQ_RANGE, band };
+        int[] range = new int[2];
+        checkStatus(getParameter(param, range));
+        return range;
+    }
+
+    /**
+     * Gets the band that has the most effect on the given frequency.
+     * @param frequency frequency in milliHertz which is to be equalized via the returned band.
+     * @return the frequency band that has most effect on the given frequency.
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public short getBand(int frequency)
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        int[] param = { PARAM_GET_BAND, frequency };
+        short[] result = new short[1];
+        checkStatus(getParameter(param, result));
+        return result[0];
+    }
+
+    /**
+     * Gets the current preset.
+     * @return the preset that is set at the moment.
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public short getCurrentPreset()
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        short[] preset = new short[1];
+        checkStatus(getParameter(PARAM_CURRENT_PRESET, preset));
+        return preset[0];
+    }
+
+    /**
+     * Sets the equalizer according to the given preset.
+     * @param preset new preset that will be taken into use. The valid range is [0,
+     * number of presets-1].
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     * @see #getNumberOfPresets()
+     */
+    public void usePreset(short preset)
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        // Range checking is presumably done by the engine; checkStatus() surfaces
+        // any rejection as an exception — TODO confirm against native implementation.
+        checkStatus(setParameter(PARAM_CURRENT_PRESET, preset));
+    }
+
+    /**
+     * Gets the total number of presets the equalizer supports. The presets will have indices
+     * [0, number of presets-1].
+     * @return the number of presets the equalizer supports.
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public short getNumberOfPresets()
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        short[] count = new short[1];
+        checkStatus(getParameter(PARAM_GET_NUM_OF_PRESETS, count));
+        return count[0];
+    }
+
+    /**
+     * Gets the preset name based on the index.
+     * @param preset index of the preset. The valid range is [0, number of presets-1].
+     * @return a string containing the name of the given preset, or an empty string if
+     * the index is out of range.
+     */
+    public String getPresetName(short preset) {
+        // Names were cached at construction time; no engine round-trip is needed.
+        return (preset >= 0 && preset < mNumPresets) ? mPresetNames[preset] : "";
+    }
+
+    /**
+     * The OnParameterChangeListener interface defines a method called by the Equalizer when a
+     * parameter value has changed.
+     */
+    public interface OnParameterChangeListener {
+        /**
+         * Method called when a parameter value has changed. The method is called only if the
+         * parameter was changed by another application having the control of the same
+         * Equalizer engine.
+         * @param effect the Equalizer on which the interface is registered.
+         * @param status status of the set parameter operation.
+         * @param param1 ID of the modified parameter. See {@link #PARAM_BAND_LEVEL} ...
+         * @param param2 additional parameter qualifier (e.g. the band for the band level
+         * parameter); -1 when the event carried no qualifier.
+         * @param value the new parameter value.
+         */
+        void onParameterChange(Equalizer effect, int status, int param1, int param2, int value);
+    }
+
+    /**
+     * Listener used internally to receive unformatted parameter change events from the
+     * AudioEffect super class.
+     */
+    private class BaseParameterListener implements AudioEffect.OnParameterChangeListener {
+        private BaseParameterListener() {
+        }
+
+        public void onParameterChange(AudioEffect effect, int status, byte[] param, byte[] value) {
+            OnParameterChangeListener l = null;
+
+            synchronized (mParamListenerLock) {
+                if (mParamListener != null) {
+                    l = mParamListener;
+                }
+            }
+            if (l != null) {
+                int p1 = -1;
+                int p2 = -1;
+                int v = -1;
+
+                // param carries one or two 32-bit IDs (e.g. PARAM_BAND_LEVEL + band index).
+                if (param.length >= 4) {
+                    p1 = byteArrayToInt(param, 0);
+                    if (param.length >= 8) {
+                        p2 = byteArrayToInt(param, 4);
+                    }
+                }
+                // The value width (16 or 32 bit) depends on the parameter that changed.
+                if (value.length == 2) {
+                    // Fixed: stray empty statement (";;") removed.
+                    v = (int) byteArrayToShort(value, 0);
+                } else if (value.length == 4) {
+                    v = byteArrayToInt(value, 0);
+                }
+
+                if (p1 != -1 && v != -1) {
+                    l.onParameterChange(Equalizer.this, status, p1, p2, v);
+                }
+            }
+        }
+    }
+
+    /**
+     * Registers an OnParameterChangeListener interface.
+     * <p>Only the first registration takes effect; subsequent calls are ignored.
+     * @param listener OnParameterChangeListener interface registered
+     */
+    public void setParameterListener(OnParameterChangeListener listener) {
+        synchronized (mParamListenerLock) {
+            if (mParamListener != null) {
+                return;
+            }
+            mParamListener = listener;
+            mBaseParamListener = new BaseParameterListener();
+            super.setParameterListener(mBaseParamListener);
+        }
+    }
+
+    /**
+     * The Settings class regroups all equalizer parameters. It is used in
+     * conjunction with the getProperties() and setProperties() methods to backup and restore
+     * all parameters in a single call.
+     */
+    public static class Settings {
+        public short curPreset;
+        public short numBands = 0;
+        public short[] bandLevels = null;
+
+        public Settings() {
+        }
+
+        /**
+         * Settings class constructor from a key=value; pairs formatted string. The string is
+         * typically returned by the Settings.toString() method.
+         * @throws IllegalArgumentException if the string is not correctly formatted.
+         */
+        public Settings(String settings) {
+            StringTokenizer st = new StringTokenizer(settings, "=;");
+            // Minimum layout: "Equalizer" tag + curPreset pair + numBands pair = 5 tokens.
+            // (Previously countTokens() was computed twice with the first result stored
+            // in an unused local.)
+            if (st.countTokens() < 5) {
+                throw new IllegalArgumentException("settings: " + settings);
+            }
+            String key = st.nextToken();
+            if (!key.equals("Equalizer")) {
+                throw new IllegalArgumentException(
+                        "invalid settings for Equalizer: " + key);
+            }
+            try {
+                key = st.nextToken();
+                if (!key.equals("curPreset")) {
+                    throw new IllegalArgumentException("invalid key name: " + key);
+                }
+                curPreset = Short.parseShort(st.nextToken());
+                key = st.nextToken();
+                if (!key.equals("numBands")) {
+                    throw new IllegalArgumentException("invalid key name: " + key);
+                }
+                numBands = Short.parseShort(st.nextToken());
+                // Each band contributes exactly one "bandNLevel=value" pair.
+                if (st.countTokens() != numBands * 2) {
+                    throw new IllegalArgumentException("settings: " + settings);
+                }
+                bandLevels = new short[numBands];
+                for (int i = 0; i < numBands; i++) {
+                    key = st.nextToken();
+                    if (!key.equals("band" + (i + 1) + "Level")) {
+                        throw new IllegalArgumentException("invalid key name: " + key);
+                    }
+                    bandLevels[i] = Short.parseShort(st.nextToken());
+                }
+            } catch (NumberFormatException nfe) {
+                throw new IllegalArgumentException("invalid value for key: " + key);
+            }
+        }
+
+        @Override
+        public String toString() {
+            // StringBuilder replaces the previous new String(...) plus per-band
+            // String.concat (O(n^2) copying); the emitted text is byte-identical.
+            StringBuilder sb = new StringBuilder("Equalizer");
+            sb.append(";curPreset=").append(curPreset);
+            sb.append(";numBands=").append(numBands);
+            for (int i = 0; i < numBands; i++) {
+                sb.append(";band").append(i + 1).append("Level=").append(bandLevels[i]);
+            }
+            return sb.toString();
+        }
+    };
+
+
+    /**
+     * Gets the equalizer properties. This method is useful when a snapshot of current
+     * equalizer settings must be saved by the application.
+     * @return an Equalizer.Settings object containing all current parameters values
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public Equalizer.Settings getProperties()
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        // Native layout: curPreset (2 bytes) + numBands (2 bytes) + one short per band.
+        byte[] param = new byte[4 + mNumBands * 2];
+        checkStatus(getParameter(PARAM_PROPERTIES, param));
+        Settings settings = new Settings();
+        settings.curPreset = byteArrayToShort(param, 0);
+        settings.numBands = byteArrayToShort(param, 2);
+        settings.bandLevels = new short[mNumBands];
+        for (int i = 0; i < mNumBands; i++) {
+            // Band levels start at offset 4, 2 bytes each.
+            settings.bandLevels[i] = byteArrayToShort(param, 4 + 2*i);
+        }
+        return settings;
+    }
+
+    /**
+     * Sets the equalizer properties. This method is useful when equalizer settings have to
+     * be applied from a previous backup.
+     * @param settings an Equalizer.Settings object containing the properties to apply
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException if the band count in {@code settings} does not match
+     * this engine's band count or the bandLevels array length.
+     * @throws UnsupportedOperationException
+     */
+    public void setProperties(Equalizer.Settings settings)
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        if (settings.numBands != settings.bandLevels.length ||
+            settings.numBands != mNumBands) {
+            throw new IllegalArgumentException("settings invalid band count: " +settings.numBands);
+        }
+
+        // Serialization order must match the offsets read back by getProperties():
+        // curPreset, numBands, then one short per band.
+        byte[] param = concatArrays(shortToByteArray(settings.curPreset),
+                                    shortToByteArray(mNumBands));
+        for (int i = 0; i < mNumBands; i++) {
+            param = concatArrays(param,
+                                 shortToByteArray(settings.bandLevels[i]));
+        }
+        checkStatus(setParameter(PARAM_PROPERTIES, param));
+    }
+}
diff --git a/android/media/audiofx/LoudnessEnhancer.java b/android/media/audiofx/LoudnessEnhancer.java
new file mode 100644
index 00000000..7dc41753
--- /dev/null
+++ b/android/media/audiofx/LoudnessEnhancer.java
@@ -0,0 +1,290 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.audiofx;
+
+import android.media.AudioTrack;
+import android.media.MediaPlayer;
+import android.media.audiofx.AudioEffect;
+import android.util.Log;
+
+import java.util.StringTokenizer;
+
+
+/**
+ * LoudnessEnhancer is an audio effect for increasing audio loudness.
+ * The processing is parametrized by a target gain value, which determines the maximum amount
+ * by which an audio signal will be amplified; signals amplified outside of the sample
+ * range supported by the platform are compressed.
+ * An application creates a LoudnessEnhancer object to instantiate and control
+ * this audio effect in the audio framework.
+ * To attach the LoudnessEnhancer to a particular AudioTrack or MediaPlayer,
+ * specify the audio session ID of this AudioTrack or MediaPlayer when constructing the effect
+ * (see {@link AudioTrack#getAudioSessionId()} and {@link MediaPlayer#getAudioSessionId()}).
+ */
+
+public class LoudnessEnhancer extends AudioEffect {
+
+ private final static String TAG = "LoudnessEnhancer";
+
+ // These parameter constants must be synchronized with those in
+ // /system/media/audio_effects/include/audio_effects/effect_loudnessenhancer.h
+ /**
+ * The maximum gain applied to the signal to process.
+ * It is expressed in millibels (100mB = 1dB) where 0mB corresponds to no amplification.
+ */
+ public static final int PARAM_TARGET_GAIN_MB = 0;
+
+ /**
+ * Registered listener for parameter changes.
+ */
+ private OnParameterChangeListener mParamListener = null;
+
+ /**
+ * Listener used internally to receive raw parameter change events
+ * from AudioEffect super class
+ */
+ private BaseParameterListener mBaseParamListener = null;
+
+ /**
+ * Lock for access to mParamListener
+ */
+ private final Object mParamListenerLock = new Object();
+
+ /**
+ * Class constructor.
+ * @param audioSession system-wide unique audio session identifier. The LoudnessEnhancer
+ * will be attached to the MediaPlayer or AudioTrack in the same audio session.
+ *
+ * @throws java.lang.IllegalStateException
+ * @throws java.lang.IllegalArgumentException
+ * @throws java.lang.UnsupportedOperationException
+ * @throws java.lang.RuntimeException
+ */
+ public LoudnessEnhancer(int audioSession)
+ throws IllegalStateException, IllegalArgumentException,
+ UnsupportedOperationException, RuntimeException {
+ super(EFFECT_TYPE_LOUDNESS_ENHANCER, EFFECT_TYPE_NULL, 0, audioSession);
+
+ if (audioSession == 0) {
+ Log.w(TAG, "WARNING: attaching a LoudnessEnhancer to global output mix is deprecated!");
+ }
+ }
+
+ /**
+ * @hide
+ * Class constructor for the LoudnessEnhancer audio effect.
+ * @param priority the priority level requested by the application for controlling the
+ * LoudnessEnhancer engine. As the same engine can be shared by several applications,
+ * this parameter indicates how much the requesting application needs control of effect
+ * parameters. The normal priority is 0, above normal is a positive number, below normal a
+ * negative number.
+ * @param audioSession system-wide unique audio session identifier. The LoudnessEnhancer
+ * will be attached to the MediaPlayer or AudioTrack in the same audio session.
+ *
+ * @throws java.lang.IllegalStateException
+ * @throws java.lang.IllegalArgumentException
+ * @throws java.lang.UnsupportedOperationException
+ * @throws java.lang.RuntimeException
+ */
+ public LoudnessEnhancer(int priority, int audioSession)
+ throws IllegalStateException, IllegalArgumentException,
+ UnsupportedOperationException, RuntimeException {
+ super(EFFECT_TYPE_LOUDNESS_ENHANCER, EFFECT_TYPE_NULL, priority, audioSession);
+
+ if (audioSession == 0) {
+ Log.w(TAG, "WARNING: attaching a LoudnessEnhancer to global output mix is deprecated!");
+ }
+ }
+
+ /**
+ * Set the target gain for the audio effect.
+ * The target gain is the maximum value by which a sample value will be amplified when the
+ * effect is enabled.
+ * @param gainmB the effect target gain expressed in mB. 0mB corresponds to no amplification.
+ * @throws IllegalStateException
+ * @throws IllegalArgumentException
+ * @throws UnsupportedOperationException
+ */
+ public void setTargetGain(int gainmB)
+ throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+ checkStatus(setParameter(PARAM_TARGET_GAIN_MB, gainmB));
+ }
+
+ /**
+ * Return the target gain.
+ * @return the effect target gain expressed in mB.
+ * @throws IllegalStateException
+ * @throws IllegalArgumentException
+ * @throws UnsupportedOperationException
+ */
+ public float getTargetGain()
+ throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+ int[] value = new int[1];
+ checkStatus(getParameter(PARAM_TARGET_GAIN_MB, value));
+ return value[0];
+ }
+
+ /**
+ * @hide
+ * The OnParameterChangeListener interface defines a method called by the LoudnessEnhancer
+ * when a parameter value has changed.
+ */
+ public interface OnParameterChangeListener {
+ /**
+ * Method called when a parameter value has changed. The method is called only if the
+ * parameter was changed by another application having the control of the same
+ * LoudnessEnhancer engine.
+ * @param effect the LoudnessEnhancer on which the interface is registered.
+ * @param param ID of the modified parameter. See {@link #PARAM_TARGET_GAIN_MB} ...
+ * @param value the new parameter value.
+ */
+ void onParameterChange(LoudnessEnhancer effect, int param, int value);
+ }
+
+ /**
+ * Listener used internally to receive unformatted parameter change events from AudioEffect
+ * super class.
+ */
+ private class BaseParameterListener implements AudioEffect.OnParameterChangeListener {
+ private BaseParameterListener() {
+
+ }
+ public void onParameterChange(AudioEffect effect, int status, byte[] param, byte[] value) {
+ // only notify when the parameter was successfully changed
+ if (status != AudioEffect.SUCCESS) {
+ return;
+ }
+ OnParameterChangeListener l = null;
+ synchronized (mParamListenerLock) {
+ if (mParamListener != null) {
+ l = mParamListener;
+ }
+ }
+ if (l != null) {
+ int p = -1;
+ int v = Integer.MIN_VALUE;
+
+ if (param.length == 4) {
+ p = byteArrayToInt(param, 0);
+ }
+ if (value.length == 4) {
+ v = byteArrayToInt(value, 0);
+ }
+ if (p != -1 && v != Integer.MIN_VALUE) {
+ l.onParameterChange(LoudnessEnhancer.this, p, v);
+ }
+ }
+ }
+ }
+
+ /**
+ * @hide
+ * Registers an OnParameterChangeListener interface.
+ * @param listener OnParameterChangeListener interface registered
+ */
+ public void setParameterListener(OnParameterChangeListener listener) {
+ synchronized (mParamListenerLock) {
+ if (mParamListener == null) {
+ mBaseParamListener = new BaseParameterListener();
+ super.setParameterListener(mBaseParamListener);
+ }
+ mParamListener = listener;
+ }
+ }
+
+ /**
+ * @hide
+ * The Settings class regroups the LoudnessEnhancer parameters. It is used in
+ * conjunction with the getProperties() and setProperties() methods to backup and restore
+ * all parameters in a single call.
+ */
+ public static class Settings {
+ public int targetGainmB;
+
+ public Settings() {
+ }
+
+ /**
+ * Settings class constructor from a key=value; pairs formatted string. The string is
+ * typically returned by Settings.toString() method.
+ * @throws IllegalArgumentException if the string is not correctly formatted.
+ */
+ public Settings(String settings) {
+ StringTokenizer st = new StringTokenizer(settings, "=;");
+ //int tokens = st.countTokens();
+ if (st.countTokens() != 3) {
+ throw new IllegalArgumentException("settings: " + settings);
+ }
+ String key = st.nextToken();
+ if (!key.equals("LoudnessEnhancer")) {
+ throw new IllegalArgumentException(
+ "invalid settings for LoudnessEnhancer: " + key);
+ }
+ try {
+ key = st.nextToken();
+ if (!key.equals("targetGainmB")) {
+ throw new IllegalArgumentException("invalid key name: " + key);
+ }
+ targetGainmB = Integer.parseInt(st.nextToken());
+ } catch (NumberFormatException nfe) {
+ throw new IllegalArgumentException("invalid value for key: " + key);
+ }
+ }
+
+ @Override
+ public String toString() {
+ String str = new String (
+ "LoudnessEnhancer"+
+ ";targetGainmB="+Integer.toString(targetGainmB)
+ );
+ return str;
+ }
+ };
+
+
+ /**
+ * @hide
+ * Gets the LoudnessEnhancer properties. This method is useful when a snapshot of current
+ * effect settings must be saved by the application.
+ * @return a LoudnessEnhancer.Settings object containing all current parameters values
+ * @throws IllegalStateException
+ * @throws IllegalArgumentException
+ * @throws UnsupportedOperationException
+ */
+ public LoudnessEnhancer.Settings getProperties()
+ throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+ Settings settings = new Settings();
+ int[] value = new int[1];
+ checkStatus(getParameter(PARAM_TARGET_GAIN_MB, value));
+ settings.targetGainmB = value[0];
+ return settings;
+ }
+
+ /**
+ * @hide
+ * Sets the LoudnessEnhancer properties. This method is useful when LoudnessEnhancer settings
+ * have to be applied from a previous backup.
+ * @param settings a LoudnessEnhancer.Settings object containing the properties to apply
+ * @throws IllegalStateException
+ * @throws IllegalArgumentException
+ * @throws UnsupportedOperationException
+ */
+ public void setProperties(LoudnessEnhancer.Settings settings)
+ throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+ checkStatus(setParameter(PARAM_TARGET_GAIN_MB, settings.targetGainmB));
+ }
+}
diff --git a/android/media/audiofx/NoiseSuppressor.java b/android/media/audiofx/NoiseSuppressor.java
new file mode 100644
index 00000000..70cc87cc
--- /dev/null
+++ b/android/media/audiofx/NoiseSuppressor.java
@@ -0,0 +1,98 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.audiofx;
+
+import android.util.Log;
+
+/**
+ * Noise Suppressor (NS).
+ * <p>Noise suppression (NS) is an audio pre-processor which removes background noise from the
+ * captured signal. The component of the signal considered as noise can be either stationary
+ * (car/airplane engine, AC system) or non-stationary (other people's conversations, car horn) for
+ * more advanced implementations.
+ * <p>NS is mostly used by voice communication applications (voice chat, video conferencing,
+ * SIP calls).
+ * <p>An application creates a NoiseSuppressor object to instantiate and control an NS
+ * engine in the audio framework.
+ * <p>To attach the NoiseSuppressor to a particular {@link android.media.AudioRecord},
+ * specify the audio session ID of this AudioRecord when creating the NoiseSuppressor.
+ * The audio session is retrieved by calling
+ * {@link android.media.AudioRecord#getAudioSessionId()} on the AudioRecord instance.
+ * <p>On some devices, NS can be inserted by default in the capture path by the platform
+ * according to the {@link android.media.MediaRecorder.AudioSource} used. The application should
+ * call NoiseSuppressor.getEnabled() after creating the NS to check the default NS activation
+ * state on a particular AudioRecord session.
+ * <p>See {@link android.media.audiofx.AudioEffect} class for more details on
+ * controlling audio effects.
+ */
+
+public class NoiseSuppressor extends AudioEffect {
+
+ private final static String TAG = "NoiseSuppressor";
+
+ /**
+ * Checks if the device implements noise suppression.
+ * @return true if the device implements noise suppression, false otherwise.
+ */
+ public static boolean isAvailable() {
+ return AudioEffect.isEffectTypeAvailable(AudioEffect.EFFECT_TYPE_NS);
+ }
+
+ /**
+ * Creates a NoiseSuppressor and attaches it to the AudioRecord on the audio
+ * session specified.
+ * @param audioSession system wide unique audio session identifier. The NoiseSuppressor
+ * will be applied to the AudioRecord with the same audio session.
+ * @return NoiseSuppressor created or null if the device does not implement noise
+ * suppression.
+ */
+ public static NoiseSuppressor create(int audioSession) {
+ NoiseSuppressor ns = null;
+ try {
+ ns = new NoiseSuppressor(audioSession);
+ } catch (IllegalArgumentException e) {
+ Log.w(TAG, "not implemented on this device "+ns);
+ } catch (UnsupportedOperationException e) {
+ Log.w(TAG, "not enough resources");
+ } catch (RuntimeException e) {
+ Log.w(TAG, "not enough memory");
+ }
+ return ns;
+ }
+
+ /**
+ * Class constructor.
+ * <p> The constructor is not guaranteed to succeed and throws the following exceptions:
+ * <ul>
+ * <li>IllegalArgumentException is thrown if the device does not implement an NS</li>
+ * <li>UnsupportedOperationException is thrown if the resources allocated to audio
+ * pre-processing are currently exceeded.</li>
+ * <li>RuntimeException is thrown if a memory allocation error occurs.</li>
+ * </ul>
+ *
+ * @param audioSession system wide unique audio session identifier. The NoiseSuppressor
+ * will be applied to the AudioRecord with the same audio session.
+ *
+ * @throws java.lang.IllegalArgumentException
+ * @throws java.lang.UnsupportedOperationException
+ * @throws java.lang.RuntimeException
+ */
+ private NoiseSuppressor(int audioSession)
+ throws IllegalArgumentException, UnsupportedOperationException, RuntimeException {
+ super(EFFECT_TYPE_NS, EFFECT_TYPE_NULL, 0, audioSession);
+ }
+}
diff --git a/android/media/audiofx/PresetReverb.java b/android/media/audiofx/PresetReverb.java
new file mode 100644
index 00000000..ef916678
--- /dev/null
+++ b/android/media/audiofx/PresetReverb.java
@@ -0,0 +1,303 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.audiofx;
+
+import android.media.audiofx.AudioEffect;
+import java.util.StringTokenizer;
+
+
+/**
+ * A sound generated within a room travels in many directions. The listener first hears the
+ * direct sound from the source itself. Later, he or she hears discrete echoes caused by sound
+ * bouncing off nearby walls, the ceiling and the floor. As sound waves arrive after
+ * undergoing more and more reflections, individual reflections become indistinguishable and
+ * the listener hears continuous reverberation that decays over time.
+ * Reverb is vital for modeling a listener's environment. It can be used in music applications
+ * to simulate music being played back in various environments, or in games to immerse the
+ * listener within the game's environment.
+ * The PresetReverb class allows an application to configure the global reverb using a reverb preset.
+ * This is primarily used for adding some reverb in a music playback context. Applications
+ * requiring control over a more advanced environmental reverb are advised to use the
+ * {@link android.media.audiofx.EnvironmentalReverb} class.
+ * <p>An application creates a PresetReverb object to instantiate and control a reverb engine in the
+ * audio framework.
+ * <p>The methods, parameter types and units exposed by the PresetReverb implementation are
+ * directly mapping those defined by the OpenSL ES 1.0.1 Specification
+ * (http://www.khronos.org/opensles/) for the SLPresetReverbItf interface.
+ * Please refer to this specification for more details.
+ * <p>The PresetReverb is an output mix auxiliary effect and should be created on
+ * Audio session 0. In order for a MediaPlayer or AudioTrack to be fed into this effect,
+ * they must be explicitly attached to it and a send level must be specified. Use the effect ID
+ * returned by getId() method to designate this particular effect when attaching it to the
+ * MediaPlayer or AudioTrack.
+ * <p>Creating a reverb on the output mix (audio session 0) requires permission
+ * {@link android.Manifest.permission#MODIFY_AUDIO_SETTINGS}
+ * <p>See {@link android.media.audiofx.AudioEffect} class for more details on controlling
+ * audio effects.
+ */
+
+public class PresetReverb extends AudioEffect {
+
+ private final static String TAG = "PresetReverb";
+
+ // These constants must be synchronized with those in
+ // frameworks/base/include/media/EffectPresetReverbApi.h
+
+ /**
+ * Preset. Parameter ID for
+ * {@link android.media.audiofx.PresetReverb.OnParameterChangeListener}
+ */
+ public static final int PARAM_PRESET = 0;
+
+ /**
+ * No reverb or reflections
+ */
+ public static final short PRESET_NONE = 0;
+ /**
+ * Reverb preset representing a small room less than five meters in length
+ */
+ public static final short PRESET_SMALLROOM = 1;
+ /**
+ * Reverb preset representing a medium room with a length of ten meters or less
+ */
+ public static final short PRESET_MEDIUMROOM = 2;
+ /**
+ * Reverb preset representing a large-sized room suitable for live performances
+ */
+ public static final short PRESET_LARGEROOM = 3;
+ /**
+ * Reverb preset representing a medium-sized hall
+ */
+ public static final short PRESET_MEDIUMHALL = 4;
+ /**
+ * Reverb preset representing a large-sized hall suitable for a full orchestra
+ */
+ public static final short PRESET_LARGEHALL = 5;
+ /**
+ * Reverb preset representing a synthesis of the traditional plate reverb
+ */
+ public static final short PRESET_PLATE = 6;
+
+ /**
+ * Registered listener for parameter changes.
+ */
+ private OnParameterChangeListener mParamListener = null;
+
+ /**
+ * Listener used internally to receive raw parameter change events from AudioEffect super class
+ */
+ private BaseParameterListener mBaseParamListener = null;
+
+ /**
+ * Lock for access to mParamListener
+ */
+ private final Object mParamListenerLock = new Object();
+
+ /**
+ * Class constructor.
+ * @param priority the priority level requested by the application for controlling the
+ * PresetReverb engine. As the same engine can be shared by several applications, this
+ * parameter indicates how much the requesting application needs control of effect parameters.
+ * The normal priority is 0, above normal is a positive number, below normal a negative number.
+ * @param audioSession system wide unique audio session identifier. If audioSession
+ * is not 0, the PresetReverb will be attached to the MediaPlayer or AudioTrack in the
+ * same audio session. Otherwise, the PresetReverb will apply to the output mix.
+ * As the PresetReverb is an auxiliary effect it is recommended to instantiate it on
+ * audio session 0 and to attach it to the MediaPlayer auxiliary output.
+ *
+ * @throws java.lang.IllegalArgumentException
+ * @throws java.lang.UnsupportedOperationException
+ * @throws java.lang.RuntimeException
+ */
+ public PresetReverb(int priority, int audioSession)
+ throws IllegalArgumentException, UnsupportedOperationException, RuntimeException {
+ super(EFFECT_TYPE_PRESET_REVERB, EFFECT_TYPE_NULL, priority, audioSession);
+ }
+
+ /**
+ * Enables a preset on the reverb.
+ * <p>The reverb PRESET_NONE disables any reverb from the current output but does not free the
+ * resources associated with the reverb. For an application to signal to the implementation
+ * to free the resources, it must call the release() method.
+ * @param preset this must be one of the preset constants defined in this class.
+ * e.g. {@link #PRESET_SMALLROOM}
+ * @throws IllegalStateException
+ * @throws IllegalArgumentException
+ * @throws UnsupportedOperationException
+ */
+ public void setPreset(short preset)
+ throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+ checkStatus(setParameter(PARAM_PRESET, preset));
+ }
+
+ /**
+ * Gets current reverb preset.
+ * @return the preset that is set at the moment.
+ * @throws IllegalStateException
+ * @throws IllegalArgumentException
+ * @throws UnsupportedOperationException
+ */
+ public short getPreset()
+ throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+ short[] value = new short[1];
+ checkStatus(getParameter(PARAM_PRESET, value));
+ return value[0];
+ }
+
+ /**
+ * The OnParameterChangeListener interface defines a method called by the PresetReverb
+ * when a parameter value has changed.
+ */
+ public interface OnParameterChangeListener {
+ /**
+ * Method called when a parameter value has changed. The method is called only if the
+ * parameter was changed by another application having the control of the same
+ * PresetReverb engine.
+ * @param effect the PresetReverb on which the interface is registered.
+ * @param status status of the set parameter operation.
+ * @param param ID of the modified parameter. See {@link #PARAM_PRESET} ...
+ * @param value the new parameter value.
+ */
+ void onParameterChange(PresetReverb effect, int status, int param, short value);
+ }
+
+ /**
+ * Listener used internally to receive unformatted parameter change events from AudioEffect
+ * super class.
+ */
+ private class BaseParameterListener implements AudioEffect.OnParameterChangeListener {
+ private BaseParameterListener() {
+
+ }
+ public void onParameterChange(AudioEffect effect, int status, byte[] param, byte[] value) {
+ OnParameterChangeListener l = null;
+
+ synchronized (mParamListenerLock) {
+ if (mParamListener != null) {
+ l = mParamListener;
+ }
+ }
+ if (l != null) {
+ int p = -1;
+ short v = -1;
+
+ if (param.length == 4) {
+ p = byteArrayToInt(param, 0);
+ }
+ if (value.length == 2) {
+ v = byteArrayToShort(value, 0);
+ }
+ if (p != -1 && v != -1) {
+ l.onParameterChange(PresetReverb.this, status, p, v);
+ }
+ }
+ }
+ }
+
+ /**
+ * Registers an OnParameterChangeListener interface.
+ * @param listener OnParameterChangeListener interface registered
+ */
+ public void setParameterListener(OnParameterChangeListener listener) {
+ synchronized (mParamListenerLock) {
+ if (mParamListener == null) {
+ mParamListener = listener;
+ mBaseParamListener = new BaseParameterListener();
+ super.setParameterListener(mBaseParamListener);
+ }
+ }
+ }
+
+ /**
+ * The Settings class regroups all preset reverb parameters. It is used in
+ * conjunction with getProperties() and setProperties() methods to backup and restore
+ * all parameters in a single call.
+ */
+ public static class Settings {
+ public short preset;
+
+ public Settings() {
+ }
+
+ /**
+ * Settings class constructor from a key=value; pairs formatted string. The string is
+ * typically returned by Settings.toString() method.
+ * @throws IllegalArgumentException if the string is not correctly formatted.
+ */
+ public Settings(String settings) {
+ StringTokenizer st = new StringTokenizer(settings, "=;");
+ int tokens = st.countTokens();
+ if (st.countTokens() != 3) {
+ throw new IllegalArgumentException("settings: " + settings);
+ }
+ String key = st.nextToken();
+ if (!key.equals("PresetReverb")) {
+ throw new IllegalArgumentException(
+ "invalid settings for PresetReverb: " + key);
+ }
+ try {
+ key = st.nextToken();
+ if (!key.equals("preset")) {
+ throw new IllegalArgumentException("invalid key name: " + key);
+ }
+ preset = Short.parseShort(st.nextToken());
+ } catch (NumberFormatException nfe) {
+ throw new IllegalArgumentException("invalid value for key: " + key);
+ }
+ }
+
+ @Override
+ public String toString() {
+ String str = new String (
+ "PresetReverb"+
+ ";preset="+Short.toString(preset)
+ );
+ return str;
+ }
+ };
+
+
+ /**
+ * Gets the preset reverb properties. This method is useful when a snapshot of current
+ * preset reverb settings must be saved by the application.
+ * @return a PresetReverb.Settings object containing all current parameters values
+ * @throws IllegalStateException
+ * @throws IllegalArgumentException
+ * @throws UnsupportedOperationException
+ */
+ public PresetReverb.Settings getProperties()
+ throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+ Settings settings = new Settings();
+ short[] value = new short[1];
+ checkStatus(getParameter(PARAM_PRESET, value));
+ settings.preset = value[0];
+ return settings;
+ }
+
+ /**
+ * Sets the preset reverb properties. This method is useful when preset reverb settings have to
+ * be applied from a previous backup.
+ * @param settings a PresetReverb.Settings object containing the properties to apply
+ * @throws IllegalStateException
+ * @throws IllegalArgumentException
+ * @throws UnsupportedOperationException
+ */
+ public void setProperties(PresetReverb.Settings settings)
+ throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+ checkStatus(setParameter(PARAM_PRESET, settings.preset));
+ }
+}
diff --git a/android/media/audiofx/Virtualizer.java b/android/media/audiofx/Virtualizer.java
new file mode 100644
index 00000000..74b6fc13
--- /dev/null
+++ b/android/media/audiofx/Virtualizer.java
@@ -0,0 +1,629 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.audiofx;
+
+import android.annotation.IntDef;
+import android.media.AudioDeviceInfo;
+import android.media.AudioFormat;
+import android.media.audiofx.AudioEffect;
+import android.util.Log;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.StringTokenizer;
+
+
+/**
+ * An audio virtualizer is a general name for an effect to spatialize audio channels. The exact
+ * behavior of this effect is dependent on the number of audio input channels and the types and
+ * number of audio output channels of the device. For example, in the case of a stereo input and
+ * stereo headphone output, a stereo widening effect is used when this effect is turned on.
+ * <p>An application creates a Virtualizer object to instantiate and control a virtualizer engine
+ * in the audio framework.
+ * <p>The methods, parameter types and units exposed by the Virtualizer implementation directly
+ * map to those defined by the OpenSL ES 1.0.1 Specification (http://www.khronos.org/opensles/)
+ * for the SLVirtualizerItf interface. Please refer to this specification for more details.
+ * <p>To attach the Virtualizer to a particular AudioTrack or MediaPlayer, specify the audio session
+ * ID of this AudioTrack or MediaPlayer when constructing the Virtualizer.
+ * <p>NOTE: attaching a Virtualizer to the global audio output mix by use of session 0 is
+ * deprecated.
+ * <p>See {@link android.media.MediaPlayer#getAudioSessionId()} for details on audio sessions.
+ * <p>See {@link android.media.audiofx.AudioEffect} class for more details on controlling
+ * audio effects.
+ */
+
+public class Virtualizer extends AudioEffect {
+
+    private final static String TAG = "Virtualizer";
+    private final static boolean DEBUG = false;
+
+    // These constants must be synchronized with those in
+    // system/media/audio_effects/include/audio_effects/effect_virtualizer.h
+    /**
+     * Is strength parameter supported by virtualizer engine. Parameter ID for getParameter().
+     */
+    public static final int PARAM_STRENGTH_SUPPORTED = 0;
+    /**
+     * Virtualizer effect strength. Parameter ID for
+     * {@link android.media.audiofx.Virtualizer.OnParameterChangeListener}
+     */
+    public static final int PARAM_STRENGTH = 1;
+    /**
+     * @hide
+     * Parameter ID to query the virtual speaker angles for a channel mask / device configuration.
+     */
+    public static final int PARAM_VIRTUAL_SPEAKER_ANGLES = 2;
+    /**
+     * @hide
+     * Parameter ID to force the virtualization mode to be that of a specific device
+     */
+    public static final int PARAM_FORCE_VIRTUALIZATION_MODE = 3;
+    /**
+     * @hide
+     * Parameter ID to query the current virtualization mode.
+     */
+    public static final int PARAM_VIRTUALIZATION_MODE = 4;
+
+    /**
+     * Indicates if strength parameter is supported by the virtualizer engine
+     */
+    private boolean mStrengthSupported = false;
+
+    /**
+     * Registered listener for parameter changes.
+     */
+    private OnParameterChangeListener mParamListener = null;
+
+    /**
+     * Listener used internally to receive raw parameter change events from the AudioEffect
+     * super class.
+     */
+    private BaseParameterListener mBaseParamListener = null;
+
+    /**
+     * Lock for access to mParamListener
+     */
+    private final Object mParamListenerLock = new Object();
+
+    /**
+     * Class constructor.
+     * @param priority the priority level requested by the application for controlling the
+     * Virtualizer engine. As the same engine can be shared by several applications, this
+     * parameter indicates how much the requesting application needs control of effect parameters.
+     * The normal priority is 0, above normal is a positive number, below normal a negative number.
+     * @param audioSession system wide unique audio session identifier. The Virtualizer will
+     * be attached to the MediaPlayer or AudioTrack in the same audio session.
+     *
+     * @throws java.lang.IllegalStateException
+     * @throws java.lang.IllegalArgumentException
+     * @throws java.lang.UnsupportedOperationException
+     * @throws java.lang.RuntimeException
+     */
+    public Virtualizer(int priority, int audioSession)
+    throws IllegalStateException, IllegalArgumentException,
+            UnsupportedOperationException, RuntimeException {
+        super(EFFECT_TYPE_VIRTUALIZER, EFFECT_TYPE_NULL, priority, audioSession);
+
+        if (audioSession == 0) {
+            Log.w(TAG, "WARNING: attaching a Virtualizer to global output mix is deprecated!");
+        }
+
+        // Query once whether the engine supports a variable strength and cache the answer.
+        int[] value = new int[1];
+        checkStatus(getParameter(PARAM_STRENGTH_SUPPORTED, value));
+        mStrengthSupported = (value[0] != 0);
+    }
+
+    /**
+     * Indicates whether setting strength is supported. If this method returns false, only one
+     * strength is supported and the setStrength() method always rounds to that value.
+     * @return true if the strength parameter is supported, false otherwise
+     */
+    public boolean getStrengthSupported() {
+        return mStrengthSupported;
+    }
+
+    /**
+     * Sets the strength of the virtualizer effect. If the implementation does not support per mille
+     * accuracy for setting the strength, it is allowed to round the given strength to the nearest
+     * supported value. You can use the {@link #getRoundedStrength()} method to query the
+     * (possibly rounded) value that was actually set.
+     * @param strength strength of the effect. The valid range for strength is [0, 1000],
+     * where 0 per mille designates the mildest effect and 1000 per mille designates the strongest.
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public void setStrength(short strength)
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        checkStatus(setParameter(PARAM_STRENGTH, strength));
+    }
+
+    /**
+     * Gets the current strength of the effect.
+     * @return the strength of the effect. The valid range for strength is [0, 1000], where 0 per
+     * mille designates the mildest effect and 1000 per mille the strongest
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public short getRoundedStrength()
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        short[] value = new short[1];
+        checkStatus(getParameter(PARAM_STRENGTH, value));
+        return value[0];
+    }
+
+    /**
+     * Checks if a configuration is supported, and queries the virtual speaker angles.
+     * @param inputChannelMask channel mask of the content to virtualize
+     * @param deviceType output device type (one of the AudioDeviceInfo.TYPE_* constants)
+     * @param angles if non-null: array in which the angles will be written. If null, no angles
+     * are returned
+     * @return true if the combination of channel mask and output device type is supported, false
+     * otherwise
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    private boolean getAnglesInt(int inputChannelMask, int deviceType, int[] angles)
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        // parameter check
+        if (inputChannelMask == AudioFormat.CHANNEL_INVALID) {
+            throw (new IllegalArgumentException(
+                    "Virtualizer: illegal CHANNEL_INVALID channel mask"));
+        }
+        int channelMask = inputChannelMask == AudioFormat.CHANNEL_OUT_DEFAULT ?
+                AudioFormat.CHANNEL_OUT_STEREO : inputChannelMask;
+        int nbChannels = AudioFormat.channelCountFromOutChannelMask(channelMask);
+        // three ints are written back per channel: speaker mask, azimuth, elevation
+        if ((angles != null) && (angles.length < (nbChannels * 3))) {
+            Log.e(TAG, "Size of array for angles cannot accommodate number of channels in mask ("
+                    + nbChannels + ")");
+            throw (new IllegalArgumentException(
+                    "Virtualizer: array for channel / angle pairs is too small: is " + angles.length
+                    + ", should be " + (nbChannels * 3)));
+        }
+
+        ByteBuffer paramsConverter = ByteBuffer.allocate(3 /* param + mask + device*/ * 4);
+        paramsConverter.order(ByteOrder.nativeOrder());
+        paramsConverter.putInt(PARAM_VIRTUAL_SPEAKER_ANGLES);
+        // convert channel mask to internal native representation
+        paramsConverter.putInt(AudioFormat.convertChannelOutMaskToNativeMask(channelMask));
+        // convert Java device type to internal representation
+        paramsConverter.putInt(AudioDeviceInfo.convertDeviceTypeToInternalDevice(deviceType));
+        // allocate an array to store the results
+        byte[] result = new byte[nbChannels * 4/*int to byte*/ * 3/*for mask, azimuth, elevation*/];
+
+        // call into the effect framework
+        int status = getParameter(paramsConverter.array(), result);
+        if (DEBUG) {
+            Log.v(TAG, "getAngles(0x" + Integer.toHexString(inputChannelMask) + ", 0x"
+                    + Integer.toHexString(deviceType) + ") returns " + status);
+        }
+
+        if (status >= 0) {
+            if (angles != null) {
+                // convert and copy the results
+                ByteBuffer resultConverter = ByteBuffer.wrap(result);
+                resultConverter.order(ByteOrder.nativeOrder());
+                for (int i = 0 ; i < nbChannels ; i++) {
+                    // write the channel mask
+                    angles[3 * i] = AudioFormat.convertNativeChannelMaskToOutMask(
+                            resultConverter.getInt((i * 4 * 3)));
+                    // write the azimuth
+                    angles[3 * i + 1] = resultConverter.getInt(i * 4 * 3 + 4);
+                    // write the elevation
+                    angles[3 * i + 2] = resultConverter.getInt(i * 4 * 3 + 8);
+                    if (DEBUG) {
+                        Log.v(TAG, "channel 0x" + Integer.toHexString(angles[3*i]).toUpperCase()
+                                + " at az=" + angles[3*i+1] + "deg"
+                                + " elev=" + angles[3*i+2] + "deg");
+                    }
+                }
+            }
+            return true;
+        } else if (status == AudioEffect.ERROR_BAD_VALUE) {
+            // a BAD_VALUE return from getParameter indicates the configuration is not supported
+            // don't throw an exception, just return false
+            return false;
+        } else {
+            // something wrong may have happened
+            checkStatus(status);
+        }
+        // unexpected virtualizer behavior
+        Log.e(TAG, "unexpected status code " + status
+                + " after getParameter(PARAM_VIRTUAL_SPEAKER_ANGLES)");
+        return false;
+    }
+
+    /**
+     * A virtualization mode indicating virtualization processing is not active.
+     * See {@link #getVirtualizationMode()} as one of the possible return value.
+     */
+    public static final int VIRTUALIZATION_MODE_OFF = 0;
+
+    /**
+     * A virtualization mode used to indicate the virtualizer effect must stop forcing the
+     * processing to a particular mode in {@link #forceVirtualizationMode(int)}.
+     */
+    public static final int VIRTUALIZATION_MODE_AUTO = 1;
+    /**
+     * A virtualization mode typically used over headphones.
+     * Binaural virtualization describes an audio processing configuration for virtualization
+     * where the left and right channels are respectively reaching the left and right ear of the
+     * user, without also feeding the opposite ear (as is the case when listening over speakers).
+     * <p>Such a mode is therefore meant to be used when audio is playing over stereo wired
+     * headphones or headsets, but also stereo headphones through a wireless A2DP Bluetooth link.
+     * <p>See {@link #canVirtualize(int, int)} to verify this mode is supported by this Virtualizer.
+     */
+    public final static int VIRTUALIZATION_MODE_BINAURAL = 2;
+
+    /**
+     * A virtualization mode typically used over speakers.
+     * Transaural virtualization describes an audio processing configuration that differs from
+     * binaural (as described in {@link #VIRTUALIZATION_MODE_BINAURAL} in that cross-talk is
+     * present, i.e. audio played from the left channel also reaches the right ear of the user,
+     * and vice-versa.
+     * <p>When supported, such a mode is therefore meant to be used when audio is playing over the
+     * built-in stereo speakers of a device, if they are featured.
+     * <p>See {@link #canVirtualize(int, int)} to verify this mode is supported by this Virtualizer.
+     */
+    public final static int VIRTUALIZATION_MODE_TRANSAURAL = 3;
+
+    /** @hide */
+    @IntDef( {
+        VIRTUALIZATION_MODE_BINAURAL,
+        VIRTUALIZATION_MODE_TRANSAURAL
+    })
+    @Retention(RetentionPolicy.SOURCE)
+    public @interface VirtualizationMode {}
+
+    /** @hide */
+    @IntDef( {
+        VIRTUALIZATION_MODE_AUTO,
+        VIRTUALIZATION_MODE_BINAURAL,
+        VIRTUALIZATION_MODE_TRANSAURAL
+    })
+    @Retention(RetentionPolicy.SOURCE)
+    public @interface ForceVirtualizationMode {}
+
+    // Maps a query-only virtualization mode to the reference device type used when
+    // interrogating the engine (headphones for binaural, built-in speaker for transaural).
+    private static int getDeviceForModeQuery(@VirtualizationMode int virtualizationMode)
+            throws IllegalArgumentException {
+        switch (virtualizationMode) {
+            case VIRTUALIZATION_MODE_BINAURAL:
+                return AudioDeviceInfo.TYPE_WIRED_HEADPHONES;
+            case VIRTUALIZATION_MODE_TRANSAURAL:
+                return AudioDeviceInfo.TYPE_BUILTIN_SPEAKER;
+            default:
+                throw (new IllegalArgumentException(
+                        "Virtualizer: illegal virtualization mode " + virtualizationMode));
+        }
+    }
+
+    // Same as getDeviceForModeQuery but additionally accepts VIRTUALIZATION_MODE_AUTO,
+    // which maps to TYPE_UNKNOWN to tell the engine to stop forcing a mode.
+    private static int getDeviceForModeForce(@ForceVirtualizationMode int virtualizationMode)
+            throws IllegalArgumentException {
+        if (virtualizationMode == VIRTUALIZATION_MODE_AUTO) {
+            return AudioDeviceInfo.TYPE_UNKNOWN;
+        } else {
+            return getDeviceForModeQuery(virtualizationMode);
+        }
+    }
+
+    // Classifies an output device type into the virtualization mode it implies:
+    // per-ear devices are binaural, shared-air devices are transaural, unknown is off.
+    private static int deviceToMode(int deviceType) {
+        switch (deviceType) {
+            case AudioDeviceInfo.TYPE_WIRED_HEADSET:
+            case AudioDeviceInfo.TYPE_WIRED_HEADPHONES:
+            case AudioDeviceInfo.TYPE_BLUETOOTH_SCO:
+            case AudioDeviceInfo.TYPE_BUILTIN_EARPIECE:
+            case AudioDeviceInfo.TYPE_USB_HEADSET:
+                return VIRTUALIZATION_MODE_BINAURAL;
+            case AudioDeviceInfo.TYPE_BUILTIN_SPEAKER:
+            case AudioDeviceInfo.TYPE_LINE_ANALOG:
+            case AudioDeviceInfo.TYPE_LINE_DIGITAL:
+            case AudioDeviceInfo.TYPE_BLUETOOTH_A2DP:
+            case AudioDeviceInfo.TYPE_HDMI:
+            case AudioDeviceInfo.TYPE_HDMI_ARC:
+            case AudioDeviceInfo.TYPE_USB_DEVICE:
+            case AudioDeviceInfo.TYPE_USB_ACCESSORY:
+            case AudioDeviceInfo.TYPE_DOCK:
+            case AudioDeviceInfo.TYPE_FM:
+            case AudioDeviceInfo.TYPE_AUX_LINE:
+                return VIRTUALIZATION_MODE_TRANSAURAL;
+            case AudioDeviceInfo.TYPE_UNKNOWN:
+            default:
+                return VIRTUALIZATION_MODE_OFF;
+        }
+    }
+
+    /**
+     * Checks if the combination of a channel mask and virtualization mode is supported by this
+     * virtualizer.
+     * Some virtualizer implementations may only support binaural processing (i.e. only support
+     * headphone output, see {@link #VIRTUALIZATION_MODE_BINAURAL}), some may support transaural
+     * processing (i.e. for speaker output, see {@link #VIRTUALIZATION_MODE_TRANSAURAL}) for the
+     * built-in speakers. Use this method to query the virtualizer implementation capabilities.
+     * @param inputChannelMask the channel mask of the content to virtualize.
+     * @param virtualizationMode the mode for which virtualization processing is to be performed,
+     *    one of {@link #VIRTUALIZATION_MODE_BINAURAL}, {@link #VIRTUALIZATION_MODE_TRANSAURAL}.
+     * @return true if the combination of channel mask and virtualization mode is supported, false
+     *    otherwise.
+     *    <br>An indication that a certain channel mask is not supported doesn't necessarily mean
+     *    you cannot play content with that channel mask, it more likely implies the content will
+     *    be downmixed before being virtualized. For instance a virtualizer that only supports a
+     *    mask such as {@link AudioFormat#CHANNEL_OUT_STEREO}
+     *    will still be able to process content with a mask of
+     *    {@link AudioFormat#CHANNEL_OUT_5POINT1}, but will downmix the content to stereo first, and
+     *    then will virtualize, as opposed to virtualizing each channel individually.
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public boolean canVirtualize(int inputChannelMask, @VirtualizationMode int virtualizationMode)
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        return getAnglesInt(inputChannelMask, getDeviceForModeQuery(virtualizationMode), null);
+    }
+
+    /**
+     * Queries the virtual speaker angles (azimuth and elevation) for a combination of a channel
+     * mask and virtualization mode.
+     * If the virtualization configuration (mask and mode) is supported (see
+     * {@link #canVirtualize(int, int)}, the array angles will contain upon return the
+     * definition of each virtual speaker and its azimuth and elevation angles relative to the
+     * listener.
+     * <br>Note that in some virtualizer implementations, the angles may be strength-dependent.
+     * @param inputChannelMask the channel mask of the content to virtualize.
+     * @param virtualizationMode the mode for which virtualization processing is to be performed,
+     *    one of {@link #VIRTUALIZATION_MODE_BINAURAL}, {@link #VIRTUALIZATION_MODE_TRANSAURAL}.
+     * @param angles a non-null array whose length is 3 times the number of channels in the channel
+     *    mask.
+     *    If the method indicates the configuration is supported, the array will contain upon return
+     *    triplets of values: for each channel <code>i</code> among the channels of the mask:
+     *    <ul>
+     *      <li>the element at index <code>3*i</code> in the array contains the speaker
+     *          identification (e.g. {@link AudioFormat#CHANNEL_OUT_FRONT_LEFT}),</li>
+     *      <li>the element at index <code>3*i+1</code> contains its corresponding azimuth angle
+     *          expressed in degrees, where 0 is the direction the listener faces, 180 is behind
+     *          the listener, and -90 is to her/his left,</li>
+     *      <li>the element at index <code>3*i+2</code> contains its corresponding elevation angle
+     *          where +90 is directly above the listener, 0 is the horizontal plane, and -90 is
+     *          directly below the listener.</li>
+     * @return true if the combination of channel mask and virtualization mode is supported, false
+     *    otherwise.
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public boolean getSpeakerAngles(int inputChannelMask,
+            @VirtualizationMode int virtualizationMode, int[] angles)
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        if (angles == null) {
+            throw (new IllegalArgumentException(
+                    "Virtualizer: illegal null channel / angle array"));
+        }
+
+        return getAnglesInt(inputChannelMask, getDeviceForModeQuery(virtualizationMode), angles);
+    }
+
+    /**
+     * Forces the virtualizer effect to use the given processing mode.
+     * The effect must be enabled for the forced mode to be applied.
+     * @param virtualizationMode one of {@link #VIRTUALIZATION_MODE_BINAURAL},
+     *     {@link #VIRTUALIZATION_MODE_TRANSAURAL} to force a particular processing mode, or
+     *     {@link #VIRTUALIZATION_MODE_AUTO} to stop forcing a mode.
+     * @return true if the processing mode is supported, and it is successfully set, or
+     *     forcing was successfully disabled, false otherwise.
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public boolean forceVirtualizationMode(@ForceVirtualizationMode int virtualizationMode)
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        // convert Java device type to internal representation
+        int deviceType = getDeviceForModeForce(virtualizationMode);
+        int internalDevice = AudioDeviceInfo.convertDeviceTypeToInternalDevice(deviceType);
+
+        int status = setParameter(PARAM_FORCE_VIRTUALIZATION_MODE, internalDevice);
+
+        if (status >= 0) {
+            return true;
+        } else if (status == AudioEffect.ERROR_BAD_VALUE) {
+            // a BAD_VALUE return from setParameter indicates the mode can't be forced
+            // don't throw an exception, just return false
+            return false;
+        } else {
+            // something wrong may have happened
+            checkStatus(status);
+        }
+        // unexpected virtualizer behavior
+        Log.e(TAG, "unexpected status code " + status
+                + " after setParameter(PARAM_FORCE_VIRTUALIZATION_MODE)");
+        return false;
+    }
+
+    /**
+     * Return the virtualization mode being used, if any.
+     * @return the virtualization mode being used.
+     *     If virtualization is not active, the virtualization mode will be
+     *     {@link #VIRTUALIZATION_MODE_OFF}. Otherwise the value will be
+     *     {@link #VIRTUALIZATION_MODE_BINAURAL} or {@link #VIRTUALIZATION_MODE_TRANSAURAL}.
+     *     Virtualization may not be active either because the effect is not enabled or
+     *     because the current output device is not compatible with this virtualization
+     *     implementation.
+     * @throws IllegalStateException
+     * @throws UnsupportedOperationException
+     */
+    public int getVirtualizationMode()
+    throws IllegalStateException, UnsupportedOperationException {
+        int[] value = new int[1];
+        int status = getParameter(PARAM_VIRTUALIZATION_MODE, value);
+        if (status >= 0) {
+            // the engine reports an internal device; translate it back to a mode
+            return deviceToMode(AudioDeviceInfo.convertInternalDeviceToDeviceType(value[0]));
+        } else if (status == AudioEffect.ERROR_BAD_VALUE) {
+            return VIRTUALIZATION_MODE_OFF;
+        } else {
+            // something wrong may have happened
+            checkStatus(status);
+        }
+        // unexpected virtualizer behavior
+        Log.e(TAG, "unexpected status code " + status
+                + " after getParameter(PARAM_VIRTUALIZATION_MODE)");
+        return VIRTUALIZATION_MODE_OFF;
+    }
+
+    /**
+     * The OnParameterChangeListener interface defines a method called by the Virtualizer when a
+     * parameter value has changed.
+     */
+    public interface OnParameterChangeListener {
+        /**
+         * Method called when a parameter value has changed. The method is called only if the
+         * parameter was changed by another application having the control of the same
+         * Virtualizer engine.
+         * @param effect the Virtualizer on which the interface is registered.
+         * @param status status of the set parameter operation.
+         * @param param ID of the modified parameter. See {@link #PARAM_STRENGTH} ...
+         * @param value the new parameter value.
+         */
+        void onParameterChange(Virtualizer effect, int status, int param, short value);
+    }
+
+    /**
+     * Listener used internally to receive unformatted parameter change events from AudioEffect
+     * super class.
+     */
+    private class BaseParameterListener implements AudioEffect.OnParameterChangeListener {
+        private BaseParameterListener() { }
+
+        public void onParameterChange(AudioEffect effect, int status, byte[] param, byte[] value) {
+            // snapshot the listener under the lock, then dispatch outside it
+            OnParameterChangeListener l = null;
+
+            synchronized (mParamListenerLock) {
+                if (mParamListener != null) {
+                    l = mParamListener;
+                }
+            }
+            if (l != null) {
+                int p = -1;
+                short v = -1;
+
+                // decode only well-formed payloads: a 4-byte param ID and a 2-byte value
+                if (param.length == 4) {
+                    p = byteArrayToInt(param, 0);
+                }
+                if (value.length == 2) {
+                    v = byteArrayToShort(value, 0);
+                }
+                if (p != -1 && v != -1) {
+                    l.onParameterChange(Virtualizer.this, status, p, v);
+                }
+            }
+        }
+    }
+
+    /**
+     * Registers an OnParameterChangeListener interface.
+     * @param listener OnParameterChangeListener interface registered
+     */
+    public void setParameterListener(OnParameterChangeListener listener) {
+        synchronized (mParamListenerLock) {
+            if (mParamListener == null) {
+                mParamListener = listener;
+                mBaseParamListener = new BaseParameterListener();
+                super.setParameterListener(mBaseParamListener);
+            }
+        }
+    }
+
+    /**
+     * The Settings class regroups all virtualizer parameters. It is used in
+     * conjunction with getProperties() and setProperties() methods to backup and restore
+     * all parameters in a single call.
+     */
+    public static class Settings {
+        public short strength;
+
+        public Settings() {
+        }
+
+        /**
+         * Settings class constructor from a key=value; pairs formatted string. The string is
+         * typically returned by Settings.toString() method.
+         * @throws IllegalArgumentException if the string is not correctly formatted.
+         */
+        public Settings(String settings) {
+            StringTokenizer st = new StringTokenizer(settings, "=;");
+            // exactly three tokens expected: effect name, key, value
+            // (the original computed countTokens() twice and left the first
+            // result in an unused local)
+            if (st.countTokens() != 3) {
+                throw new IllegalArgumentException("settings: " + settings);
+            }
+            String key = st.nextToken();
+            if (!key.equals("Virtualizer")) {
+                throw new IllegalArgumentException(
+                        "invalid settings for Virtualizer: " + key);
+            }
+            try {
+                key = st.nextToken();
+                if (!key.equals("strength")) {
+                    throw new IllegalArgumentException("invalid key name: " + key);
+                }
+                strength = Short.parseShort(st.nextToken());
+            } catch (NumberFormatException nfe) {
+                throw new IllegalArgumentException("invalid value for key: " + key);
+            }
+        }
+
+        @Override
+        public String toString() {
+            // Serialization consumed by Settings(String); plain concatenation
+            // replaces the original's redundant `new String(...)` wrapper.
+            return "Virtualizer" + ";strength=" + Short.toString(strength);
+        }
+    }
+
+
+    /**
+     * Gets the virtualizer properties. This method is useful when a snapshot of current
+     * virtualizer settings must be saved by the application.
+     * @return a Virtualizer.Settings object containing all current parameters values
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public Virtualizer.Settings getProperties()
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        Settings settings = new Settings();
+        short[] value = new short[1];
+        checkStatus(getParameter(PARAM_STRENGTH, value));
+        settings.strength = value[0];
+        return settings;
+    }
+
+    /**
+     * Sets the virtualizer properties. This method is useful when virtualizer settings have to
+     * be applied from a previous backup.
+     * @param settings a Virtualizer.Settings object containing the properties to apply
+     * @throws IllegalStateException
+     * @throws IllegalArgumentException
+     * @throws UnsupportedOperationException
+     */
+    public void setProperties(Virtualizer.Settings settings)
+    throws IllegalStateException, IllegalArgumentException, UnsupportedOperationException {
+        checkStatus(setParameter(PARAM_STRENGTH, settings.strength));
+    }
+}
diff --git a/android/media/audiofx/Visualizer.java b/android/media/audiofx/Visualizer.java
new file mode 100644
index 00000000..0fe7246e
--- /dev/null
+++ b/android/media/audiofx/Visualizer.java
@@ -0,0 +1,772 @@
+/*
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.audiofx;
+
+import android.app.ActivityThread;
+import android.util.Log;
+import java.lang.ref.WeakReference;
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Message;
+
+/**
+ * The Visualizer class enables application to retrieve part of the currently playing audio for
+ * visualization purpose. It is not an audio recording interface and only returns partial and low
+ * quality audio content. However, to protect privacy of certain audio data (e.g voice mail) the use
+ * of the visualizer requires the permission android.permission.RECORD_AUDIO.
+ * <p>The audio session ID passed to the constructor indicates which audio content should be
+ * visualized:<br>
+ * <ul>
+ * <li>If the session is 0, the audio output mix is visualized</li>
+ * <li>If the session is not 0, the audio from a particular {@link android.media.MediaPlayer} or
+ * {@link android.media.AudioTrack}
+ * using this audio session is visualized </li>
+ * </ul>
+ * <p>Two types of representation of audio content can be captured: <br>
+ * <ul>
+ * <li>Waveform data: consecutive 8-bit (unsigned) mono samples by using the
+ * {@link #getWaveForm(byte[])} method</li>
+ * <li>Frequency data: 8-bit magnitude FFT by using the {@link #getFft(byte[])} method</li>
+ * </ul>
+ * <p>The length of the capture can be retrieved or specified by calling respectively
+ * {@link #getCaptureSize()} and {@link #setCaptureSize(int)} methods. The capture size must be a
+ * power of 2 in the range returned by {@link #getCaptureSizeRange()}.
+ * <p>In addition to the polling capture mode described above with {@link #getWaveForm(byte[])} and
+ * {@link #getFft(byte[])} methods, a callback mode is also available by installing a listener by
+ * use of the {@link #setDataCaptureListener(OnDataCaptureListener, int, boolean, boolean)} method.
+ * The rate at which the listener capture method is called as well as the type of data returned is
+ * specified.
+ * <p>Before capturing data, the Visualizer must be enabled by calling the
+ * {@link #setEnabled(boolean)} method.
+ * When data capture is not needed any more, the Visualizer should be disabled.
+ * <p>It is good practice to call the {@link #release()} method when the Visualizer is not used
+ * anymore to free up native resources associated to the Visualizer instance.
+ * <p>Creating a Visualizer on the output mix (audio session 0) requires permission
+ * {@link android.Manifest.permission#MODIFY_AUDIO_SETTINGS}
+ * <p>The Visualizer class can also be used to perform measurements on the audio being played back.
+ * The measurements to perform are defined by setting a mask of the requested measurement modes with
+ * {@link #setMeasurementMode(int)}. Supported values are {@link #MEASUREMENT_MODE_NONE} to cancel
+ * any measurement, and {@link #MEASUREMENT_MODE_PEAK_RMS} for peak and RMS monitoring.
+ * Measurements can be retrieved through {@link #getMeasurementPeakRms(MeasurementPeakRms)}.
+ */
+
+public class Visualizer {
+
+    static {
+        // Load the shared JNI library backing all audio effects and initialize
+        // native class state before any Visualizer instance can be constructed.
+        System.loadLibrary("audioeffect_jni");
+        native_init();
+    }
+
+ private final static String TAG = "Visualizer-JAVA";
+
+ /**
+ * State of a Visualizer object that was not successfully initialized upon creation
+ */
+ public static final int STATE_UNINITIALIZED = 0;
+ /**
+ * State of a Visualizer object that is ready to be used.
+ */
+ public static final int STATE_INITIALIZED = 1;
+ /**
+ * State of a Visualizer object that is active.
+ */
+ public static final int STATE_ENABLED = 2;
+
+ // to keep in sync with system/media/audio_effects/include/audio_effects/effect_visualizer.h
+ /**
+ * Defines a capture mode where amplification is applied based on the content of the captured
+ * data. This is the default Visualizer mode, and is suitable for music visualization.
+ */
+ public static final int SCALING_MODE_NORMALIZED = 0;
+ /**
+ * Defines a capture mode where the playback volume will affect (scale) the range of the
+ * captured data. A low playback volume will lead to low sample and fft values, and vice-versa.
+ */
+ public static final int SCALING_MODE_AS_PLAYED = 1;
+
+ /**
+ * Defines a measurement mode in which no measurements are performed.
+ */
+ public static final int MEASUREMENT_MODE_NONE = 0;
+
+ /**
+ * Defines a measurement mode which computes the peak and RMS value in mB, where 0mB is the
+ * maximum sample value, and -9600mB is the minimum value.
+ * Values for peak and RMS can be retrieved with
+ * {@link #getMeasurementPeakRms(MeasurementPeakRms)}.
+ */
+ public static final int MEASUREMENT_MODE_PEAK_RMS = 1 << 0;
+
+ // to keep in sync with frameworks/base/media/jni/audioeffect/android_media_Visualizer.cpp
+ private static final int NATIVE_EVENT_PCM_CAPTURE = 0;
+ private static final int NATIVE_EVENT_FFT_CAPTURE = 1;
+ private static final int NATIVE_EVENT_SERVER_DIED = 2;
+
+ // Error codes:
+ /**
+ * Successful operation.
+ */
+ public static final int SUCCESS = 0;
+ /**
+ * Unspecified error.
+ */
+ public static final int ERROR = -1;
+ /**
+ * Internal operation status. Not returned by any method.
+ */
+ public static final int ALREADY_EXISTS = -2;
+ /**
+ * Operation failed due to bad object initialization.
+ */
+ public static final int ERROR_NO_INIT = -3;
+ /**
+ * Operation failed due to bad parameter value.
+ */
+ public static final int ERROR_BAD_VALUE = -4;
+ /**
+ * Operation failed because it was requested in wrong state.
+ */
+ public static final int ERROR_INVALID_OPERATION = -5;
+ /**
+ * Operation failed due to lack of memory.
+ */
+ public static final int ERROR_NO_MEMORY = -6;
+ /**
+ * Operation failed due to dead remote object.
+ */
+ public static final int ERROR_DEAD_OBJECT = -7;
+
+ //--------------------------------------------------------------------------
+ // Member variables
+ //--------------------
+ /**
+ * Indicates the state of the Visualizer instance
+ */
+ private int mState = STATE_UNINITIALIZED;
+ /**
+ * Lock to synchronize access to mState
+ */
+ private final Object mStateLock = new Object();
+ /**
+ * System wide unique Identifier of the visualizer engine used by this Visualizer instance
+ */
+ private int mId;
+
+ /**
+ * Lock to protect listeners updates against event notifications
+ */
+ private final Object mListenerLock = new Object();
+ /**
+ * Handler for events coming from the native code
+ */
+ private NativeEventHandler mNativeEventHandler = null;
+ /**
+ * PCM and FFT capture listener registered by client
+ */
+ private OnDataCaptureListener mCaptureListener = null;
+ /**
+ * Server Died listener registered by client
+ */
+ private OnServerDiedListener mServerDiedListener = null;
+
+ // accessed by native methods
+ private long mNativeVisualizer;
+ private long mJniData;
+
+ //--------------------------------------------------------------------------
+ // Constructor, Finalize
+ //--------------------
+ /**
+ * Class constructor.
+ * @param audioSession system wide unique audio session identifier. If audioSession
+ * is not 0, the visualizer will be attached to the MediaPlayer or AudioTrack in the
+ * same audio session. Otherwise, the Visualizer will apply to the output mix.
+ *
+ * @throws java.lang.UnsupportedOperationException
+ * @throws java.lang.RuntimeException
+ */
+
+    public Visualizer(int audioSession)
+    throws UnsupportedOperationException, RuntimeException {
+        // Engine id is returned by the native layer through id[0].
+        int[] id = new int[1];
+
+        synchronized (mStateLock) {
+            mState = STATE_UNINITIALIZED;
+            // native initialization; a WeakReference is passed so the native
+            // side cannot keep this instance alive (postEventFromNative
+            // re-resolves it on each event).
+            int result = native_setup(new WeakReference<Visualizer>(this), audioSession, id,
+                    ActivityThread.currentOpPackageName());
+            if (result != SUCCESS && result != ALREADY_EXISTS) {
+                Log.e(TAG, "Error code "+result+" when initializing Visualizer.");
+                switch (result) {
+                case ERROR_INVALID_OPERATION:
+                    throw (new UnsupportedOperationException("Effect library not loaded"));
+                default:
+                    throw (new RuntimeException("Cannot initialize Visualizer engine, error: "
+                            +result));
+                }
+            }
+            mId = id[0];
+            // On ALREADY_EXISTS the underlying engine may already be running;
+            // mirror its enabled state instead of assuming INITIALIZED.
+            if (native_getEnabled()) {
+                mState = STATE_ENABLED;
+            } else {
+                mState = STATE_INITIALIZED;
+            }
+        }
+    }
+
+    /**
+     * Releases the native Visualizer resources. It is a good practice to release the
+     * visualization engine when not in use.
+     */
+    public void release() {
+        synchronized (mStateLock) {
+            // Free the native engine first, then mark this wrapper unusable;
+            // subsequent calls will fail their mState checks.
+            native_release();
+            mState = STATE_UNINITIALIZED;
+        }
+    }
+
+    /**
+     * Finalizer safety net: frees the native engine if the application forgot
+     * to call {@link #release()}.
+     */
+    @Override
+    protected void finalize() throws Throwable {
+        // Always chain to super.finalize(), as the finalizer contract requires
+        // (the original implementation omitted this), even if the native
+        // cleanup throws.
+        try {
+            native_finalize();
+        } finally {
+            super.finalize();
+        }
+    }
+
+    /**
+     * Enables or disables the visualization engine.
+     * @param enabled requested enable state
+     * @return {@link #SUCCESS} in case of success,
+     * {@link #ERROR_INVALID_OPERATION} or {@link #ERROR_DEAD_OBJECT} in case of failure.
+     * @throws IllegalStateException
+     */
+    public int setEnabled(boolean enabled)
+    throws IllegalStateException {
+        synchronized (mStateLock) {
+            if (mState == STATE_UNINITIALIZED) {
+                throw new IllegalStateException("setEnabled() called in wrong state: " + mState);
+            }
+            // Only touch the native layer on a real state transition; asking
+            // for the state we are already in is a successful no-op.
+            boolean transition = enabled ? (mState == STATE_INITIALIZED)
+                                         : (mState == STATE_ENABLED);
+            if (!transition) {
+                return SUCCESS;
+            }
+            int status = native_setEnabled(enabled);
+            if (status == SUCCESS) {
+                mState = enabled ? STATE_ENABLED : STATE_INITIALIZED;
+            }
+            return status;
+        }
+    }
+
+    /**
+     * Gets the current activation state of the visualizer.
+     * @return true if the visualizer is active, false otherwise
+     */
+    public boolean getEnabled() {
+        synchronized (mStateLock) {
+            // The native engine owns the authoritative enabled state.
+            if (mState != STATE_UNINITIALIZED) {
+                return native_getEnabled();
+            }
+            throw new IllegalStateException("getEnabled() called in wrong state: " + mState);
+        }
+    }
+
+    /**
+     * Returns the capture size range.
+     * @return the minimum capture size is returned in first array element and the maximum in
+     * second array element.
+     */
+    public static native int[] getCaptureSizeRange();
+
+    /**
+     * Returns the maximum capture rate for the callback capture method. This is the maximum value
+     * for the rate parameter of the
+     * {@link #setDataCaptureListener(OnDataCaptureListener, int, boolean, boolean)} method.
+     * @return the maximum capture rate expressed in milliHertz
+     */
+    public static native int getMaxCaptureRate();
+
+    /**
+     * Sets the capture size, i.e. the number of bytes returned by {@link #getWaveForm(byte[])}
+     * and {@link #getFft(byte[])}. The size must be a power of 2 within the range reported by
+     * {@link #getCaptureSizeRange()}.
+     * This method must not be called when the Visualizer is enabled.
+     * @param size requested capture size
+     * @return {@link #SUCCESS} in case of success,
+     * {@link #ERROR_BAD_VALUE} in case of failure.
+     * @throws IllegalStateException
+     */
+    public int setCaptureSize(int size)
+    throws IllegalStateException {
+        synchronized (mStateLock) {
+            // The capture size may only change while initialized but disabled.
+            if (mState != STATE_INITIALIZED) {
+                throw new IllegalStateException(
+                        "setCaptureSize() called in wrong state: " + mState);
+            }
+            return native_setCaptureSize(size);
+        }
+    }
+
+    /**
+     * Returns the current capture size.
+     * @return the capture size in bytes.
+     */
+    public int getCaptureSize()
+    throws IllegalStateException {
+        synchronized (mStateLock) {
+            if (mState != STATE_UNINITIALIZED) {
+                return native_getCaptureSize();
+            }
+            throw new IllegalStateException("getCaptureSize() called in wrong state: " + mState);
+        }
+    }
+
+    /**
+     * Sets the type of scaling applied on the captured visualization data.
+     * @param mode see {@link #SCALING_MODE_NORMALIZED}
+     * and {@link #SCALING_MODE_AS_PLAYED}
+     * @return {@link #SUCCESS} in case of success,
+     * {@link #ERROR_BAD_VALUE} in case of failure.
+     * @throws IllegalStateException
+     */
+    public int setScalingMode(int mode)
+    throws IllegalStateException {
+        synchronized (mStateLock) {
+            if (mState == STATE_UNINITIALIZED) {
+                throw new IllegalStateException("setScalingMode() called in wrong state: "
+                        + mState);
+            }
+            // Mode validation is delegated to the native layer.
+            return native_setScalingMode(mode);
+        }
+    }
+
+    /**
+     * Returns the scaling mode currently applied to the captured visualization data.
+     * @return the scaling mode, see {@link #SCALING_MODE_NORMALIZED}
+     * and {@link #SCALING_MODE_AS_PLAYED}.
+     * @throws IllegalStateException
+     */
+    public int getScalingMode()
+    throws IllegalStateException {
+        synchronized (mStateLock) {
+            if (mState != STATE_UNINITIALIZED) {
+                return native_getScalingMode();
+            }
+            throw new IllegalStateException("getScalingMode() called in wrong state: "
+                    + mState);
+        }
+    }
+
+    /**
+     * Sets the combination of measurement modes performed by this audio effect.
+     * @param mode a mask of the measurements to perform. The valid values are
+     * {@link #MEASUREMENT_MODE_NONE} (to cancel any measurement)
+     * or {@link #MEASUREMENT_MODE_PEAK_RMS}.
+     * @return {@link #SUCCESS} in case of success, {@link #ERROR_BAD_VALUE} in case of failure.
+     * @throws IllegalStateException
+     */
+    public int setMeasurementMode(int mode)
+    throws IllegalStateException {
+        synchronized (mStateLock) {
+            if (mState == STATE_UNINITIALIZED) {
+                throw new IllegalStateException("setMeasurementMode() called in wrong state: "
+                        + mState);
+            }
+            // Mask validation is delegated to the native layer.
+            return native_setMeasurementMode(mode);
+        }
+    }
+
+    /**
+     * Returns the measurement modes currently performed by this audio effect.
+     * @return the mask of the measurements,
+     * {@link #MEASUREMENT_MODE_NONE} (when no measurements are performed)
+     * or {@link #MEASUREMENT_MODE_PEAK_RMS}.
+     * @throws IllegalStateException
+     */
+    public int getMeasurementMode()
+    throws IllegalStateException {
+        synchronized (mStateLock) {
+            if (mState != STATE_UNINITIALIZED) {
+                return native_getMeasurementMode();
+            }
+            throw new IllegalStateException("getMeasurementMode() called in wrong state: "
+                    + mState);
+        }
+    }
+
+    /**
+     * Returns the sampling rate of the captured audio.
+     * @return the sampling rate in milliHertz.
+     */
+    public int getSamplingRate()
+    throws IllegalStateException {
+        synchronized (mStateLock) {
+            if (mState != STATE_UNINITIALIZED) {
+                return native_getSamplingRate();
+            }
+            throw new IllegalStateException("getSamplingRate() called in wrong state: " + mState);
+        }
+    }
+
+    /**
+     * Returns a waveform capture of the currently playing audio content: consecutive
+     * 8-bit (unsigned) mono PCM samples whose count equals the value reported by
+     * {@link #getCaptureSize()}.
+     * <p>The Visualizer must be enabled when this method is called.
+     * @param waveform array of bytes where the waveform should be returned
+     * @return {@link #SUCCESS} in case of success,
+     * {@link #ERROR_NO_MEMORY}, {@link #ERROR_INVALID_OPERATION} or {@link #ERROR_DEAD_OBJECT}
+     * in case of failure.
+     * @throws IllegalStateException
+     */
+    public int getWaveForm(byte[] waveform)
+    throws IllegalStateException {
+        synchronized (mStateLock) {
+            if (mState != STATE_ENABLED) {
+                throw new IllegalStateException("getWaveForm() called in wrong state: " + mState);
+            }
+            return native_getWaveForm(waveform);
+        }
+    }
+ /**
+ * Returns a frequency capture of currently playing audio content.
+ * <p>This method must be called when the Visualizer is enabled.
+ * <p>The capture is an 8-bit magnitude FFT, the frequency range covered being 0 (DC) to half of
+ * the sampling rate returned by {@link #getSamplingRate()}. The capture returns the real and
+ * imaginary parts of a number of frequency points equal to half of the capture size plus one.
+ * <p>Note: only the real part is returned for the first point (DC) and the last point
+ * (sampling frequency / 2).
+ * <p>The layout in the returned byte array is as follows:
+ * <ul>
+ * <li> n is the capture size returned by getCaptureSize()</li>
+ * <li> Rfk, Ifk are respectively the real and imaginary parts of the kth frequency
+ * component</li>
+ * <li> If Fs is the sampling frequency returned by getSamplingRate() the kth frequency is:
+ * (k*Fs)/(n/2) </li>
+ * </ul>
+ * <table border="0" cellspacing="0" cellpadding="0">
+ * <tr><td>Index </p></td>
+ * <td>0 </p></td>
+ * <td>1 </p></td>
+ * <td>2 </p></td>
+ * <td>3 </p></td>
+ * <td>4 </p></td>
+ * <td>5 </p></td>
+ * <td>... </p></td>
+ * <td>n - 2 </p></td>
+ * <td>n - 1 </p></td></tr>
+ * <tr><td>Data </p></td>
+ * <td>Rf0 </p></td>
+ * <td>Rf(n/2) </p></td>
+ * <td>Rf1 </p></td>
+ * <td>If1 </p></td>
+ * <td>Rf2 </p></td>
+ * <td>If2 </p></td>
+ * <td>... </p></td>
+ * <td>Rf(n-1)/2 </p></td>
+ * <td>If(n-1)/2 </p></td></tr>
+ * </table>
+ * @param fft array of bytes where the FFT should be returned
+ * @return {@link #SUCCESS} in case of success,
+ * {@link #ERROR_NO_MEMORY}, {@link #ERROR_INVALID_OPERATION} or {@link #ERROR_DEAD_OBJECT}
+ * in case of failure.
+ * @throws IllegalStateException
+ */
+    public int getFft(byte[] fft)
+    throws IllegalStateException {
+        // Frequency-domain capture; buffer layout is documented in the javadoc above.
+        synchronized (mStateLock) {
+            if (mState != STATE_ENABLED) {
+                throw new IllegalStateException("getFft() called in wrong state: " + mState);
+            }
+            return native_getFft(fft);
+        }
+    }
+
+    /**
+     * A class to store peak and RMS values.
+     * Peak and RMS are expressed in mB, as described in the
+     * {@link Visualizer#MEASUREMENT_MODE_PEAK_RMS} measurement mode.
+     * Both fields are written by
+     * {@link Visualizer#getMeasurementPeakRms(MeasurementPeakRms)}.
+     */
+    public static final class MeasurementPeakRms {
+        /**
+         * The peak value in mB.
+         */
+        public int mPeak;
+        /**
+         * The RMS value in mB.
+         */
+        public int mRms;
+    }
+
+    /**
+     * Retrieves the latest peak and RMS measurement, storing the values into the
+     * supplied {@link Visualizer.MeasurementPeakRms} instance.
+     * @param measurement a non-null {@link Visualizer.MeasurementPeakRms} instance to store
+     * the measurement values.
+     * @return {@link #SUCCESS} in case of success, {@link #ERROR_BAD_VALUE},
+     * {@link #ERROR_NO_MEMORY}, {@link #ERROR_INVALID_OPERATION} or {@link #ERROR_DEAD_OBJECT}
+     * in case of failure.
+     */
+    public int getMeasurementPeakRms(MeasurementPeakRms measurement) {
+        // A null destination is reported as a bad value, not an exception,
+        // matching the error-code style of the other capture methods.
+        if (measurement == null) {
+            Log.e(TAG, "Cannot store measurements in a null object");
+            return ERROR_BAD_VALUE;
+        }
+        synchronized (mStateLock) {
+            if (mState != STATE_ENABLED) {
+                throw new IllegalStateException("getMeasurementPeakRms() called in wrong state: "
+                        + mState);
+            }
+            return native_getPeakRms(measurement);
+        }
+    }
+
+ //---------------------------------------------------------
+ // Interface definitions
+ //--------------------
+    /**
+     * The OnDataCaptureListener interface defines methods called by the Visualizer to periodically
+     * update the audio visualization capture.
+     * The client application can implement this interface and register the listener with the
+     * {@link #setDataCaptureListener(OnDataCaptureListener, int, boolean, boolean)} method.
+     */
+    public interface OnDataCaptureListener {
+        /**
+         * Method called when a new waveform capture is available.
+         * <p>Data in the waveform buffer is valid only within the scope of the callback.
+         * Applications which need access to the waveform data after returning from the callback
+         * should make a copy of the data instead of holding a reference.
+         * @param visualizer Visualizer object on which the listener is registered.
+         * @param waveform array of bytes containing the waveform representation.
+         * @param samplingRate sampling rate of the audio visualized.
+         */
+        void onWaveFormDataCapture(Visualizer visualizer, byte[] waveform, int samplingRate);
+
+        /**
+         * Method called when a new frequency capture is available.
+         * <p>Data in the fft buffer is valid only within the scope of the callback.
+         * Applications which need access to the fft data after returning from the callback
+         * should make a copy of the data instead of holding a reference.
+         * @param visualizer Visualizer object on which the listener is registered.
+         * @param fft array of bytes containing the frequency representation.
+         * @param samplingRate sampling rate of the audio visualized.
+         */
+        void onFftDataCapture(Visualizer visualizer, byte[] fft, int samplingRate);
+    }
+
+    /**
+     * Registers an OnDataCaptureListener interface and specifies the rate at which the capture
+     * should be updated as well as the type of capture requested.
+     * <p>Call this method with a null listener to stop receiving the capture updates.
+     * @param listener OnDataCaptureListener registered
+     * @param rate rate in milliHertz at which the capture should be updated
+     * @param waveform true if a waveform capture is requested: the onWaveFormDataCapture()
+     * method will be called on the OnDataCaptureListener interface.
+     * @param fft true if a frequency capture is requested: the onFftDataCapture() method will be
+     * called on the OnDataCaptureListener interface.
+     * @return {@link #SUCCESS} in case of success,
+     * {@link #ERROR_NO_INIT} or {@link #ERROR_BAD_VALUE} in case of failure.
+     */
+    public int setDataCaptureListener(OnDataCaptureListener listener,
+            int rate, boolean waveform, boolean fft) {
+        synchronized (mListenerLock) {
+            mCaptureListener = listener;
+        }
+        // A null listener also cancels any pending capture in the native layer.
+        final boolean wantWaveform = (listener != null) && waveform;
+        final boolean wantFft = (listener != null) && fft;
+        int status = native_setPeriodicCapture(rate, wantWaveform, wantFft);
+        if (status != SUCCESS) {
+            return status;
+        }
+        if ((listener != null) && (mNativeEventHandler == null)) {
+            // Deliver events on the caller's looper when it has one, otherwise
+            // fall back to the process main looper.
+            Looper looper = Looper.myLooper();
+            if (looper == null) {
+                looper = Looper.getMainLooper();
+            }
+            if (looper != null) {
+                mNativeEventHandler = new NativeEventHandler(this, looper);
+            } else {
+                mNativeEventHandler = null;
+                status = ERROR_NO_INIT;
+            }
+        }
+        return status;
+    }
+
+    /**
+     * @hide
+     *
+     * The OnServerDiedListener interface defines a method called by the Visualizer to indicate that
+     * the connection to the native media server has been broken and that the Visualizer object will
+     * need to be released and re-created.
+     * The client application can implement this interface and register the listener with the
+     * {@link #setServerDiedListener(OnServerDiedListener)} method.
+     */
+    public interface OnServerDiedListener {
+        /**
+         * @hide
+         *
+         * Method called when the native media server has died.
+         * <p>If the native media server encounters a fatal error and needs to restart, the binder
+         * connection from the Visualizer to the media server will be broken. Data capture
+         * callbacks will stop happening, and client initiated calls to the Visualizer
+         * instance will fail with the error code {@link #ERROR_DEAD_OBJECT}. To restore
+         * functionality, clients should {@link #release()} their old visualizer and create a new
+         * instance.
+         */
+        void onServerDied();
+    }
+
+    /**
+     * @hide
+     *
+     * Registers an OnServerDiedListener interface.
+     * <p>Call this method with a null listener to stop receiving server death notifications.
+     * @param listener the listener to register, or null to clear it
+     * @return {@link #SUCCESS} in case of success,
+     */
+    public int setServerDiedListener(OnServerDiedListener listener) {
+        // Registration cannot fail; the listener is simply swapped under the
+        // listener lock so event delivery never sees a torn update.
+        synchronized (mListenerLock) {
+            mServerDiedListener = listener;
+        }
+        return SUCCESS;
+    }
+
+    /**
+     * Helper class to handle the forwarding of native events to the appropriate listeners.
+     * Runs on the looper chosen in
+     * {@link #setDataCaptureListener(OnDataCaptureListener, int, boolean, boolean)}.
+     */
+    private class NativeEventHandler extends Handler
+    {
+        private Visualizer mVisualizer;
+
+        public NativeEventHandler(Visualizer v, Looper looper) {
+            super(looper);
+            mVisualizer = v;
+        }
+
+        // Forwards a PCM or FFT capture event to the registered data capture
+        // listener, if any. The listener reference is sampled under
+        // mListenerLock so a concurrent (un)register cannot race the callback.
+        private void handleCaptureMessage(Message msg) {
+            OnDataCaptureListener l = null;
+            synchronized (mListenerLock) {
+                l = mVisualizer.mCaptureListener;
+            }
+
+            if (l != null) {
+                byte[] data = (byte[])msg.obj;
+                int samplingRate = msg.arg1;
+
+                switch(msg.what) {
+                case NATIVE_EVENT_PCM_CAPTURE:
+                    l.onWaveFormDataCapture(mVisualizer, data, samplingRate);
+                    break;
+                case NATIVE_EVENT_FFT_CAPTURE:
+                    l.onFftDataCapture(mVisualizer, data, samplingRate);
+                    break;
+                default:
+                    // Log message typo fixed ("handleCaptureMessge").
+                    Log.e(TAG,"Unknown native event in handleCaptureMessage: "+msg.what);
+                    break;
+                }
+            }
+        }
+
+        // Notifies the registered server-death listener, if any.
+        private void handleServerDiedMessage(Message msg) {
+            OnServerDiedListener l = null;
+            synchronized (mListenerLock) {
+                l = mVisualizer.mServerDiedListener;
+            }
+
+            if (l != null) {
+                l.onServerDied();
+            }
+        }
+
+        @Override
+        public void handleMessage(Message msg) {
+            if (mVisualizer == null) {
+                return;
+            }
+
+            switch(msg.what) {
+            case NATIVE_EVENT_PCM_CAPTURE:
+            case NATIVE_EVENT_FFT_CAPTURE:
+                handleCaptureMessage(msg);
+                break;
+            case NATIVE_EVENT_SERVER_DIED:
+                handleServerDiedMessage(msg);
+                break;
+            default:
+                Log.e(TAG,"Unknown native event: "+msg.what);
+                break;
+            }
+        }
+    }
+
+    //---------------------------------------------------------
+    // Native methods
+    //--------------------
+
+ private static native final void native_init();
+
+ private native final int native_setup(Object audioeffect_this,
+ int audioSession,
+ int[] id,
+ String opPackageName);
+
+ private native final void native_finalize();
+
+ private native final void native_release();
+
+ private native final int native_setEnabled(boolean enabled);
+
+ private native final boolean native_getEnabled();
+
+ private native final int native_setCaptureSize(int size);
+
+ private native final int native_getCaptureSize();
+
+ private native final int native_setScalingMode(int mode);
+
+ private native final int native_getScalingMode();
+
+ private native final int native_setMeasurementMode(int mode);
+
+ private native final int native_getMeasurementMode();
+
+ private native final int native_getSamplingRate();
+
+ private native final int native_getWaveForm(byte[] waveform);
+
+ private native final int native_getFft(byte[] fft);
+
+ private native final int native_getPeakRms(MeasurementPeakRms measurement);
+
+ private native final int native_setPeriodicCapture(int rate, boolean waveForm, boolean fft);
+
+ //---------------------------------------------------------
+ // Java methods called from the native side
+ //--------------------
+    @SuppressWarnings("unused")
+    private static void postEventFromNative(Object effect_ref,
+            int what, int arg1, int arg2, Object obj) {
+        // Called from the JNI layer: resolve the weak reference and, when the
+        // Visualizer is still alive and has an event handler, forward the
+        // event to that handler's looper.
+        Visualizer visualizer = (Visualizer)((WeakReference)effect_ref).get();
+        if (visualizer == null || visualizer.mNativeEventHandler == null) {
+            return;
+        }
+        Message msg = visualizer.mNativeEventHandler.obtainMessage(what, arg1, arg2, obj);
+        visualizer.mNativeEventHandler.sendMessage(msg);
+    }
+}
+
diff --git a/android/media/audiopolicy/AudioMix.java b/android/media/audiopolicy/AudioMix.java
new file mode 100644
index 00000000..adeb8348
--- /dev/null
+++ b/android/media/audiopolicy/AudioMix.java
@@ -0,0 +1,375 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.audiopolicy;
+
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.SystemApi;
+import android.media.AudioDeviceInfo;
+import android.media.AudioFormat;
+import android.media.AudioSystem;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.util.Objects;
+
+/**
+ * @hide
+ */
+@SystemApi
+public class AudioMix {
+
+ private AudioMixingRule mRule;
+ private AudioFormat mFormat;
+ private int mRouteFlags;
+ private int mMixType = MIX_TYPE_INVALID;
+
+ // written by AudioPolicy
+ int mMixState = MIX_STATE_DISABLED;
+ int mCallbackFlags;
+ String mDeviceAddress;
+
+ // initialized in constructor, read by AudioPolicyConfig
+ final int mDeviceSystemType; // an AudioSystem.DEVICE_* value, not AudioDeviceInfo.TYPE_*
+
+ /**
+ * All parameters are guaranteed valid through the Builder.
+ */
+ private AudioMix(AudioMixingRule rule, AudioFormat format, int routeFlags, int callbackFlags,
+ int deviceType, String deviceAddress) {
+ mRule = rule;
+ mFormat = format;
+ mRouteFlags = routeFlags;
+ mMixType = rule.getTargetMixType();
+ mCallbackFlags = callbackFlags;
+ mDeviceSystemType = deviceType;
+ mDeviceAddress = (deviceAddress == null) ? new String("") : deviceAddress;
+ }
+
+ // CALLBACK_FLAG_* values: keep in sync with AudioMix::kCbFlag* values defined
+ // in frameworks/av/include/media/AudioPolicy.h
+ /** @hide */
+ public final static int CALLBACK_FLAG_NOTIFY_ACTIVITY = 0x1;
+ // when adding new MIX_FLAG_* flags, add them to this mask of authorized masks:
+ private final static int CALLBACK_FLAGS_ALL = CALLBACK_FLAG_NOTIFY_ACTIVITY;
+
+ // ROUTE_FLAG_* values: keep in sync with MIX_ROUTE_FLAG_* values defined
+ // in frameworks/av/include/media/AudioPolicy.h
+ /**
+ * An audio mix behavior where the output of the mix is sent to the original destination of
+ * the audio signal, i.e. an output device for an output mix, or a recording for an input mix.
+ */
+ @SystemApi
+ public static final int ROUTE_FLAG_RENDER = 0x1;
+ /**
+ * An audio mix behavior where the output of the mix is rerouted back to the framework and
+ * is accessible for injection or capture through the {@link AudioTrack} and {@link AudioRecord}
+ * APIs.
+ */
+ @SystemApi
+ public static final int ROUTE_FLAG_LOOP_BACK = 0x1 << 1;
+
+ private static final int ROUTE_FLAG_SUPPORTED = ROUTE_FLAG_RENDER | ROUTE_FLAG_LOOP_BACK;
+
+ // MIX_TYPE_* values to keep in sync with frameworks/av/include/media/AudioPolicy.h
+ /**
+ * @hide
+ * Invalid mix type, default value.
+ */
+ public static final int MIX_TYPE_INVALID = -1;
+ /**
+ * @hide
+ * Mix type indicating playback streams are mixed.
+ */
+ public static final int MIX_TYPE_PLAYERS = 0;
+ /**
+ * @hide
+ * Mix type indicating recording streams are mixed.
+ */
+ public static final int MIX_TYPE_RECORDERS = 1;
+
+
+ // MIX_STATE_* values to keep in sync with frameworks/av/include/media/AudioPolicy.h
+ /**
+ * @hide
+ * State of a mix before its policy is enabled.
+ */
+ @SystemApi
+ public static final int MIX_STATE_DISABLED = -1;
+ /**
+ * @hide
+ * State of a mix when there is no audio to mix.
+ */
+ @SystemApi
+ public static final int MIX_STATE_IDLE = 0;
+ /**
+ * @hide
+ * State of a mix that is actively mixing audio.
+ */
+ @SystemApi
+ public static final int MIX_STATE_MIXING = 1;
+
+ /**
+ * @hide
+ * The current mixing state.
+ * @return one of {@link #MIX_STATE_DISABLED}, {@link #MIX_STATE_IDLE},
+ * {@link #MIX_STATE_MIXING}.
+ */
+ @SystemApi
+ public int getMixState() {
+ return mMixState;
+ }
+
+
+ int getRouteFlags() {
+ return mRouteFlags;
+ }
+
+ AudioFormat getFormat() {
+ return mFormat;
+ }
+
+ AudioMixingRule getRule() {
+ return mRule;
+ }
+
+ /** @hide */
+ public int getMixType() {
+ return mMixType;
+ }
+
+ void setRegistration(String regId) {
+ mDeviceAddress = regId;
+ }
+
+ /** @hide */
+ public String getRegistration() {
+ return mDeviceAddress;
+ }
+
+ /** @hide */
+ @Override
+ public int hashCode() {
+ return Objects.hash(mRouteFlags, mRule, mMixType, mFormat);
+ }
+
+ /** @hide */
+ @IntDef(flag = true,
+ value = { ROUTE_FLAG_RENDER, ROUTE_FLAG_LOOP_BACK } )
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface RouteFlags {}
+
+ /**
+ * Builder class for {@link AudioMix} objects
+ *
+ */
+ @SystemApi
+ public static class Builder {
+ private AudioMixingRule mRule = null;
+ private AudioFormat mFormat = null;
+ private int mRouteFlags = 0;
+ private int mCallbackFlags = 0;
+ // an AudioSystem.DEVICE_* value, not AudioDeviceInfo.TYPE_*
+ private int mDeviceSystemType = AudioSystem.DEVICE_NONE;
+ private String mDeviceAddress = null;
+
+ /**
+ * @hide
+ * Only used by AudioPolicyConfig, not a public API.
+ */
+ Builder() { }
+
+ /**
+ * Construct an instance for the given {@link AudioMixingRule}.
+ * @param rule a non-null {@link AudioMixingRule} instance.
+ * @throws IllegalArgumentException
+ */
+ @SystemApi
+ public Builder(AudioMixingRule rule)
+ throws IllegalArgumentException {
+ if (rule == null) {
+ throw new IllegalArgumentException("Illegal null AudioMixingRule argument");
+ }
+ mRule = rule;
+ }
+
+ /**
+ * @hide
+ * Only used by AudioPolicyConfig, not a public API.
+ * @param rule
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ Builder setMixingRule(AudioMixingRule rule)
+ throws IllegalArgumentException {
+ if (rule == null) {
+ throw new IllegalArgumentException("Illegal null AudioMixingRule argument");
+ }
+ mRule = rule;
+ return this;
+ }
+
+ /**
+ * @hide
+ * Only used by AudioPolicyConfig, not a public API.
+ * @param callbackFlags which callbacks are called from native
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ Builder setCallbackFlags(int flags) throws IllegalArgumentException {
+ if ((flags != 0) && ((flags & CALLBACK_FLAGS_ALL) == 0)) {
+ throw new IllegalArgumentException("Illegal callback flags 0x"
+ + Integer.toHexString(flags).toUpperCase());
+ }
+ mCallbackFlags = flags;
+ return this;
+ }
+
+ /**
+ * @hide
+ * Only used by AudioPolicyConfig, not a public API.
+ * @param deviceType an AudioSystem.DEVICE_* value, not AudioDeviceInfo.TYPE_*
+ * @param address
+ * @return the same Builder instance.
+ */
+ Builder setDevice(int deviceType, String address) {
+ mDeviceSystemType = deviceType;
+ mDeviceAddress = address;
+ return this;
+ }
+
+ /**
+ * Sets the {@link AudioFormat} for the mix.
+ * @param format a non-null {@link AudioFormat} instance.
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ @SystemApi
+ public Builder setFormat(AudioFormat format)
+ throws IllegalArgumentException {
+ if (format == null) {
+ throw new IllegalArgumentException("Illegal null AudioFormat argument");
+ }
+ mFormat = format;
+ return this;
+ }
+
+ /**
+ * Sets the routing behavior for the mix. If not set, routing behavior will default to
+ * {@link AudioMix#ROUTE_FLAG_LOOP_BACK}.
+ * @param routeFlags one of {@link AudioMix#ROUTE_FLAG_LOOP_BACK},
+ * {@link AudioMix#ROUTE_FLAG_RENDER}
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ @SystemApi
+ public Builder setRouteFlags(@RouteFlags int routeFlags)
+ throws IllegalArgumentException {
+ if (routeFlags == 0) {
+ throw new IllegalArgumentException("Illegal empty route flags");
+ }
+ if ((routeFlags & ROUTE_FLAG_SUPPORTED) == 0) {
+ throw new IllegalArgumentException("Invalid route flags 0x"
+ + Integer.toHexString(routeFlags) + "when configuring an AudioMix");
+ }
+ if ((routeFlags & ~ROUTE_FLAG_SUPPORTED) != 0) {
+ throw new IllegalArgumentException("Unknown route flags 0x"
+ + Integer.toHexString(routeFlags) + "when configuring an AudioMix");
+ }
+ mRouteFlags = routeFlags;
+ return this;
+ }
+
+ /**
+ * Sets the audio device used for playback. Cannot be used in the context of an audio
+ * policy used to inject audio to be recorded, or in a mix whose route flags doesn't
+ * specify {@link AudioMix#ROUTE_FLAG_RENDER}.
+ * @param device a non-null AudioDeviceInfo describing the audio device to play the output
+ * of this mix.
+ * @return the same Builder instance
+ * @throws IllegalArgumentException
+ */
+ @SystemApi
+ public Builder setDevice(@NonNull AudioDeviceInfo device) throws IllegalArgumentException {
+ if (device == null) {
+ throw new IllegalArgumentException("Illegal null AudioDeviceInfo argument");
+ }
+ if (!device.isSink()) {
+ throw new IllegalArgumentException("Unsupported device type on mix, not a sink");
+ }
+ mDeviceSystemType = AudioDeviceInfo.convertDeviceTypeToInternalDevice(device.getType());
+ mDeviceAddress = device.getAddress();
+ return this;
+ }
+
+ /**
+ * Combines all of the settings and return a new {@link AudioMix} object.
+ * @return a new {@link AudioMix} object
+ * @throws IllegalArgumentException if no {@link AudioMixingRule} has been set.
+ */
+ @SystemApi
+ public AudioMix build() throws IllegalArgumentException {
+ if (mRule == null) {
+ throw new IllegalArgumentException("Illegal null AudioMixingRule");
+ }
+ if (mRouteFlags == 0) {
+ // no route flags set, use default as described in Builder.setRouteFlags(int)
+ mRouteFlags = ROUTE_FLAG_LOOP_BACK;
+ }
+ // can't do loop back AND render at same time in this implementation
+ if (mRouteFlags == (ROUTE_FLAG_RENDER | ROUTE_FLAG_LOOP_BACK)) {
+ throw new IllegalArgumentException("Unsupported route behavior combination 0x" +
+ Integer.toHexString(mRouteFlags));
+ }
+ if (mFormat == null) {
+ // FIXME Can we eliminate this? Will AudioMix work with an unspecified sample rate?
+ int rate = AudioSystem.getPrimaryOutputSamplingRate();
+ if (rate <= 0) {
+ rate = 44100;
+ }
+ mFormat = new AudioFormat.Builder().setSampleRate(rate).build();
+ }
+ if ((mDeviceSystemType != AudioSystem.DEVICE_NONE)
+ && (mDeviceSystemType != AudioSystem.DEVICE_OUT_REMOTE_SUBMIX)
+ && (mDeviceSystemType != AudioSystem.DEVICE_IN_REMOTE_SUBMIX)) {
+ if ((mRouteFlags & ROUTE_FLAG_RENDER) == 0) {
+ throw new IllegalArgumentException(
+ "Can't have audio device without flag ROUTE_FLAG_RENDER");
+ }
+ if (mRule.getTargetMixType() != AudioMix.MIX_TYPE_PLAYERS) {
+ throw new IllegalArgumentException("Unsupported device on non-playback mix");
+ }
+ } else {
+ if ((mRouteFlags & ROUTE_FLAG_RENDER) == ROUTE_FLAG_RENDER) {
+ throw new IllegalArgumentException(
+ "Can't have flag ROUTE_FLAG_RENDER without an audio device");
+ }
+ if ((mRouteFlags & ROUTE_FLAG_SUPPORTED) == ROUTE_FLAG_LOOP_BACK) {
+ if (mRule.getTargetMixType() == MIX_TYPE_PLAYERS) {
+ mDeviceSystemType = AudioSystem.DEVICE_OUT_REMOTE_SUBMIX;
+ } else if (mRule.getTargetMixType() == MIX_TYPE_RECORDERS) {
+ mDeviceSystemType = AudioSystem.DEVICE_IN_REMOTE_SUBMIX;
+ } else {
+ throw new IllegalArgumentException("Unknown mixing rule type");
+ }
+ }
+ }
+ return new AudioMix(mRule, mFormat, mRouteFlags, mCallbackFlags, mDeviceSystemType,
+ mDeviceAddress);
+ }
+ }
+}
diff --git a/android/media/audiopolicy/AudioMixingRule.java b/android/media/audiopolicy/AudioMixingRule.java
new file mode 100644
index 00000000..5f127421
--- /dev/null
+++ b/android/media/audiopolicy/AudioMixingRule.java
@@ -0,0 +1,482 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.audiopolicy;
+
+import android.annotation.SystemApi;
+import android.media.AudioAttributes;
+import android.os.Parcel;
+import android.util.Log;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.Objects;
+
+
+/**
+ * @hide
+ *
+ * Here's an example of creating a mixing rule for all media playback:
+ * <pre>
+ * AudioAttributes mediaAttr = new AudioAttributes.Builder()
+ * .setUsage(AudioAttributes.USAGE_MEDIA)
+ * .build();
+ * AudioMixingRule mediaRule = new AudioMixingRule.Builder()
+ * .addRule(mediaAttr, AudioMixingRule.RULE_MATCH_ATTRIBUTE_USAGE)
+ * .build();
+ * </pre>
+ */
+@SystemApi
+public class AudioMixingRule {
+
+ private AudioMixingRule(int mixType, ArrayList<AudioMixMatchCriterion> criteria) {
+ mCriteria = criteria;
+ mTargetMixType = mixType;
+ }
+
+ /**
+ * A rule requiring the usage information of the {@link AudioAttributes} to match.
+ * This mixing rule can be added with {@link Builder#addRule(AudioAttributes, int)} or
+ * {@link Builder#addMixRule(int, Object)} where the Object parameter is an instance of
+ * {@link AudioAttributes}.
+ */
+ @SystemApi
+ public static final int RULE_MATCH_ATTRIBUTE_USAGE = 0x1;
+ /**
+ * A rule requiring the capture preset information of the {@link AudioAttributes} to match.
+ * This mixing rule can be added with {@link Builder#addRule(AudioAttributes, int)} or
+ * {@link Builder#addMixRule(int, Object)} where the Object parameter is an instance of
+ * {@link AudioAttributes}.
+ */
+ @SystemApi
+ public static final int RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET = 0x1 << 1;
+ /**
+ * A rule requiring the UID of the audio stream to match that specified.
+ * This mixing rule can be added with {@link Builder#addMixRule(int, Object)} where the Object
+ * parameter is an instance of {@link java.lang.Integer}.
+ */
+ @SystemApi
+ public static final int RULE_MATCH_UID = 0x1 << 2;
+
+ private final static int RULE_EXCLUSION_MASK = 0x8000;
+ /**
+ * @hide
+ * A rule requiring the usage information of the {@link AudioAttributes} to differ.
+ */
+ public static final int RULE_EXCLUDE_ATTRIBUTE_USAGE =
+ RULE_EXCLUSION_MASK | RULE_MATCH_ATTRIBUTE_USAGE;
+ /**
+ * @hide
+ * A rule requiring the capture preset information of the {@link AudioAttributes} to differ.
+ */
+ public static final int RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET =
+ RULE_EXCLUSION_MASK | RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET;
+ /**
+ * @hide
+ * A rule requiring the UID information to differ.
+ */
+ public static final int RULE_EXCLUDE_UID =
+ RULE_EXCLUSION_MASK | RULE_MATCH_UID;
+
+ static final class AudioMixMatchCriterion {
+ final AudioAttributes mAttr;
+ final int mIntProp;
+ final int mRule;
+
+ /** input parameters must be valid */
+ AudioMixMatchCriterion(AudioAttributes attributes, int rule) {
+ mAttr = attributes;
+ mIntProp = Integer.MIN_VALUE;
+ mRule = rule;
+ }
+ /** input parameters must be valid */
+ AudioMixMatchCriterion(Integer intProp, int rule) {
+ mAttr = null;
+ mIntProp = intProp.intValue();
+ mRule = rule;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(mAttr, mIntProp, mRule);
+ }
+
+ void writeToParcel(Parcel dest) {
+ dest.writeInt(mRule);
+ final int match_rule = mRule & ~RULE_EXCLUSION_MASK;
+ switch (match_rule) {
+ case RULE_MATCH_ATTRIBUTE_USAGE:
+ dest.writeInt(mAttr.getUsage());
+ break;
+ case RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET:
+ dest.writeInt(mAttr.getCapturePreset());
+ break;
+ case RULE_MATCH_UID:
+ dest.writeInt(mIntProp);
+ break;
+ default:
+ Log.e("AudioMixMatchCriterion", "Unknown match rule" + match_rule
+ + " when writing to Parcel");
+ dest.writeInt(-1);
+ }
+ }
+ }
+
+ private final int mTargetMixType;
+ int getTargetMixType() { return mTargetMixType; }
+ private final ArrayList<AudioMixMatchCriterion> mCriteria;
+ ArrayList<AudioMixMatchCriterion> getCriteria() { return mCriteria; }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(mTargetMixType, mCriteria);
+ }
+
+ private static boolean isValidSystemApiRule(int rule) {
+ // API rules only expose the RULE_MATCH_* rules
+ switch (rule) {
+ case RULE_MATCH_ATTRIBUTE_USAGE:
+ case RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET:
+ case RULE_MATCH_UID:
+ return true;
+ default:
+ return false;
+ }
+ }
+ private static boolean isValidAttributesSystemApiRule(int rule) {
+ // API rules only expose the RULE_MATCH_* rules
+ switch (rule) {
+ case RULE_MATCH_ATTRIBUTE_USAGE:
+ case RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET:
+ return true;
+ default:
+ return false;
+ }
+ }
+
+ private static boolean isValidRule(int rule) {
+ final int match_rule = rule & ~RULE_EXCLUSION_MASK;
+ switch (match_rule) {
+ case RULE_MATCH_ATTRIBUTE_USAGE:
+ case RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET:
+ case RULE_MATCH_UID:
+ return true;
+ default:
+ return false;
+ }
+ }
+
+ private static boolean isPlayerRule(int rule) {
+ final int match_rule = rule & ~RULE_EXCLUSION_MASK;
+ switch (match_rule) {
+ case RULE_MATCH_ATTRIBUTE_USAGE:
+ case RULE_MATCH_UID:
+ return true;
+ default:
+ return false;
+ }
+ }
+
+ private static boolean isAudioAttributeRule(int match_rule) {
+ switch(match_rule) {
+ case RULE_MATCH_ATTRIBUTE_USAGE:
+ case RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET:
+ return true;
+ default:
+ return false;
+ }
+ }
+
+ /**
+ * Builder class for {@link AudioMixingRule} objects
+ */
+ @SystemApi
+ public static class Builder {
+ private ArrayList<AudioMixMatchCriterion> mCriteria;
+ private int mTargetMixType = AudioMix.MIX_TYPE_INVALID;
+
+ /**
+ * Constructs a new Builder with no rules.
+ */
+ @SystemApi
+ public Builder() {
+ mCriteria = new ArrayList<AudioMixMatchCriterion>();
+ }
+
+ /**
+ * Add a rule for the selection of which streams are mixed together.
+ * @param attrToMatch a non-null AudioAttributes instance for which a contradictory
+ * rule hasn't been set yet.
+ * @param rule {@link AudioMixingRule#RULE_MATCH_ATTRIBUTE_USAGE} or
+ * {@link AudioMixingRule#RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET}.
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ * @see #excludeRule(AudioAttributes, int)
+ */
+ @SystemApi
+ public Builder addRule(AudioAttributes attrToMatch, int rule)
+ throws IllegalArgumentException {
+ if (!isValidAttributesSystemApiRule(rule)) {
+ throw new IllegalArgumentException("Illegal rule value " + rule);
+ }
+ return checkAddRuleObjInternal(rule, attrToMatch);
+ }
+
+ /**
+ * Add a rule by exclusion for the selection of which streams are mixed together.
+ * <br>For instance the following code
+ * <br><pre>
+ * AudioAttributes mediaAttr = new AudioAttributes.Builder()
+ * .setUsage(AudioAttributes.USAGE_MEDIA)
+ * .build();
+ * AudioMixingRule noMediaRule = new AudioMixingRule.Builder()
+ * .excludeRule(mediaAttr, AudioMixingRule.RULE_MATCH_ATTRIBUTE_USAGE)
+ * .build();
+ * </pre>
+ * <br>will create a rule which maps to any usage value, except USAGE_MEDIA.
+ * @param attrToMatch a non-null AudioAttributes instance for which a contradictory
+ * rule hasn't been set yet.
+ * @param rule {@link AudioMixingRule#RULE_MATCH_ATTRIBUTE_USAGE} or
+ * {@link AudioMixingRule#RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET}.
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ * @see #addRule(AudioAttributes, int)
+ */
+ @SystemApi
+ public Builder excludeRule(AudioAttributes attrToMatch, int rule)
+ throws IllegalArgumentException {
+ if (!isValidAttributesSystemApiRule(rule)) {
+ throw new IllegalArgumentException("Illegal rule value " + rule);
+ }
+ return checkAddRuleObjInternal(rule | RULE_EXCLUSION_MASK, attrToMatch);
+ }
+
+ /**
+ * Add a rule for the selection of which streams are mixed together.
+ * The rule defines what the matching will be made on. It also determines the type of the
+ * property to match against.
+ * @param rule one of {@link AudioMixingRule#RULE_MATCH_ATTRIBUTE_USAGE},
+ * {@link AudioMixingRule#RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET} or
+ * {@link AudioMixingRule#RULE_MATCH_UID}.
+ * @param property see the definition of each rule for the type to use (either an
+ * {@link AudioAttributes} or an {@link java.lang.Integer}).
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ * @see #excludeMixRule(int, Object)
+ */
+ @SystemApi
+ public Builder addMixRule(int rule, Object property) throws IllegalArgumentException {
+ if (!isValidSystemApiRule(rule)) {
+ throw new IllegalArgumentException("Illegal rule value " + rule);
+ }
+ return checkAddRuleObjInternal(rule, property);
+ }
+
+ /**
+ * Add a rule by exclusion for the selection of which streams are mixed together.
+ * <br>For instance the following code
+ * <br><pre>
+ * AudioAttributes mediaAttr = new AudioAttributes.Builder()
+ * .setUsage(AudioAttributes.USAGE_MEDIA)
+ * .build();
+ * AudioMixingRule noMediaRule = new AudioMixingRule.Builder()
+ * .addMixRule(AudioMixingRule.RULE_MATCH_ATTRIBUTE_USAGE, mediaAttr)
+ * .excludeMixRule(AudioMixingRule.RULE_MATCH_UID, new Integer(uidToExclude)
+ * .build();
+ * </pre>
+ * <br>will create a rule which maps to usage USAGE_MEDIA, but excludes any stream
+ * coming from the specified UID.
+ * @param rule one of {@link AudioMixingRule#RULE_MATCH_ATTRIBUTE_USAGE},
+ * {@link AudioMixingRule#RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET} or
+ * {@link AudioMixingRule#RULE_MATCH_UID}.
+ * @param property see the definition of each rule for the type to use (either an
+ * {@link AudioAttributes} or an {@link java.lang.Integer}).
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ @SystemApi
+ public Builder excludeMixRule(int rule, Object property) throws IllegalArgumentException {
+ if (!isValidSystemApiRule(rule)) {
+ throw new IllegalArgumentException("Illegal rule value " + rule);
+ }
+ return checkAddRuleObjInternal(rule | RULE_EXCLUSION_MASK, property);
+ }
+
+ /**
+ * Add or exclude a rule for the selection of which streams are mixed together.
+ * Does error checking on the parameters.
+ * @param rule
+ * @param property
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ private Builder checkAddRuleObjInternal(int rule, Object property)
+ throws IllegalArgumentException {
+ if (property == null) {
+ throw new IllegalArgumentException("Illegal null argument for mixing rule");
+ }
+ if (!isValidRule(rule)) {
+ throw new IllegalArgumentException("Illegal rule value " + rule);
+ }
+ final int match_rule = rule & ~RULE_EXCLUSION_MASK;
+ if (isAudioAttributeRule(match_rule)) {
+ if (!(property instanceof AudioAttributes)) {
+ throw new IllegalArgumentException("Invalid AudioAttributes argument");
+ }
+ return addRuleInternal((AudioAttributes) property, null, rule);
+ } else {
+ // implies integer match rule
+ if (!(property instanceof Integer)) {
+ throw new IllegalArgumentException("Invalid Integer argument");
+ }
+ return addRuleInternal(null, (Integer) property, rule);
+ }
+ }
+
+ /**
+ * Add or exclude a rule on AudioAttributes or integer property for the selection of which
+ * streams are mixed together.
+ * No rule-to-parameter type check, all done in {@link #checkAddRuleObjInternal(int, Object)}.
+ * Exceptions are thrown only when incompatible rules are added.
+ * @param attrToMatch a non-null AudioAttributes instance for which a contradictory
+ * rule hasn't been set yet, null if not used.
+ * @param intProp an integer property to match or exclude, null if not used.
+ * @param rule one of {@link AudioMixingRule#RULE_EXCLUDE_ATTRIBUTE_USAGE},
+ * {@link AudioMixingRule#RULE_MATCH_ATTRIBUTE_USAGE},
+ * {@link AudioMixingRule#RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET} or
+ * {@link AudioMixingRule#RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET},
+ * {@link AudioMixingRule#RULE_MATCH_UID}, {@link AudioMixingRule#RULE_EXCLUDE_UID}.
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ private Builder addRuleInternal(AudioAttributes attrToMatch, Integer intProp, int rule)
+ throws IllegalArgumentException {
+ // as rules are added to the Builder, we verify they are consistent with the type
+ // of mix being built. When adding the first rule, the mix type is MIX_TYPE_INVALID.
+ if (mTargetMixType == AudioMix.MIX_TYPE_INVALID) {
+ if (isPlayerRule(rule)) {
+ mTargetMixType = AudioMix.MIX_TYPE_PLAYERS;
+ } else {
+ mTargetMixType = AudioMix.MIX_TYPE_RECORDERS;
+ }
+ } else if (((mTargetMixType == AudioMix.MIX_TYPE_PLAYERS) && !isPlayerRule(rule))
+ || ((mTargetMixType == AudioMix.MIX_TYPE_RECORDERS) && isPlayerRule(rule)))
+ {
+ throw new IllegalArgumentException("Incompatible rule for mix");
+ }
+ synchronized (mCriteria) {
+ Iterator<AudioMixMatchCriterion> crIterator = mCriteria.iterator();
+ final int match_rule = rule & ~RULE_EXCLUSION_MASK;
+ while (crIterator.hasNext()) {
+ final AudioMixMatchCriterion criterion = crIterator.next();
+ switch (match_rule) {
+ case RULE_MATCH_ATTRIBUTE_USAGE:
+ // "usage"-based rule
+ if (criterion.mAttr.getUsage() == attrToMatch.getUsage()) {
+ if (criterion.mRule == rule) {
+ // rule already exists, we're done
+ return this;
+ } else {
+ // criterion already exists with a another rule,
+ // it is incompatible
+ throw new IllegalArgumentException("Contradictory rule exists"
+ + " for " + attrToMatch);
+ }
+ }
+ break;
+ case RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET:
+ // "capture preset"-base rule
+ if (criterion.mAttr.getCapturePreset() == attrToMatch.getCapturePreset()) {
+ if (criterion.mRule == rule) {
+ // rule already exists, we're done
+ return this;
+ } else {
+ // criterion already exists with a another rule,
+ // it is incompatible
+ throw new IllegalArgumentException("Contradictory rule exists"
+ + " for " + attrToMatch);
+ }
+ }
+ break;
+ case RULE_MATCH_UID:
+ // "usage"-based rule
+ if (criterion.mIntProp == intProp.intValue()) {
+ if (criterion.mRule == rule) {
+ // rule already exists, we're done
+ return this;
+ } else {
+ // criterion already exists with a another rule,
+ // it is incompatible
+ throw new IllegalArgumentException("Contradictory rule exists"
+ + " for UID " + intProp);
+ }
+ }
+ break;
+ }
+ }
+ // rule didn't exist, add it
+ switch (match_rule) {
+ case RULE_MATCH_ATTRIBUTE_USAGE:
+ case RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET:
+ mCriteria.add(new AudioMixMatchCriterion(attrToMatch, rule));
+ break;
+ case RULE_MATCH_UID:
+ mCriteria.add(new AudioMixMatchCriterion(intProp, rule));
+ break;
+ default:
+ throw new IllegalStateException("Unreachable code in addRuleInternal()");
+ }
+ }
+ return this;
+ }
+
+ Builder addRuleFromParcel(Parcel in) throws IllegalArgumentException {
+ final int rule = in.readInt();
+ final int match_rule = rule & ~RULE_EXCLUSION_MASK;
+ AudioAttributes attr = null;
+ Integer intProp = null;
+ switch (match_rule) {
+ case RULE_MATCH_ATTRIBUTE_USAGE:
+ int usage = in.readInt();
+ attr = new AudioAttributes.Builder()
+ .setUsage(usage).build();
+ break;
+ case RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET:
+ int preset = in.readInt();
+ attr = new AudioAttributes.Builder()
+ .setInternalCapturePreset(preset).build();
+ break;
+ case RULE_MATCH_UID:
+ intProp = new Integer(in.readInt());
+ break;
+ default:
+ // assume there was in int value to read as for now they come in pair
+ in.readInt();
+ throw new IllegalArgumentException("Illegal rule value " + rule + " in parcel");
+ }
+ return addRuleInternal(attr, intProp, rule);
+ }
+
+ /**
+ * Combines all of the matching and exclusion rules that have been set and return a new
+ * {@link AudioMixingRule} object.
+ * @return a new {@link AudioMixingRule} object
+ */
+ public AudioMixingRule build() {
+ return new AudioMixingRule(mTargetMixType, mCriteria);
+ }
+ }
+}
diff --git a/android/media/audiopolicy/AudioPolicy.java b/android/media/audiopolicy/AudioPolicy.java
new file mode 100644
index 00000000..7e88c277
--- /dev/null
+++ b/android/media/audiopolicy/AudioPolicy.java
@@ -0,0 +1,624 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.audiopolicy;
+
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.SystemApi;
+import android.content.Context;
+import android.content.pm.PackageManager;
+import android.media.AudioAttributes;
+import android.media.AudioFocusInfo;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioRecord;
+import android.media.AudioTrack;
+import android.media.IAudioService;
+import android.media.MediaRecorder;
+import android.os.Binder;
+import android.os.Handler;
+import android.os.IBinder;
+import android.os.Looper;
+import android.os.Message;
+import android.os.RemoteException;
+import android.os.ServiceManager;
+import android.util.Log;
+import android.util.Slog;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.util.ArrayList;
+
+/**
+ * @hide
+ * AudioPolicy provides access to the management of audio routing and audio focus.
+ */
+@SystemApi
+public class AudioPolicy {
+
+ private static final String TAG = "AudioPolicy";
+ private static final boolean DEBUG = false;
+ private final Object mLock = new Object();
+
+ /**
+ * The status of an audio policy that is valid but cannot be used because it is not registered.
+ */
+ @SystemApi
+ public static final int POLICY_STATUS_UNREGISTERED = 1;
+ /**
+ * The status of an audio policy that is valid, successfully registered and thus active.
+ */
+ @SystemApi
+ public static final int POLICY_STATUS_REGISTERED = 2;
+
+ private int mStatus;
+ private String mRegistrationId;
+ private AudioPolicyStatusListener mStatusListener;
+ private boolean mIsFocusPolicy;
+
+ /**
+ * The behavior of a policy with regards to audio focus where it relies on the application
+ * to do the ducking, the is the legacy and default behavior.
+ */
+ @SystemApi
+ public static final int FOCUS_POLICY_DUCKING_IN_APP = 0;
+ public static final int FOCUS_POLICY_DUCKING_DEFAULT = FOCUS_POLICY_DUCKING_IN_APP;
+ /**
+ * The behavior of a policy with regards to audio focus where it handles ducking instead
+ * of the application losing focus and being signaled it can duck (as communicated by
+ * {@link android.media.AudioManager#AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK}).
+ * <br>Can only be used after having set a listener with
+ * {@link AudioPolicy#setAudioPolicyFocusListener(AudioPolicyFocusListener)}.
+ */
+ @SystemApi
+ public static final int FOCUS_POLICY_DUCKING_IN_POLICY = 1;
+
+ private AudioPolicyFocusListener mFocusListener;
+
+ private Context mContext;
+
+ private AudioPolicyConfig mConfig;
+
+    /** @hide Returns the mix configuration this policy was built with. */
+    public AudioPolicyConfig getConfig() { return mConfig; }
+    /** @hide Whether a focus listener was attached at build time. */
+    public boolean hasFocusListener() { return mFocusListener != null; }
+    /** @hide Whether this policy was declared to grant/deny audio focus itself. */
+    public boolean isFocusPolicy() { return mIsFocusPolicy; }
+
+    /**
+     * The config parameter is guaranteed non-null through the Builder; the looper
+     * and both listeners may legitimately be null.
+     */
+    private AudioPolicy(AudioPolicyConfig config, Context context, Looper looper,
+            AudioPolicyFocusListener fl, AudioPolicyStatusListener sl, boolean isFocusPolicy) {
+        mConfig = config;
+        mStatus = POLICY_STATUS_UNREGISTERED;
+        mContext = context;
+        if (looper == null) {
+            // no Looper supplied by the Builder: fall back to the main thread's looper
+            looper = Looper.getMainLooper();
+        }
+        if (looper != null) {
+            mEventHandler = new EventHandler(this, looper);
+        } else {
+            // getMainLooper() returned null: no prepared main looper in this process
+            mEventHandler = null;
+            Log.e(TAG, "No event handler due to looper without a thread");
+        }
+        mFocusListener = fl;
+        mStatusListener = sl;
+        mIsFocusPolicy = isFocusPolicy;
+    }
+
+ /**
+ * Builder class for {@link AudioPolicy} objects.
+ * By default the policy to be created doesn't govern audio focus decisions.
+ */
+ @SystemApi
+ public static class Builder {
+ private ArrayList<AudioMix> mMixes;
+ private Context mContext;
+ private Looper mLooper;
+ private AudioPolicyFocusListener mFocusListener;
+ private AudioPolicyStatusListener mStatusListener;
+ private boolean mIsFocusPolicy = false;
+
+ /**
+ * Constructs a new Builder with no audio mixes.
+ * @param context the context for the policy
+ */
+ @SystemApi
+ public Builder(Context context) {
+ mMixes = new ArrayList<AudioMix>();
+ mContext = context;
+ }
+
+ /**
+ * Add an {@link AudioMix} to be part of the audio policy being built.
+ * @param mix a non-null {@link AudioMix} to be part of the audio policy.
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ @SystemApi
+ public Builder addMix(@NonNull AudioMix mix) throws IllegalArgumentException {
+ if (mix == null) {
+ throw new IllegalArgumentException("Illegal null AudioMix argument");
+ }
+ mMixes.add(mix);
+ return this;
+ }
+
+ /**
+ * Sets the {@link Looper} on which to run the event loop.
+ * @param looper a non-null specific Looper.
+ * @return the same Builder instance.
+ * @throws IllegalArgumentException
+ */
+ @SystemApi
+ public Builder setLooper(@NonNull Looper looper) throws IllegalArgumentException {
+ if (looper == null) {
+ throw new IllegalArgumentException("Illegal null Looper argument");
+ }
+ mLooper = looper;
+ return this;
+ }
+
+ /**
+ * Sets the audio focus listener for the policy.
+ * @param l a {@link AudioPolicy.AudioPolicyFocusListener}
+ */
+ @SystemApi
+ public void setAudioPolicyFocusListener(AudioPolicyFocusListener l) {
+ mFocusListener = l;
+ }
+
+        /**
+         * Declares whether this policy will grant and deny audio focus through
+         * the {@link AudioPolicy.AudioPolicyFocusListener}.
+         * If set to {@code true}, it is mandatory to set an
+         * {@link AudioPolicy.AudioPolicyFocusListener} in order to successfully build
+         * an {@code AudioPolicy} instance.
+         * @param isFocusPolicy true if the policy will govern audio focus decisions.
+         * @return the same Builder instance.
+         */
+        @SystemApi
+        public Builder setIsAudioFocusPolicy(boolean isFocusPolicy) {
+            mIsFocusPolicy = isFocusPolicy;
+            return this;
+        }
+
+ /**
+ * Sets the audio policy status listener.
+ * @param l a {@link AudioPolicy.AudioPolicyStatusListener}
+ */
+ @SystemApi
+ public void setAudioPolicyStatusListener(AudioPolicyStatusListener l) {
+ mStatusListener = l;
+ }
+
+        /**
+         * Combines all of the attributes that have been set on this {@code Builder} and returns a
+         * new {@link AudioPolicy} object.
+         * @return a new {@code AudioPolicy} object.
+         * @throws IllegalStateException if there is no
+         *     {@link AudioPolicy.AudioPolicyFocusListener} but the policy was configured
+         *     as an audio focus policy with {@link #setIsAudioFocusPolicy(boolean)}.
+         */
+        @SystemApi
+        public AudioPolicy build() {
+            // validate first so a failed build leaves the mixes' callback flags untouched
+            if (mIsFocusPolicy && mFocusListener == null) {
+                throw new IllegalStateException("Cannot be a focus policy without "
+                        + "an AudioPolicyFocusListener");
+            }
+            if (mStatusListener != null) {
+                // the AudioPolicy status listener includes updates on each mix activity state
+                for (AudioMix mix : mMixes) {
+                    mix.mCallbackFlags |= AudioMix.CALLBACK_FLAG_NOTIFY_ACTIVITY;
+                }
+            }
+            return new AudioPolicy(new AudioPolicyConfig(mMixes), mContext, mLooper,
+                    mFocusListener, mStatusListener, mIsFocusPolicy);
+        }
+ }
+
+    /**
+     * Records the registration id assigned by the audio service and updates the
+     * policy status; a null id marks the policy as unregistered.
+     */
+    public void setRegistration(String regId) {
+        synchronized (mLock) {
+            mRegistrationId = regId;
+            mConfig.setRegistration(regId);
+            mStatus = (regId != null) ? POLICY_STATUS_REGISTERED : POLICY_STATUS_UNREGISTERED;
+        }
+        sendMsg(MSG_POLICY_STATUS_CHANGE);
+    }
+
+    /**
+     * Checks that this policy is registered, has a context, and that the caller
+     * holds MODIFY_AUDIO_ROUTING; logs the reason and returns false otherwise.
+     */
+    private boolean policyReadyToUse() {
+        synchronized (mLock) {
+            if (mStatus != POLICY_STATUS_REGISTERED) {
+                Log.e(TAG, "Cannot use unregistered AudioPolicy");
+                return false;
+            }
+            if (mContext == null) {
+                Log.e(TAG, "Cannot use AudioPolicy without context");
+                return false;
+            }
+            if (mRegistrationId == null) {
+                // distinct message from the status check above for easier triage
+                Log.e(TAG, "Cannot use AudioPolicy without registration");
+                return false;
+            }
+        }
+        // permission check performed outside the lock: no policy state is read below
+        if (mContext.checkCallingOrSelfPermission(
+                android.Manifest.permission.MODIFY_AUDIO_ROUTING)
+                != PackageManager.PERMISSION_GRANTED) {
+            Slog.w(TAG, "Cannot use AudioPolicy for pid " + Binder.getCallingPid() + " / uid "
+                    + Binder.getCallingUid() + ", needs MODIFY_AUDIO_ROUTING");
+            return false;
+        }
+        return true;
+    }
+
+    /**
+     * Precondition checks before creating an AudioRecord sink or AudioTrack source for a mix:
+     * the mix must be non-null, belong to this policy, be a LOOP_BACK mix, and have the mix
+     * type matching the requested endpoint.
+     * NOTE(review): for an AudioTrack the required mix type is MIX_TYPE_RECORDERS (the track
+     * acts as a source that recorders capture) and vice-versa — presumably an intentional
+     * inversion; confirm against the AudioMix documentation.
+     * @param mix the mix to validate.
+     * @param forTrack true when validating for AudioTrack creation, false for AudioRecord.
+     * @throws IllegalArgumentException if any check fails.
+     */
+    private void checkMixReadyToUse(AudioMix mix, boolean forTrack)
+            throws IllegalArgumentException{
+        if (mix == null) {
+            String msg = forTrack ? "Invalid null AudioMix for AudioTrack creation"
+                    : "Invalid null AudioMix for AudioRecord creation";
+            throw new IllegalArgumentException(msg);
+        }
+        if (!mConfig.mMixes.contains(mix)) {
+            throw new IllegalArgumentException("Invalid mix: not part of this policy");
+        }
+        // loop back routing is required for both directions
+        if ((mix.getRouteFlags() & AudioMix.ROUTE_FLAG_LOOP_BACK) != AudioMix.ROUTE_FLAG_LOOP_BACK)
+        {
+            throw new IllegalArgumentException("Invalid AudioMix: not defined for loop back");
+        }
+        if (forTrack && (mix.getMixType() != AudioMix.MIX_TYPE_RECORDERS)) {
+            throw new IllegalArgumentException(
+                    "Invalid AudioMix: not defined for being a recording source");
+        }
+        if (!forTrack && (mix.getMixType() != AudioMix.MIX_TYPE_PLAYERS)) {
+            throw new IllegalArgumentException(
+                    "Invalid AudioMix: not defined for capturing playback");
+        }
+    }
+
+ /**
+ * Returns the current behavior for audio focus-related ducking.
+ * @return {@link #FOCUS_POLICY_DUCKING_IN_APP} or {@link #FOCUS_POLICY_DUCKING_IN_POLICY}
+ */
+ @SystemApi
+ public int getFocusDuckingBehavior() {
+ return mConfig.mDuckingPolicy;
+ }
+
+    // Note on implementation: not part of the Builder as there can be only one registered policy
+    // that handles ducking but there can be multiple policies
+    /**
+     * Sets the behavior for audio focus-related ducking.
+     * There must be a focus listener if this policy is to handle ducking.
+     * @param behavior {@link #FOCUS_POLICY_DUCKING_IN_APP} or
+     *     {@link #FOCUS_POLICY_DUCKING_IN_POLICY}
+     * @return {@link AudioManager#SUCCESS} or {@link AudioManager#ERROR} (for instance if there
+     *     is already an audio policy that handles ducking).
+     * @throws IllegalArgumentException if behavior is not one of the two ducking constants
+     * @throws IllegalStateException if the policy is unregistered, or configured for
+     *     in-policy ducking without a focus listener
+     */
+    @SystemApi
+    public int setFocusDuckingBehavior(int behavior)
+            throws IllegalArgumentException, IllegalStateException {
+        if ((behavior != FOCUS_POLICY_DUCKING_IN_APP)
+                && (behavior != FOCUS_POLICY_DUCKING_IN_POLICY)) {
+            throw new IllegalArgumentException("Invalid ducking behavior " + behavior);
+        }
+        synchronized (mLock) {
+            if (mStatus != POLICY_STATUS_REGISTERED) {
+                throw new IllegalStateException(
+                        "Cannot change ducking behavior for unregistered policy");
+            }
+            if ((behavior == FOCUS_POLICY_DUCKING_IN_POLICY)
+                    && (mFocusListener == null)) {
+                // there must be a focus listener if the policy handles ducking
+                throw new IllegalStateException(
+                        "Cannot handle ducking without an audio focus listener");
+            }
+            IAudioService service = getService();
+            try {
+                final int status = service.setFocusPropertiesForPolicy(behavior /*duckingBehavior*/,
+                        this.cb());
+                if (status == AudioManager.SUCCESS) {
+                    // only commit the local config once the service accepted the change
+                    mConfig.mDuckingPolicy = behavior;
+                }
+                return status;
+            } catch (RemoteException e) {
+                Log.e(TAG, "Dead object in setFocusPropertiesForPolicy for behavior", e);
+                return AudioManager.ERROR;
+            }
+        }
+    }
+
+    /**
+     * Create an {@link AudioRecord} instance that is associated with the given {@link AudioMix}.
+     * Audio buffers recorded through the created instance will contain the mix of the audio
+     * streams that fed the given mixer.
+     * @param mix a non-null {@link AudioMix} instance whose routing flags was defined with
+     *     {@link AudioMix#ROUTE_FLAG_LOOP_BACK}, previously added to this policy.
+     * @return a new {@link AudioRecord} instance whose data format is the one defined in the
+     *     {@link AudioMix}, or null if this policy was not successfully registered
+     *     with {@link AudioManager#registerAudioPolicy(AudioPolicy)}.
+     * @throws IllegalArgumentException if the mix fails the checks in checkMixReadyToUse()
+     */
+    @SystemApi
+    public AudioRecord createAudioRecordSink(AudioMix mix) throws IllegalArgumentException {
+        if (!policyReadyToUse()) {
+            Log.e(TAG, "Cannot create AudioRecord sink for AudioMix");
+            return null;
+        }
+        checkMixReadyToUse(mix, false/*not for an AudioTrack*/);
+        // create an AudioFormat from the mix format compatible with recording, as the mix
+        // was defined for playback
+        AudioFormat mixFormat = new AudioFormat.Builder(mix.getFormat())
+                .setChannelMask(AudioFormat.inChannelMaskFromOutChannelMask(
+                        mix.getFormat().getChannelMask()))
+                .build();
+        // create the AudioRecord, configured for loop back, using the same format as the mix
+        AudioRecord ar = new AudioRecord(
+                new AudioAttributes.Builder()
+                        .setInternalCapturePreset(MediaRecorder.AudioSource.REMOTE_SUBMIX)
+                        // the address tag routes this record to this policy's submix
+                        .addTag(addressForTag(mix))
+                        .build(),
+                mixFormat,
+                AudioRecord.getMinBufferSize(mix.getFormat().getSampleRate(),
+                        // using stereo for buffer size to avoid the current poor support for masks
+                        AudioFormat.CHANNEL_IN_STEREO, mix.getFormat().getEncoding()),
+                AudioManager.AUDIO_SESSION_ID_GENERATE
+                );
+        return ar;
+    }
+
+    /**
+     * Create an {@link AudioTrack} instance that is associated with the given {@link AudioMix}.
+     * Audio buffers played through the created instance will be sent to the given mix
+     * to be recorded through the recording APIs.
+     * @param mix a non-null {@link AudioMix} instance whose routing flags was defined with
+     *     {@link AudioMix#ROUTE_FLAG_LOOP_BACK}, previously added to this policy.
+     * @return a new {@link AudioTrack} instance whose data format is the one defined in the
+     *     {@link AudioMix}, or null if this policy was not successfully registered
+     *     with {@link AudioManager#registerAudioPolicy(AudioPolicy)}.
+     * @throws IllegalArgumentException if the mix fails the checks in checkMixReadyToUse()
+     */
+    @SystemApi
+    public AudioTrack createAudioTrackSource(AudioMix mix) throws IllegalArgumentException {
+        if (!policyReadyToUse()) {
+            Log.e(TAG, "Cannot create AudioTrack source for AudioMix");
+            return null;
+        }
+        checkMixReadyToUse(mix, true/*for an AudioTrack*/);
+        // create the AudioTrack, configured for loop back, using the same format as the mix
+        AudioTrack at = new AudioTrack(
+                new AudioAttributes.Builder()
+                        .setUsage(AudioAttributes.USAGE_VIRTUAL_SOURCE)
+                        // the address tag routes this track to this policy's submix
+                        .addTag(addressForTag(mix))
+                        .build(),
+                mix.getFormat(),
+                AudioTrack.getMinBufferSize(mix.getFormat().getSampleRate(),
+                        mix.getFormat().getChannelMask(), mix.getFormat().getEncoding()),
+                AudioTrack.MODE_STREAM,
+                AudioManager.AUDIO_SESSION_ID_GENERATE
+                );
+        return at;
+    }
+
+ @SystemApi
+ public int getStatus() {
+ return mStatus;
+ }
+
+ @SystemApi
+ public static abstract class AudioPolicyStatusListener {
+ public void onStatusChange() {}
+ public void onMixStateUpdate(AudioMix mix) {}
+ }
+
+    @SystemApi
+    public static abstract class AudioPolicyFocusListener {
+        /** Called when the framework grants audio focus to an application. */
+        public void onAudioFocusGrant(AudioFocusInfo afi, int requestResult) {}
+        /** Called when an application loses audio focus. */
+        public void onAudioFocusLoss(AudioFocusInfo afi, boolean wasNotified) {}
+        /**
+         * Called whenever an application requests audio focus.
+         * Only ever called if the {@link AudioPolicy} was built with
+         * {@link AudioPolicy.Builder#setIsAudioFocusPolicy(boolean)} set to {@code true}.
+         * @param afi information about the focus request and the requester
+         * @param requestResult the result that was returned synchronously by the framework to
+         *     the application, {@link AudioManager#AUDIOFOCUS_REQUEST_FAILED}, or
+         *     {@link AudioManager#AUDIOFOCUS_REQUEST_DELAYED}.
+         */
+        public void onAudioFocusRequest(AudioFocusInfo afi, int requestResult) {}
+        /**
+         * Called whenever an application abandons audio focus.
+         * Only ever called if the {@link AudioPolicy} was built with
+         * {@link AudioPolicy.Builder#setIsAudioFocusPolicy(boolean)} set to {@code true}.
+         * @param afi information about the focus request being abandoned and the original
+         *     requester.
+         */
+        public void onAudioFocusAbandon(AudioFocusInfo afi) {}
+    }
+
+ private void onPolicyStatusChange() {
+ AudioPolicyStatusListener l;
+ synchronized (mLock) {
+ if (mStatusListener == null) {
+ return;
+ }
+ l = mStatusListener;
+ }
+ l.onStatusChange();
+ }
+
+ //==================================================
+ // Callback interface
+
+ /** @hide */
+ public IAudioPolicyCallback cb() { return mPolicyCb; }
+
+    // Binder callbacks arrive on a binder thread; each one is forwarded to the
+    // event handler so listener methods run on the policy's Looper.
+    private final IAudioPolicyCallback mPolicyCb = new IAudioPolicyCallback.Stub() {
+
+        public void notifyAudioFocusGrant(AudioFocusInfo afi, int requestResult) {
+            sendMsg(MSG_FOCUS_GRANT, afi, requestResult);
+            if (DEBUG) {
+                Log.v(TAG, "notifyAudioFocusGrant: pack=" + afi.getPackageName() + " client="
+                        + afi.getClientId() + "reqRes=" + requestResult);
+            }
+        }
+
+        public void notifyAudioFocusLoss(AudioFocusInfo afi, boolean wasNotified) {
+            sendMsg(MSG_FOCUS_LOSS, afi, wasNotified ? 1 : 0);
+            if (DEBUG) {
+                Log.v(TAG, "notifyAudioFocusLoss: pack=" + afi.getPackageName() + " client="
+                        + afi.getClientId() + "wasNotified=" + wasNotified);
+            }
+        }
+
+        public void notifyAudioFocusRequest(AudioFocusInfo afi, int requestResult) {
+            sendMsg(MSG_FOCUS_REQUEST, afi, requestResult);
+            if (DEBUG) {
+                Log.v(TAG, "notifyAudioFocusRequest: pack=" + afi.getPackageName() + " client="
+                        + afi.getClientId() + "reqRes=" + requestResult);
+            }
+        }
+
+        public void notifyAudioFocusAbandon(AudioFocusInfo afi) {
+            sendMsg(MSG_FOCUS_ABANDON, afi, 0 /* ignored */);
+            if (DEBUG) {
+                Log.v(TAG, "notifyAudioFocusAbandon: pack=" + afi.getPackageName() + " client="
+                        + afi.getClientId());
+            }
+        }
+
+        public void notifyMixStateUpdate(String regId, int state) {
+            for (AudioMix mix : mConfig.getMixes()) {
+                // NOTE(review): assumes every mix has a non-null registration string — confirm
+                if (mix.getRegistration().equals(regId)) {
+                    mix.mMixState = state;
+                    sendMsg(MSG_MIX_STATE_UPDATE, mix, 0/*ignored*/);
+                    if (DEBUG) {
+                        Log.v(TAG, "notifyMixStateUpdate: regId=" + regId + " state=" + state);
+                    }
+                }
+            }
+        }
+    };
+
+ //==================================================
+ // Event handling
+ private final EventHandler mEventHandler;
+ private final static int MSG_POLICY_STATUS_CHANGE = 0;
+ private final static int MSG_FOCUS_GRANT = 1;
+ private final static int MSG_FOCUS_LOSS = 2;
+ private final static int MSG_MIX_STATE_UPDATE = 3;
+ private final static int MSG_FOCUS_REQUEST = 4;
+ private final static int MSG_FOCUS_ABANDON = 5;
+
+    /** Dispatches posted policy events to the status and focus listeners. */
+    private class EventHandler extends Handler {
+        // NOTE(review): the AudioPolicy parameter is unused; the handler reads the
+        // enclosing instance's listeners directly. Kept for signature stability.
+        public EventHandler(AudioPolicy ap, Looper looper) {
+            super(looper);
+        }
+
+        @Override
+        public void handleMessage(Message msg) {
+            switch(msg.what) {
+                case MSG_POLICY_STATUS_CHANGE:
+                    onPolicyStatusChange();
+                    break;
+                case MSG_FOCUS_GRANT:
+                    if (mFocusListener != null) {
+                        mFocusListener.onAudioFocusGrant(
+                                (AudioFocusInfo) msg.obj, msg.arg1);
+                    }
+                    break;
+                case MSG_FOCUS_LOSS:
+                    if (mFocusListener != null) {
+                        mFocusListener.onAudioFocusLoss(
+                                (AudioFocusInfo) msg.obj, msg.arg1 != 0);
+                    }
+                    break;
+                case MSG_MIX_STATE_UPDATE:
+                    if (mStatusListener != null) {
+                        mStatusListener.onMixStateUpdate((AudioMix) msg.obj);
+                    }
+                    break;
+                case MSG_FOCUS_REQUEST:
+                    if (mFocusListener != null) {
+                        mFocusListener.onAudioFocusRequest((AudioFocusInfo) msg.obj, msg.arg1);
+                    } else { // should never be null, but don't crash
+                        Log.e(TAG, "Invalid null focus listener for focus request event");
+                    }
+                    break;
+                case MSG_FOCUS_ABANDON:
+                    if (mFocusListener != null) { // should never be null
+                        mFocusListener.onAudioFocusAbandon((AudioFocusInfo) msg.obj);
+                    } else { // should never be null, but don't crash
+                        Log.e(TAG, "Invalid null focus listener for focus abandon event");
+                    }
+                    break;
+                default:
+                    Log.e(TAG, "Unknown event " + msg.what);
+            }
+        }
+    }
+
+ //==========================================================
+ // Utils
+ private static String addressForTag(AudioMix mix) {
+ return "addr=" + mix.getRegistration();
+ }
+
+    /** Posts a no-argument event to the policy's handler, if one exists. */
+    private void sendMsg(int msg) {
+        if (mEventHandler != null) {
+            mEventHandler.sendEmptyMessage(msg);
+        }
+    }
+
+    /** Posts an event with an object payload and one int argument to the handler. */
+    private void sendMsg(int msg, Object obj, int i) {
+        if (mEventHandler != null) {
+            mEventHandler.sendMessage(
+                    mEventHandler.obtainMessage(msg, i /*arg1*/, 0 /*arg2, ignored*/, obj));
+        }
+    }
+
+    // cached binder proxy to the audio service
+    private static IAudioService sService;
+
+    /**
+     * Lazily fetches and caches the IAudioService binder proxy.
+     * NOTE(review): unsynchronized lazy init — benign race; at worst the service
+     * proxy is looked up more than once.
+     */
+    private static IAudioService getService()
+    {
+        if (sService != null) {
+            return sService;
+        }
+        IBinder b = ServiceManager.getService(Context.AUDIO_SERVICE);
+        sService = IAudioService.Stub.asInterface(b);
+        return sService;
+    }
+
+    /** Returns a human-readable dump of this policy for logging. */
+    public String toLogFriendlyString() {
+        // start from a literal; no need for the new String(...) antipattern
+        String textDump = "android.media.audiopolicy.AudioPolicy:\n";
+        textDump += "config=" + mConfig.toLogFriendlyString();
+        return (textDump);
+    }
+
+ /** @hide */
+ @IntDef({
+ POLICY_STATUS_REGISTERED,
+ POLICY_STATUS_UNREGISTERED
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface PolicyStatus {}
+}
diff --git a/android/media/audiopolicy/AudioPolicyConfig.java b/android/media/audiopolicy/AudioPolicyConfig.java
new file mode 100644
index 00000000..cafa5a8c
--- /dev/null
+++ b/android/media/audiopolicy/AudioPolicyConfig.java
@@ -0,0 +1,230 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.audiopolicy;
+
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioPatch;
+import android.media.audiopolicy.AudioMixingRule.AudioMixMatchCriterion;
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.util.Log;
+
+import java.util.ArrayList;
+import java.util.Objects;
+
+/**
+ * @hide
+ * Internal storage class for AudioPolicy configuration.
+ */
+public class AudioPolicyConfig implements Parcelable {
+
+ private static final String TAG = "AudioPolicyConfig";
+
+ protected ArrayList<AudioMix> mMixes;
+ protected int mDuckingPolicy = AudioPolicy.FOCUS_POLICY_DUCKING_IN_APP;
+
+ private String mRegistrationId = null;
+
+    /**
+     * Copy constructor; note this shares (does not deep-copy) the mix list of the
+     * source configuration.
+     */
+    protected AudioPolicyConfig(AudioPolicyConfig conf) {
+        mMixes = conf.mMixes;
+    }
+
+    /** Wraps (does not copy) the given mix list. */
+    AudioPolicyConfig(ArrayList<AudioMix> mixes) {
+        mMixes = mixes;
+    }
+
+    /**
+     * Add an {@link AudioMix} to be part of the audio policy being built.
+     * @param mix a non-null {@link AudioMix} to be part of the audio policy.
+     * @throws IllegalArgumentException if the mix is null
+     */
+    public void addMix(AudioMix mix) throws IllegalArgumentException {
+        if (mix == null) {
+            throw new IllegalArgumentException("Illegal null AudioMix argument");
+        }
+        mMixes.add(mix);
+    }
+
+    /** Returns the internal (mutable) mix list; callers must not modify it. */
+    public ArrayList<AudioMix> getMixes() {
+        return mMixes;
+    }
+
+    @Override
+    public int hashCode() {
+        // NOTE(review): hashCode() is overridden without equals(); instances still
+        // compare by identity — confirm this asymmetry is intentional.
+        return Objects.hash(mMixes);
+    }
+
+    @Override
+    public int describeContents() {
+        return 0;
+    }
+
+    /**
+     * Flattens this config to a parcel. The writing order must stay in sync with
+     * the reading order in AudioPolicyConfig(Parcel).
+     */
+    @Override
+    public void writeToParcel(Parcel dest, int flags) {
+        dest.writeInt(mMixes.size());
+        for (AudioMix mix : mMixes) {
+            // write mix route flags
+            dest.writeInt(mix.getRouteFlags());
+            // write callback flags
+            dest.writeInt(mix.mCallbackFlags);
+            // write device information
+            dest.writeInt(mix.mDeviceSystemType);
+            dest.writeString(mix.mDeviceAddress);
+            // write mix format
+            dest.writeInt(mix.getFormat().getSampleRate());
+            dest.writeInt(mix.getFormat().getEncoding());
+            dest.writeInt(mix.getFormat().getChannelMask());
+            // write mix rules
+            final ArrayList<AudioMixMatchCriterion> criteria = mix.getRule().getCriteria();
+            dest.writeInt(criteria.size());
+            for (AudioMixMatchCriterion criterion : criteria) {
+                criterion.writeToParcel(dest);
+            }
+        }
+    }
+
+    /**
+     * Rebuilds a config from a parcel; the reading order must stay in sync with
+     * the writing order in writeToParcel().
+     */
+    private AudioPolicyConfig(Parcel in) {
+        mMixes = new ArrayList<AudioMix>();
+        int nbMixes = in.readInt();
+        for (int i = 0 ; i < nbMixes ; i++) {
+            final AudioMix.Builder mixBuilder = new AudioMix.Builder();
+            // read mix route flags
+            int routeFlags = in.readInt();
+            mixBuilder.setRouteFlags(routeFlags);
+            // read callback flags
+            mixBuilder.setCallbackFlags(in.readInt());
+            // read device information
+            mixBuilder.setDevice(in.readInt(), in.readString());
+            // read mix format
+            int sampleRate = in.readInt();
+            int encoding = in.readInt();
+            int channelMask = in.readInt();
+            final AudioFormat format = new AudioFormat.Builder().setSampleRate(sampleRate)
+                    .setChannelMask(channelMask).setEncoding(encoding).build();
+            mixBuilder.setFormat(format);
+            // read mix rules
+            int nbRules = in.readInt();
+            AudioMixingRule.Builder ruleBuilder = new AudioMixingRule.Builder();
+            for (int j = 0 ; j < nbRules ; j++) {
+                // read the matching rules
+                ruleBuilder.addRuleFromParcel(in);
+            }
+            mixBuilder.setMixingRule(ruleBuilder.build());
+            mMixes.add(mixBuilder.build());
+        }
+    }
+
+ public static final Parcelable.Creator<AudioPolicyConfig> CREATOR
+ = new Parcelable.Creator<AudioPolicyConfig>() {
+ /**
+ * Rebuilds an AudioPolicyConfig previously stored with writeToParcel().
+ * @param p Parcel object to read the AudioPolicyConfig from
+ * @return a new AudioPolicyConfig created from the data in the parcel
+ */
+ public AudioPolicyConfig createFromParcel(Parcel p) {
+ return new AudioPolicyConfig(p);
+ }
+ public AudioPolicyConfig[] newArray(int size) {
+ return new AudioPolicyConfig[size];
+ }
+ };
+
+    /** Returns a multi-line human-readable dump of this configuration for logging. */
+    public String toLogFriendlyString () {
+        // StringBuilder instead of repeated String concatenation in the nested loops
+        final StringBuilder textDump =
+                new StringBuilder("android.media.audiopolicy.AudioPolicyConfig:\n");
+        textDump.append(mMixes.size()).append(" AudioMix: ").append(mRegistrationId).append("\n");
+        for (AudioMix mix : mMixes) {
+            // mix route flags
+            textDump.append("* route flags=0x")
+                    .append(Integer.toHexString(mix.getRouteFlags())).append("\n");
+            // mix format
+            textDump.append("  rate=").append(mix.getFormat().getSampleRate()).append("Hz\n");
+            textDump.append("  encoding=").append(mix.getFormat().getEncoding()).append("\n");
+            textDump.append("  channels=0x");
+            textDump.append(Integer.toHexString(mix.getFormat().getChannelMask()).toUpperCase())
+                    .append("\n");
+            // mix rules
+            final ArrayList<AudioMixMatchCriterion> criteria = mix.getRule().getCriteria();
+            for (AudioMixMatchCriterion criterion : criteria) {
+                switch(criterion.mRule) {
+                    case AudioMixingRule.RULE_EXCLUDE_ATTRIBUTE_USAGE:
+                        textDump.append("  exclude usage ");
+                        textDump.append(criterion.mAttr.usageToString());
+                        break;
+                    case AudioMixingRule.RULE_MATCH_ATTRIBUTE_USAGE:
+                        textDump.append("  match usage ");
+                        textDump.append(criterion.mAttr.usageToString());
+                        break;
+                    case AudioMixingRule.RULE_EXCLUDE_ATTRIBUTE_CAPTURE_PRESET:
+                        textDump.append("  exclude capture preset ");
+                        textDump.append(criterion.mAttr.getCapturePreset());
+                        break;
+                    case AudioMixingRule.RULE_MATCH_ATTRIBUTE_CAPTURE_PRESET:
+                        textDump.append("  match capture preset ");
+                        textDump.append(criterion.mAttr.getCapturePreset());
+                        break;
+                    case AudioMixingRule.RULE_MATCH_UID:
+                        textDump.append("  match UID ");
+                        textDump.append(criterion.mIntProp);
+                        break;
+                    case AudioMixingRule.RULE_EXCLUDE_UID:
+                        textDump.append("  exclude UID ");
+                        textDump.append(criterion.mIntProp);
+                        break;
+                    default:
+                        textDump.append("invalid rule!");
+                }
+                textDump.append("\n");
+            }
+        }
+        return textDump.toString();
+    }
+
+    /**
+     * Records the registration id received from the audio service and derives a
+     * per-mix registration string. A null or empty id clears all mix registrations.
+     * A direct transition between two different non-empty ids is rejected and logged.
+     */
+    protected void setRegistration(String regId) {
+        final boolean currentRegNull = (mRegistrationId == null) || mRegistrationId.isEmpty();
+        final boolean newRegNull = (regId == null) || regId.isEmpty();
+        if (!currentRegNull && !newRegNull && !mRegistrationId.equals(regId)) {
+            Log.e(TAG, "Invalid registration transition from " + mRegistrationId + " to " + regId);
+            return;
+        }
+        mRegistrationId = regId == null ? "" : regId;
+        int mixIndex = 0;
+        for (AudioMix mix : mMixes) {
+            if (!mRegistrationId.isEmpty()) {
+                if ((mix.getRouteFlags() & AudioMix.ROUTE_FLAG_LOOP_BACK) ==
+                        AudioMix.ROUTE_FLAG_LOOP_BACK) {
+                    // loop back mixes get a unique "<regId>mix<type>:<index>" name
+                    mix.setRegistration(mRegistrationId + "mix" + mixTypeId(mix.getMixType()) + ":"
+                            + mixIndex++);
+                } else if ((mix.getRouteFlags() & AudioMix.ROUTE_FLAG_RENDER) ==
+                        AudioMix.ROUTE_FLAG_RENDER) {
+                    // render mixes are addressed by their target device
+                    mix.setRegistration(mix.mDeviceAddress);
+                }
+            } else {
+                mix.setRegistration("");
+            }
+        }
+    }
+
+    /** Maps a mix type to the one-letter id used in registration strings. */
+    private static String mixTypeId(int type) {
+        switch (type) {
+            case AudioMix.MIX_TYPE_PLAYERS:
+                return "p";
+            case AudioMix.MIX_TYPE_RECORDERS:
+                return "r";
+            default:
+                return "i";
+        }
+    }
+
+ protected String getRegistration() {
+ return mRegistrationId;
+ }
+}
diff --git a/android/media/browse/MediaBrowser.java b/android/media/browse/MediaBrowser.java
new file mode 100644
index 00000000..2bccd884
--- /dev/null
+++ b/android/media/browse/MediaBrowser.java
@@ -0,0 +1,1171 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.browse;
+
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.content.ComponentName;
+import android.content.Context;
+import android.content.Intent;
+import android.content.ServiceConnection;
+import android.content.pm.ParceledListSlice;
+import android.media.MediaDescription;
+import android.media.session.MediaController;
+import android.media.session.MediaSession;
+import android.os.Binder;
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.IBinder;
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.os.RemoteException;
+import android.os.ResultReceiver;
+import android.service.media.IMediaBrowserService;
+import android.service.media.IMediaBrowserServiceCallbacks;
+import android.service.media.MediaBrowserService;
+import android.text.TextUtils;
+import android.util.ArrayMap;
+import android.util.Log;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.ref.WeakReference;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map.Entry;
+
+/**
 * Browses media content offered by a {@link android.service.media.MediaBrowserService}.
+ * <p>
+ * This object is not thread-safe. All calls should happen on the thread on which the browser
+ * was constructed.
+ * </p>
+ * <h3>Standard Extra Data</h3>
+ *
+ * <p>These are the current standard fields that can be used as extra data via
+ * {@link #subscribe(String, Bundle, SubscriptionCallback)},
+ * {@link #unsubscribe(String, SubscriptionCallback)}, and
+ * {@link SubscriptionCallback#onChildrenLoaded(String, List, Bundle)}.
+ *
+ * <ul>
+ * <li> {@link #EXTRA_PAGE}
+ * <li> {@link #EXTRA_PAGE_SIZE}
+ * </ul>
+ */
+public final class MediaBrowser {
    private static final String TAG = "MediaBrowser";
    private static final boolean DBG = false;

    /**
     * Used as an int extra field to denote the page number to subscribe.
     * The value of {@code EXTRA_PAGE} should be greater than or equal to 0.
     *
     * @see #EXTRA_PAGE_SIZE
     */
    public static final String EXTRA_PAGE = "android.media.browse.extra.PAGE";

    /**
     * Used as an int extra field to denote the number of media items in a page.
     * The value of {@code EXTRA_PAGE_SIZE} should be greater than or equal to 1.
     *
     * @see #EXTRA_PAGE
     */
    public static final String EXTRA_PAGE_SIZE = "android.media.browse.extra.PAGE_SIZE";

    // Connection state machine values held in mState.
    private static final int CONNECT_STATE_DISCONNECTING = 0;
    private static final int CONNECT_STATE_DISCONNECTED = 1;
    private static final int CONNECT_STATE_CONNECTING = 2;
    private static final int CONNECT_STATE_CONNECTED = 3;
    private static final int CONNECT_STATE_SUSPENDED = 4;

    private final Context mContext;
    private final ComponentName mServiceComponent;
    private final ConnectionCallback mCallback;
    private final Bundle mRootHints;
    // Bound to the looper of the constructing thread; all work and callbacks are posted here.
    private final Handler mHandler = new Handler();
    // Keyed by parent media id; survives disconnect and is replayed on reconnect.
    private final ArrayMap<String, Subscription> mSubscriptions = new ArrayMap<>();

    // volatile: written on the handler thread but read by public getters from any thread.
    private volatile int mState = CONNECT_STATE_DISCONNECTED;
    private volatile String mRootId;
    private volatile MediaSession.Token mMediaSessionToken;
    private volatile Bundle mExtras;

    // Connection plumbing; only touched on the handler thread.
    private MediaServiceConnection mServiceConnection;
    private IMediaBrowserService mServiceBinder;
    private IMediaBrowserServiceCallbacks mServiceCallbacks;
+
    /**
     * Creates a media browser for the specified media browser service.
     *
     * @param context The context.
     * @param serviceComponent The component name of the media browser service.
     * @param callback The connection callback.
     * @param rootHints An optional bundle of service-specific arguments to send
     * to the media browser service when connecting and retrieving the root id
     * for browsing, or null if none. The contents of this bundle may affect
     * the information returned when browsing.
     * @throws IllegalArgumentException if context, serviceComponent, or callback is null.
     * @see android.service.media.MediaBrowserService.BrowserRoot#EXTRA_RECENT
     * @see android.service.media.MediaBrowserService.BrowserRoot#EXTRA_OFFLINE
     * @see android.service.media.MediaBrowserService.BrowserRoot#EXTRA_SUGGESTED
     */
    public MediaBrowser(Context context, ComponentName serviceComponent,
            ConnectionCallback callback, Bundle rootHints) {
        if (context == null) {
            throw new IllegalArgumentException("context must not be null");
        }
        if (serviceComponent == null) {
            throw new IllegalArgumentException("service component must not be null");
        }
        if (callback == null) {
            throw new IllegalArgumentException("connection callback must not be null");
        }
        mContext = context;
        mServiceComponent = serviceComponent;
        mCallback = callback;
        // Defensive copy so later caller mutation cannot change what we send on connect.
        mRootHints = rootHints == null ? null : new Bundle(rootHints);
    }
+
    /**
     * Connects to the media browser service.
     * <p>
     * The connection callback specified in the constructor will be invoked
     * when the connection completes or fails.
     * </p>
     *
     * @throws IllegalStateException if already connecting or connected.
     */
    public void connect() {
        if (mState != CONNECT_STATE_DISCONNECTING && mState != CONNECT_STATE_DISCONNECTED) {
            throw new IllegalStateException("connect() called while neither disconnecting nor "
                    + "disconnected (state=" + getStateLabel(mState) + ")");
        }

        mState = CONNECT_STATE_CONNECTING;
        // The actual bind happens on the handler thread so it serializes with disconnect().
        mHandler.post(new Runnable() {
            @Override
            public void run() {
                // disconnect() may have run between connect() and this runnable; bail out.
                if (mState == CONNECT_STATE_DISCONNECTING) {
                    return;
                }
                mState = CONNECT_STATE_CONNECTING;
                // TODO: remove this extra check.
                if (DBG) {
                    if (mServiceConnection != null) {
                        throw new RuntimeException("mServiceConnection should be null. Instead it"
                                + " is " + mServiceConnection);
                    }
                }
                if (mServiceBinder != null) {
                    throw new RuntimeException("mServiceBinder should be null. Instead it is "
                            + mServiceBinder);
                }
                if (mServiceCallbacks != null) {
                    throw new RuntimeException("mServiceCallbacks should be null. Instead it is "
                            + mServiceCallbacks);
                }

                final Intent intent = new Intent(MediaBrowserService.SERVICE_INTERFACE);
                intent.setComponent(mServiceComponent);

                mServiceConnection = new MediaServiceConnection();

                boolean bound = false;
                try {
                    bound = mContext.bindService(intent, mServiceConnection,
                            Context.BIND_AUTO_CREATE);
                } catch (Exception ex) {
                    Log.e(TAG, "Failed binding to service " + mServiceComponent);
                }

                if (!bound) {
                    // Tell them that it didn't work.
                    forceCloseConnection();
                    mCallback.onConnectionFailed();
                }

                if (DBG) {
                    Log.d(TAG, "connect...");
                    dump();
                }
            }
        });
    }
+
    /**
     * Disconnects from the media browser service.
     * After this, no more callbacks will be received.
     */
    public void disconnect() {
        // It's ok to call this any state, because allowing this lets apps not have
        // to check isConnected() unnecessarily. They won't appreciate the extra
        // assertions for this. We do everything we can here to go back to a sane state.
        mState = CONNECT_STATE_DISCONNECTING;
        mHandler.post(new Runnable() {
            @Override
            public void run() {
                // connect() could be called before this. Then we will disconnect and reconnect.
                if (mServiceCallbacks != null) {
                    try {
                        // NOTE(review): relies on mServiceBinder being non-null whenever
                        // mServiceCallbacks is non-null (set together in onServiceConnected,
                        // cleared together in forceCloseConnection) — confirm invariant holds.
                        mServiceBinder.disconnect(mServiceCallbacks);
                    } catch (RemoteException ex) {
                        // We are disconnecting anyway. Log, just for posterity but it's not
                        // a big problem.
                        Log.w(TAG, "RemoteException during connect for " + mServiceComponent);
                    }
                }
                int state = mState;
                forceCloseConnection();
                // If the state was not CONNECT_STATE_DISCONNECTING, keep the state so that
                // the operation came after disconnect() can be handled properly.
                if (state != CONNECT_STATE_DISCONNECTING) {
                    mState = state;
                }
                if (DBG) {
                    Log.d(TAG, "disconnect...");
                    dump();
                }
            }
        });
    }
+
    /**
     * Null out the variables and unbind from the service. This doesn't include
     * calling disconnect on the service, because we only try to do that in the
     * clean shutdown cases.
     * <p>
     * Everywhere that calls this EXCEPT for disconnect() should follow it with
     * a call to mCallback.onConnectionFailed(). Disconnect doesn't do that callback
     * for a clean shutdown, but everywhere else is a dirty shutdown and should
     * notify the app.
     * <p>
     * Also, mState should be updated properly. Mostly it should be
     * CONNECT_STATE_DISCONNECTED except for disconnect().
     */
    private void forceCloseConnection() {
        if (mServiceConnection != null) {
            try {
                mContext.unbindService(mServiceConnection);
            } catch (IllegalArgumentException e) {
                // Thrown if the connection was never registered; safe to ignore here.
                if (DBG) {
                    Log.d(TAG, "unbindService failed", e);
                }
            }
        }
        mState = CONNECT_STATE_DISCONNECTED;
        mServiceConnection = null;
        mServiceBinder = null;
        mServiceCallbacks = null;
        mRootId = null;
        mMediaSessionToken = null;
    }
+
    /**
     * Returns whether the browser is connected to the service.
     *
     * @return true only in the fully-connected state (not while connecting or suspended).
     */
    public boolean isConnected() {
        return mState == CONNECT_STATE_CONNECTED;
    }
+
+ /**
+ * Gets the service component that the media browser is connected to.
+ */
+ public @NonNull ComponentName getServiceComponent() {
+ if (!isConnected()) {
+ throw new IllegalStateException("getServiceComponent() called while not connected" +
+ " (state=" + mState + ")");
+ }
+ return mServiceComponent;
+ }
+
    /**
     * Gets the root id.
     * <p>
     * Note that the root id may become invalid or change when the
     * browser is disconnected.
     * </p>
     *
     * @return The root id supplied by the service at connection time.
     * @throws IllegalStateException if not connected.
     */
    public @NonNull String getRoot() {
        if (!isConnected()) {
            throw new IllegalStateException("getRoot() called while not connected (state="
                    + getStateLabel(mState) + ")");
        }
        return mRootId;
    }
+
    /**
     * Gets any extras for the media service.
     *
     * @return The extras supplied by the service at connection time, or null if none.
     * @throws IllegalStateException if not connected.
     */
    public @Nullable Bundle getExtras() {
        if (!isConnected()) {
            throw new IllegalStateException("getExtras() called while not connected (state="
                    + getStateLabel(mState) + ")");
        }
        return mExtras;
    }
+
+ /**
+ * Gets the media session token associated with the media browser.
+ * <p>
+ * Note that the session token may become invalid or change when the
+ * browser is disconnected.
+ * </p>
+ *
+ * @return The session token for the browser, never null.
+ *
+ * @throws IllegalStateException if not connected.
+ */
+ public @NonNull MediaSession.Token getSessionToken() {
+ if (!isConnected()) {
+ throw new IllegalStateException("getSessionToken() called while not connected (state="
+ + mState + ")");
+ }
+ return mMediaSessionToken;
+ }
+
    /**
     * Queries for information about the media items that are contained within
     * the specified id and subscribes to receive updates when they change.
     * <p>
     * The list of subscriptions is maintained even when not connected and is
     * restored after the reconnection. It is ok to subscribe while not connected
     * but the results will not be returned until the connection completes.
     * </p>
     * <p>
     * If the id is already subscribed with a different callback then the new
     * callback will replace the previous one and the child data will be
     * reloaded.
     * </p>
     *
     * @param parentId The id of the parent media item whose list of children
     *            will be subscribed.
     * @param callback The callback to receive the list of children.
     * @throws IllegalArgumentException if parentId is empty or callback is null.
     */
    public void subscribe(@NonNull String parentId, @NonNull SubscriptionCallback callback) {
        subscribeInternal(parentId, null, callback);
    }
+
+ /**
+ * Queries with service-specific arguments for information about the media items
+ * that are contained within the specified id and subscribes to receive updates
+ * when they change.
+ * <p>
+ * The list of subscriptions is maintained even when not connected and is
+ * restored after the reconnection. It is ok to subscribe while not connected
+ * but the results will not be returned until the connection completes.
+ * </p>
+ * <p>
+ * If the id is already subscribed with a different callback then the new
+ * callback will replace the previous one and the child data will be
+ * reloaded.
+ * </p>
+ *
+ * @param parentId The id of the parent media item whose list of children
+ * will be subscribed.
+ * @param options The bundle of service-specific arguments to send to the media
+ * browser service. The contents of this bundle may affect the
+ * information returned when browsing.
+ * @param callback The callback to receive the list of children.
+ */
+ public void subscribe(@NonNull String parentId, @NonNull Bundle options,
+ @NonNull SubscriptionCallback callback) {
+ if (options == null) {
+ throw new IllegalArgumentException("options cannot be null");
+ }
+ subscribeInternal(parentId, new Bundle(options), callback);
+ }
+
    /**
     * Unsubscribes for changes to the children of the specified media id.
     * <p>
     * The query callback will no longer be invoked for results associated with
     * this id once this method returns.
     * </p>
     *
     * @param parentId The id of the parent media item whose list of children
     *            will be unsubscribed.
     * @throws IllegalArgumentException if parentId is empty.
     */
    public void unsubscribe(@NonNull String parentId) {
        unsubscribeInternal(parentId, null);
    }
+
    /**
     * Unsubscribes for changes to the children of the specified media id through a callback.
     * <p>
     * The query callback will no longer be invoked for results associated with
     * this id once this method returns.
     * </p>
     *
     * @param parentId The id of the parent media item whose list of children
     *            will be unsubscribed.
     * @param callback A callback sent to the media browser service to subscribe.
     * @throws IllegalArgumentException if parentId is empty or callback is null.
     */
    public void unsubscribe(@NonNull String parentId, @NonNull SubscriptionCallback callback) {
        if (callback == null) {
            throw new IllegalArgumentException("callback cannot be null");
        }
        unsubscribeInternal(parentId, callback);
    }
+
    /**
     * Retrieves a specific {@link MediaItem} from the connected service. Not
     * all services may support this, so falling back to subscribing to the
     * parent's id should be used when unavailable.
     *
     * @param mediaId The id of the item to retrieve.
     * @param cb The callback to receive the result on.
     * @throws IllegalArgumentException if mediaId is empty or cb is null.
     */
    public void getItem(final @NonNull String mediaId, @NonNull final ItemCallback cb) {
        if (TextUtils.isEmpty(mediaId)) {
            throw new IllegalArgumentException("mediaId cannot be empty.");
        }
        if (cb == null) {
            throw new IllegalArgumentException("cb cannot be null.");
        }
        if (mState != CONNECT_STATE_CONNECTED) {
            Log.i(TAG, "Not connected, unable to retrieve the MediaItem.");
            // Deliver the error asynchronously so the callback contract is consistent
            // with the connected path.
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    cb.onError(mediaId);
                }
            });
            return;
        }
        ResultReceiver receiver = new ResultReceiver(mHandler) {
            @Override
            protected void onReceiveResult(int resultCode, Bundle resultData) {
                // Drop stale results if the browser disconnected in the meantime.
                if (!isConnected()) {
                    return;
                }
                if (resultCode != 0 || resultData == null
                        || !resultData.containsKey(MediaBrowserService.KEY_MEDIA_ITEM)) {
                    cb.onError(mediaId);
                    return;
                }
                Parcelable item = resultData.getParcelable(MediaBrowserService.KEY_MEDIA_ITEM);
                // A wrong parcelable type is treated as an error; a null item is a valid
                // "not found" answer passed through to onItemLoaded.
                if (item != null && !(item instanceof MediaItem)) {
                    cb.onError(mediaId);
                    return;
                }
                cb.onItemLoaded((MediaItem)item);
            }
        };
        try {
            mServiceBinder.getMediaItem(mediaId, receiver, mServiceCallbacks);
        } catch (RemoteException e) {
            Log.i(TAG, "Remote error getting media item.");
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    cb.onError(mediaId);
                }
            });
        }
    }
+
    /**
     * Registers (or updates) a local subscription and, if currently connected,
     * forwards it to the service. Subscriptions made while disconnected are
     * replayed when the connection completes.
     *
     * @param parentId The id of the parent media item; must be non-empty.
     * @param options Service-specific arguments, or null for an option-less subscription.
     * @param callback The callback to receive the list of children; must be non-null.
     */
    private void subscribeInternal(String parentId, Bundle options, SubscriptionCallback callback) {
        // Check arguments.
        if (TextUtils.isEmpty(parentId)) {
            throw new IllegalArgumentException("parentId cannot be empty.");
        }
        if (callback == null) {
            throw new IllegalArgumentException("callback cannot be null");
        }
        // Update or create the subscription.
        Subscription sub = mSubscriptions.get(parentId);
        if (sub == null) {
            sub = new Subscription();
            mSubscriptions.put(parentId, sub);
        }
        sub.putCallback(mContext, options, callback);

        // If we are connected, tell the service that we are watching. If we aren't connected,
        // the service will be told when we connect.
        if (isConnected()) {
            try {
                if (options == null) {
                    // Deprecated call is made in addition to addSubscription so that older
                    // services without options support still receive the subscription.
                    mServiceBinder.addSubscriptionDeprecated(parentId, mServiceCallbacks);
                }
                mServiceBinder.addSubscription(parentId, callback.mToken, options,
                        mServiceCallbacks);
            } catch (RemoteException ex) {
                // Process is crashing. We will disconnect, and upon reconnect we will
                // automatically reregister. So nothing to do here.
                Log.d(TAG, "addSubscription failed with RemoteException parentId=" + parentId);
            }
        }
    }
+
    /**
     * Removes a subscription locally and, if connected, tells the service.
     * With a null callback the entire subscription for {@code parentId} is removed;
     * otherwise only the entries registered with that exact callback instance are removed.
     *
     * @param parentId The id of the parent media item; must be non-empty.
     * @param callback The specific callback to remove, or null for all.
     */
    private void unsubscribeInternal(String parentId, SubscriptionCallback callback) {
        // Check arguments.
        if (TextUtils.isEmpty(parentId)) {
            throw new IllegalArgumentException("parentId cannot be empty.");
        }

        Subscription sub = mSubscriptions.get(parentId);
        if (sub == null) {
            return;
        }
        // Tell the service if necessary.
        try {
            if (callback == null) {
                if (isConnected()) {
                    // Deprecated call kept for compatibility with older services.
                    mServiceBinder.removeSubscriptionDeprecated(parentId, mServiceCallbacks);
                    mServiceBinder.removeSubscription(parentId, null, mServiceCallbacks);
                }
            } else {
                final List<SubscriptionCallback> callbacks = sub.getCallbacks();
                final List<Bundle> optionsList = sub.getOptionsList();
                // Iterate backwards so removal does not shift the unvisited indices.
                for (int i = callbacks.size() - 1; i >= 0; --i) {
                    if (callbacks.get(i) == callback) {
                        if (isConnected()) {
                            mServiceBinder.removeSubscription(
                                    parentId, callback.mToken, mServiceCallbacks);
                        }
                        callbacks.remove(i);
                        optionsList.remove(i);
                    }
                }
            }
        } catch (RemoteException ex) {
            // Process is crashing. We will disconnect, and upon reconnect we will
            // automatically reregister. So nothing to do here.
            Log.d(TAG, "removeSubscription failed with RemoteException parentId=" + parentId);
        }

        if (sub.isEmpty() || callback == null) {
            mSubscriptions.remove(parentId);
        }
    }
+
+ /**
+ * For debugging.
+ */
+ private static String getStateLabel(int state) {
+ switch (state) {
+ case CONNECT_STATE_DISCONNECTING:
+ return "CONNECT_STATE_DISCONNECTING";
+ case CONNECT_STATE_DISCONNECTED:
+ return "CONNECT_STATE_DISCONNECTED";
+ case CONNECT_STATE_CONNECTING:
+ return "CONNECT_STATE_CONNECTING";
+ case CONNECT_STATE_CONNECTED:
+ return "CONNECT_STATE_CONNECTED";
+ case CONNECT_STATE_SUSPENDED:
+ return "CONNECT_STATE_SUSPENDED";
+ default:
+ return "UNKNOWN/" + state;
+ }
+ }
+
    /**
     * Handles the service's acknowledgment of our connection: records the root,
     * session token and extras, moves to CONNECT_STATE_CONNECTED, notifies the app,
     * and replays any subscriptions made before the connection completed.
     * Runs on the handler thread.
     */
    private final void onServiceConnected(final IMediaBrowserServiceCallbacks callback,
            final String root, final MediaSession.Token session, final Bundle extra) {
        mHandler.post(new Runnable() {
            @Override
            public void run() {
                // Check to make sure there hasn't been a disconnect or a different
                // ServiceConnection.
                if (!isCurrent(callback, "onConnect")) {
                    return;
                }
                // Don't allow them to call us twice.
                if (mState != CONNECT_STATE_CONNECTING) {
                    Log.w(TAG, "onConnect from service while mState="
                            + getStateLabel(mState) + "... ignoring");
                    return;
                }
                mRootId = root;
                mMediaSessionToken = session;
                mExtras = extra;
                mState = CONNECT_STATE_CONNECTED;

                if (DBG) {
                    Log.d(TAG, "ServiceCallbacks.onConnect...");
                    dump();
                }
                mCallback.onConnected();

                // we may receive some subscriptions before we are connected, so re-subscribe
                // everything now
                for (Entry<String, Subscription> subscriptionEntry : mSubscriptions.entrySet()) {
                    String id = subscriptionEntry.getKey();
                    Subscription sub = subscriptionEntry.getValue();
                    List<SubscriptionCallback> callbackList = sub.getCallbacks();
                    List<Bundle> optionsList = sub.getOptionsList();
                    for (int i = 0; i < callbackList.size(); ++i) {
                        try {
                            mServiceBinder.addSubscription(id, callbackList.get(i).mToken,
                                    optionsList.get(i), mServiceCallbacks);
                        } catch (RemoteException ex) {
                            // Process is crashing. We will disconnect, and upon reconnect we will
                            // automatically reregister. So nothing to do here.
                            Log.d(TAG, "addSubscription failed with RemoteException parentId="
                                    + id);
                        }
                    }
                }
            }
        });
    }
+
+ private final void onConnectionFailed(final IMediaBrowserServiceCallbacks callback) {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ Log.e(TAG, "onConnectFailed for " + mServiceComponent);
+
+ // Check to make sure there hasn't been a disconnect or a different
+ // ServiceConnection.
+ if (!isCurrent(callback, "onConnectFailed")) {
+ return;
+ }
+ // Don't allow them to call us twice.
+ if (mState != CONNECT_STATE_CONNECTING) {
+ Log.w(TAG, "onConnect from service while mState="
+ + getStateLabel(mState) + "... ignoring");
+ return;
+ }
+
+ // Clean up
+ forceCloseConnection();
+
+ // Tell the app.
+ mCallback.onConnectionFailed();
+ }
+ });
+ }
+
    /**
     * Delivers a children-loaded (or error) result from the service to the
     * subscription callback registered for {@code parentId} with matching options.
     * A null {@code list} is reported as an error. Runs on the handler thread.
     */
    private final void onLoadChildren(final IMediaBrowserServiceCallbacks callback,
            final String parentId, final ParceledListSlice list, final Bundle options) {
        mHandler.post(new Runnable() {
            @Override
            public void run() {
                // Check that there hasn't been a disconnect or a different
                // ServiceConnection.
                if (!isCurrent(callback, "onLoadChildren")) {
                    return;
                }

                if (DBG) {
                    Log.d(TAG, "onLoadChildren for " + mServiceComponent + " id=" + parentId);
                }

                // Check that the subscription is still subscribed.
                final Subscription subscription = mSubscriptions.get(parentId);
                if (subscription != null) {
                    // Tell the app.
                    SubscriptionCallback subscriptionCallback =
                            subscription.getCallback(mContext, options);
                    if (subscriptionCallback != null) {
                        List<MediaItem> data = list == null ? null : list.getList();
                        // Dispatch to the options-less or options-aware overloads to match
                        // how the subscription was made.
                        if (options == null) {
                            if (data == null) {
                                subscriptionCallback.onError(parentId);
                            } else {
                                subscriptionCallback.onChildrenLoaded(parentId, data);
                            }
                        } else {
                            if (data == null) {
                                subscriptionCallback.onError(parentId, options);
                            } else {
                                subscriptionCallback.onChildrenLoaded(parentId, data, options);
                            }
                        }
                        return;
                    }
                }
                if (DBG) {
                    Log.d(TAG, "onLoadChildren for id that isn't subscribed id=" + parentId);
                }
            }
        });
    }
+
+ /**
+ * Return true if {@code callback} is the current ServiceCallbacks. Also logs if it's not.
+ */
+ private boolean isCurrent(IMediaBrowserServiceCallbacks callback, String funcName) {
+ if (mServiceCallbacks != callback || mState == CONNECT_STATE_DISCONNECTING
+ || mState == CONNECT_STATE_DISCONNECTED) {
+ if (mState != CONNECT_STATE_DISCONNECTING && mState != CONNECT_STATE_DISCONNECTED) {
+ Log.i(TAG, funcName + " for " + mServiceComponent + " with mServiceConnection="
+ + mServiceCallbacks + " this=" + this);
+ }
+ return false;
+ }
+ return true;
+ }
+
    /** Creates a fresh callbacks stub tied to this browser; one per (re)connection. */
    private ServiceCallbacks getNewServiceCallbacks() {
        return new ServiceCallbacks(this);
    }
+
    /**
     * Log internal state.
     * @hide
     */
    void dump() {
        Log.d(TAG, "MediaBrowser...");
        Log.d(TAG, "  mServiceComponent=" + mServiceComponent);
        Log.d(TAG, "  mCallback=" + mCallback);
        Log.d(TAG, "  mRootHints=" + mRootHints);
        Log.d(TAG, "  mState=" + getStateLabel(mState));
        Log.d(TAG, "  mServiceConnection=" + mServiceConnection);
        Log.d(TAG, "  mServiceBinder=" + mServiceBinder);
        Log.d(TAG, "  mServiceCallbacks=" + mServiceCallbacks);
        Log.d(TAG, "  mRootId=" + mRootId);
        Log.d(TAG, "  mMediaSessionToken=" + mMediaSessionToken);
    }
+
+ /**
+ * A class with information on a single media item for use in browsing/searching media.
+ * MediaItems are application dependent so we cannot guarantee that they contain the
+ * right values.
+ */
+ public static class MediaItem implements Parcelable {
+ private final int mFlags;
+ private final MediaDescription mDescription;
+
+ /** @hide */
+ @Retention(RetentionPolicy.SOURCE)
+ @IntDef(flag=true, value = { FLAG_BROWSABLE, FLAG_PLAYABLE })
+ public @interface Flags { }
+
+ /**
+ * Flag: Indicates that the item has children of its own.
+ */
+ public static final int FLAG_BROWSABLE = 1 << 0;
+
+ /**
+ * Flag: Indicates that the item is playable.
+ * <p>
+ * The id of this item may be passed to
+ * {@link MediaController.TransportControls#playFromMediaId(String, Bundle)}
+ * to start playing it.
+ * </p>
+ */
+ public static final int FLAG_PLAYABLE = 1 << 1;
+
+ /**
+ * Create a new MediaItem for use in browsing media.
+ * @param description The description of the media, which must include a
+ * media id.
+ * @param flags The flags for this item.
+ */
+ public MediaItem(@NonNull MediaDescription description, @Flags int flags) {
+ if (description == null) {
+ throw new IllegalArgumentException("description cannot be null");
+ }
+ if (TextUtils.isEmpty(description.getMediaId())) {
+ throw new IllegalArgumentException("description must have a non-empty media id");
+ }
+ mFlags = flags;
+ mDescription = description;
+ }
+
+ /**
+ * Private constructor.
+ */
+ private MediaItem(Parcel in) {
+ mFlags = in.readInt();
+ mDescription = MediaDescription.CREATOR.createFromParcel(in);
+ }
+
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ @Override
+ public void writeToParcel(Parcel out, int flags) {
+ out.writeInt(mFlags);
+ mDescription.writeToParcel(out, flags);
+ }
+
+ @Override
+ public String toString() {
+ final StringBuilder sb = new StringBuilder("MediaItem{");
+ sb.append("mFlags=").append(mFlags);
+ sb.append(", mDescription=").append(mDescription);
+ sb.append('}');
+ return sb.toString();
+ }
+
+ public static final Parcelable.Creator<MediaItem> CREATOR =
+ new Parcelable.Creator<MediaItem>() {
+ @Override
+ public MediaItem createFromParcel(Parcel in) {
+ return new MediaItem(in);
+ }
+
+ @Override
+ public MediaItem[] newArray(int size) {
+ return new MediaItem[size];
+ }
+ };
+
+ /**
+ * Gets the flags of the item.
+ */
+ public @Flags int getFlags() {
+ return mFlags;
+ }
+
+ /**
+ * Returns whether this item is browsable.
+ * @see #FLAG_BROWSABLE
+ */
+ public boolean isBrowsable() {
+ return (mFlags & FLAG_BROWSABLE) != 0;
+ }
+
+ /**
+ * Returns whether this item is playable.
+ * @see #FLAG_PLAYABLE
+ */
+ public boolean isPlayable() {
+ return (mFlags & FLAG_PLAYABLE) != 0;
+ }
+
+ /**
+ * Returns the description of the media.
+ */
+ public @NonNull MediaDescription getDescription() {
+ return mDescription;
+ }
+
+ /**
+ * Returns the media id in the {@link MediaDescription} for this item.
+ * @see android.media.MediaMetadata#METADATA_KEY_MEDIA_ID
+ */
+ public @Nullable String getMediaId() {
+ return mDescription.getMediaId();
+ }
+ }
+
    /**
     * Callbacks for connection related events.
     * <p>
     * All methods are no-ops by default; override only the ones you need.
     */
    public static class ConnectionCallback {
        /**
         * Invoked after {@link MediaBrowser#connect()} when the request has successfully completed.
         */
        public void onConnected() {
        }

        /**
         * Invoked when the client is disconnected from the media browser.
         */
        public void onConnectionSuspended() {
        }

        /**
         * Invoked when the connection to the media browser failed.
         */
        public void onConnectionFailed() {
        }
    }
+
+ /**
+ * Callbacks for subscription related events.
+ */
+ public static abstract class SubscriptionCallback {
+ Binder mToken;
+
+ public SubscriptionCallback() {
+ mToken = new Binder();
+ }
+
+ /**
+ * Called when the list of children is loaded or updated.
+ *
+ * @param parentId The media id of the parent media item.
+ * @param children The children which were loaded.
+ */
+ public void onChildrenLoaded(@NonNull String parentId, @NonNull List<MediaItem> children) {
+ }
+
+ /**
+ * Called when the list of children is loaded or updated.
+ *
+ * @param parentId The media id of the parent media item.
+ * @param children The children which were loaded.
+ * @param options The bundle of service-specific arguments sent to the media
+ * browser service. The contents of this bundle may affect the
+ * information returned when browsing.
+ */
+ public void onChildrenLoaded(@NonNull String parentId, @NonNull List<MediaItem> children,
+ @NonNull Bundle options) {
+ }
+
+ /**
+ * Called when the id doesn't exist or other errors in subscribing.
+ * <p>
+ * If this is called, the subscription remains until {@link MediaBrowser#unsubscribe}
+ * called, because some errors may heal themselves.
+ * </p>
+ *
+ * @param parentId The media id of the parent media item whose children could
+ * not be loaded.
+ */
+ public void onError(@NonNull String parentId) {
+ }
+
+ /**
+ * Called when the id doesn't exist or other errors in subscribing.
+ * <p>
+ * If this is called, the subscription remains until {@link MediaBrowser#unsubscribe}
+ * called, because some errors may heal themselves.
+ * </p>
+ *
+ * @param parentId The media id of the parent media item whose children could
+ * not be loaded.
+ * @param options The bundle of service-specific arguments sent to the media
+ * browser service.
+ */
+ public void onError(@NonNull String parentId, @NonNull Bundle options) {
+ }
+ }
+
    /**
     * Callback for receiving the result of {@link #getItem}.
     */
    public static abstract class ItemCallback {
        /**
         * Called when the item has been returned by the connected service.
         *
         * @param item The item that was returned or null if it doesn't exist.
         */
        public void onItemLoaded(MediaItem item) {
        }

        /**
         * Called when there was an error retrieving it or the connected service doesn't
         * support {@link #getItem}.
         *
         * @param mediaId The media id of the media item which could not be loaded.
         */
        public void onError(@NonNull String mediaId) {
        }
    }
+
    /**
     * ServiceConnection to the other app.
     * <p>
     * Binder lifecycle events are marshalled onto the handler thread via postOrRun
     * so they serialize with connect()/disconnect().
     */
    private class MediaServiceConnection implements ServiceConnection {
        @Override
        public void onServiceConnected(final ComponentName name, final IBinder binder) {
            postOrRun(new Runnable() {
                @Override
                public void run() {
                    if (DBG) {
                        Log.d(TAG, "MediaServiceConnection.onServiceConnected name=" + name
                                + " binder=" + binder);
                        dump();
                    }

                    // Make sure we are still the current connection, and that they haven't called
                    // disconnect().
                    if (!isCurrent("onServiceConnected")) {
                        return;
                    }

                    // Save their binder
                    mServiceBinder = IMediaBrowserService.Stub.asInterface(binder);

                    // We make a new mServiceCallbacks each time we connect so that we can drop
                    // responses from previous connections.
                    mServiceCallbacks = getNewServiceCallbacks();
                    mState = CONNECT_STATE_CONNECTING;

                    // Call connect, which is async. When we get a response from that we will
                    // say that we're connected.
                    try {
                        if (DBG) {
                            Log.d(TAG, "ServiceCallbacks.onConnect...");
                            dump();
                        }
                        mServiceBinder.connect(mContext.getPackageName(), mRootHints,
                                mServiceCallbacks);
                    } catch (RemoteException ex) {
                        // Connect failed, which isn't good. But the auto-reconnect on the service
                        // will take over and we will come back. We will also get the
                        // onServiceDisconnected, which has all the cleanup code. So let that do
                        // it.
                        Log.w(TAG, "RemoteException during connect for " + mServiceComponent);
                        if (DBG) {
                            Log.d(TAG, "ServiceCallbacks.onConnect...");
                            dump();
                        }
                    }
                }
            });
        }

        @Override
        public void onServiceDisconnected(final ComponentName name) {
            postOrRun(new Runnable() {
                @Override
                public void run() {
                    if (DBG) {
                        Log.d(TAG, "MediaServiceConnection.onServiceDisconnected name=" + name
                                + " this=" + this + " mServiceConnection=" + mServiceConnection);
                        dump();
                    }

                    // Make sure we are still the current connection, and that they haven't called
                    // disconnect().
                    if (!isCurrent("onServiceDisconnected")) {
                        return;
                    }

                    // Clear out what we set in onServiceConnected
                    mServiceBinder = null;
                    mServiceCallbacks = null;

                    // And tell the app that it's suspended.
                    mState = CONNECT_STATE_SUSPENDED;
                    mCallback.onConnectionSuspended();
                }
            });
        }

        // Runs r immediately when already on the handler thread, otherwise posts it.
        private void postOrRun(Runnable r) {
            if (Thread.currentThread() == mHandler.getLooper().getThread()) {
                r.run();
            } else {
                mHandler.post(r);
            }
        }

        /**
         * Return true if this is the current ServiceConnection. Also logs if it's not.
         */
        private boolean isCurrent(String funcName) {
            if (mServiceConnection != this || mState == CONNECT_STATE_DISCONNECTING
                    || mState == CONNECT_STATE_DISCONNECTED) {
                if (mState != CONNECT_STATE_DISCONNECTING && mState != CONNECT_STATE_DISCONNECTED) {
                    // Check mState, because otherwise this log is noisy.
                    Log.i(TAG, funcName + " for " + mServiceComponent + " with mServiceConnection="
                            + mServiceConnection + " this=" + this);
                }
                return false;
            }
            return true;
        }
    }
+
+ /**
+ * Callbacks from the service.
+ */
+ private static class ServiceCallbacks extends IMediaBrowserServiceCallbacks.Stub {
+ private WeakReference<MediaBrowser> mMediaBrowser;
+
+ public ServiceCallbacks(MediaBrowser mediaBrowser) {
+ mMediaBrowser = new WeakReference<MediaBrowser>(mediaBrowser);
+ }
+
+ /**
+ * The other side has acknowledged our connection. The parameters to this function
+ * are the initial data as requested.
+ */
+ @Override
+ public void onConnect(String root, MediaSession.Token session,
+ final Bundle extras) {
+ MediaBrowser mediaBrowser = mMediaBrowser.get();
+ if (mediaBrowser != null) {
+ mediaBrowser.onServiceConnected(this, root, session, extras);
+ }
+ }
+
+ /**
+ * The other side does not like us. Tell the app via onConnectionFailed.
+ */
+ @Override
+ public void onConnectFailed() {
+ MediaBrowser mediaBrowser = mMediaBrowser.get();
+ if (mediaBrowser != null) {
+ mediaBrowser.onConnectionFailed(this);
+ }
+ }
+
+ @Override
+ public void onLoadChildren(String parentId, ParceledListSlice list) {
+ onLoadChildrenWithOptions(parentId, list, null);
+ }
+
+ @Override
+ public void onLoadChildrenWithOptions(String parentId, ParceledListSlice list,
+ final Bundle options) {
+ MediaBrowser mediaBrowser = mMediaBrowser.get();
+ if (mediaBrowser != null) {
+ mediaBrowser.onLoadChildren(this, parentId, list, options);
+ }
+ }
+ }
+
+ private static class Subscription {
+ private final List<SubscriptionCallback> mCallbacks;
+ private final List<Bundle> mOptionsList;
+
+ public Subscription() {
+ mCallbacks = new ArrayList<>();
+ mOptionsList = new ArrayList<>();
+ }
+
+ public boolean isEmpty() {
+ return mCallbacks.isEmpty();
+ }
+
+ public List<Bundle> getOptionsList() {
+ return mOptionsList;
+ }
+
+ public List<SubscriptionCallback> getCallbacks() {
+ return mCallbacks;
+ }
+
+ public SubscriptionCallback getCallback(Context context, Bundle options) {
+ if (options != null) {
+ options.setClassLoader(context.getClassLoader());
+ }
+ for (int i = 0; i < mOptionsList.size(); ++i) {
+ if (MediaBrowserUtils.areSameOptions(mOptionsList.get(i), options)) {
+ return mCallbacks.get(i);
+ }
+ }
+ return null;
+ }
+
+ public void putCallback(Context context, Bundle options, SubscriptionCallback callback) {
+ if (options != null) {
+ options.setClassLoader(context.getClassLoader());
+ }
+ for (int i = 0; i < mOptionsList.size(); ++i) {
+ if (MediaBrowserUtils.areSameOptions(mOptionsList.get(i), options)) {
+ mCallbacks.set(i, callback);
+ return;
+ }
+ }
+ mCallbacks.add(callback);
+ mOptionsList.add(options);
+ }
+ }
+}
diff --git a/android/media/browse/MediaBrowserUtils.java b/android/media/browse/MediaBrowserUtils.java
new file mode 100644
index 00000000..2943e60d
--- /dev/null
+++ b/android/media/browse/MediaBrowserUtils.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.browse;
+
+import android.os.Bundle;
+
+/**
+ * @hide
+ */
+public class MediaBrowserUtils {
+    /**
+     * Returns true if the two option bundles request the same page of results.
+     * A null bundle and a bundle lacking EXTRA_PAGE/EXTRA_PAGE_SIZE are equivalent
+     * (both mean "all items").
+     */
+    public static boolean areSameOptions(Bundle options1, Bundle options2) {
+        if (options1 == options2) {
+            return true;
+        } else if (options1 == null) {
+            return options2.getInt(MediaBrowser.EXTRA_PAGE, -1) == -1
+                    && options2.getInt(MediaBrowser.EXTRA_PAGE_SIZE, -1) == -1;
+        } else if (options2 == null) {
+            return options1.getInt(MediaBrowser.EXTRA_PAGE, -1) == -1
+                    && options1.getInt(MediaBrowser.EXTRA_PAGE_SIZE, -1) == -1;
+        } else {
+            return options1.getInt(MediaBrowser.EXTRA_PAGE, -1)
+                    == options2.getInt(MediaBrowser.EXTRA_PAGE, -1)
+                    && options1.getInt(MediaBrowser.EXTRA_PAGE_SIZE, -1)
+                    == options2.getInt(MediaBrowser.EXTRA_PAGE_SIZE, -1);
+        }
+    }
+
+    /**
+     * Returns true if the item ranges described by the two option bundles overlap,
+     * i.e. at least one item index is covered by both. A missing page or page size
+     * means the full range [0, Integer.MAX_VALUE].
+     */
+    public static boolean hasDuplicatedItems(Bundle options1, Bundle options2) {
+        int page1 = options1 == null ? -1 : options1.getInt(MediaBrowser.EXTRA_PAGE, -1);
+        int page2 = options2 == null ? -1 : options2.getInt(MediaBrowser.EXTRA_PAGE, -1);
+        int pageSize1 = options1 == null ? -1 : options1.getInt(MediaBrowser.EXTRA_PAGE_SIZE, -1);
+        int pageSize2 = options2 == null ? -1 : options2.getInt(MediaBrowser.EXTRA_PAGE_SIZE, -1);
+
+        int startIndex1, startIndex2, endIndex1, endIndex2;
+        if (page1 == -1 || pageSize1 == -1) {
+            startIndex1 = 0;
+            endIndex1 = Integer.MAX_VALUE;
+        } else {
+            startIndex1 = pageSize1 * page1;
+            endIndex1 = startIndex1 + pageSize1 - 1;
+        }
+
+        if (page2 == -1 || pageSize2 == -1) {
+            startIndex2 = 0;
+            endIndex2 = Integer.MAX_VALUE;
+        } else {
+            startIndex2 = pageSize2 * page2;
+            endIndex2 = startIndex2 + pageSize2 - 1;
+        }
+
+        // BUG FIX: the original tested only whether range2's start or end fell inside
+        // range1, which misses the case where range2 strictly contains range1
+        // (e.g. [5,10] vs [0,MAX_VALUE] returned false despite overlapping).
+        // Two closed intervals overlap iff each starts no later than the other ends.
+        return startIndex1 <= endIndex2 && startIndex2 <= endIndex1;
+    }
+}
diff --git a/android/media/effect/Effect.java b/android/media/effect/Effect.java
new file mode 100644
index 00000000..b2b44270
--- /dev/null
+++ b/android/media/effect/Effect.java
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.effect;
+
+
+/**
+ * <p>Effects are high-performance transformations that can be applied to image frames. These are
+ * passed in the form of OpenGL ES 2.0 texture names. Typical frames could be images loaded from
+ * disk, or frames from the camera or other video streams.</p>
+ *
+ * <p>To create an Effect you must first create an EffectContext. You can obtain an instance of the
+ * context's EffectFactory by calling
+ * {@link android.media.effect.EffectContext#getFactory() getFactory()}. The EffectFactory allows
+ * you to instantiate specific Effects.</p>
+ *
+ * <p>The application is responsible for creating an EGL context, and making it current before
+ * applying an effect. An effect is bound to a single EffectContext, which in turn is bound to a
+ * single EGL context. If your EGL context is destroyed, the EffectContext becomes invalid and any
+ * effects bound to this context can no longer be used.</p>
+ *
+ */
+public abstract class Effect {
+
+    /**
+     * Get the effect name.
+     *
+     * Returns the unique name of the effect, which matches the name used for instantiating this
+     * effect by the EffectFactory.
+     *
+     * @return The name of the effect.
+     */
+    public abstract String getName();
+
+    /**
+     * Apply an effect to GL textures.
+     *
+     * <p>Apply the Effect on the specified input GL texture, and write the result into the
+     * output GL texture. The texture names passed must be valid in the current GL context.</p>
+     *
+     * <p>The input texture must be a valid texture name with the given width and height and must be
+     * bound to a GL_TEXTURE_2D texture image (usually done by calling the glTexImage2D() function).
+     * Multiple mipmap levels may be provided.</p>
+     *
+     * <p>If the output texture has not been bound to a texture image, it will be automatically
+     * bound by the effect as a GL_TEXTURE_2D. It will contain one mipmap level (0), which will have
+     * the same size as the input. No other mipmap levels are defined. If the output texture was
+     * bound already, and its size does not match the input texture size, the result may be clipped
+     * or only partially fill the texture.</p>
+     *
+     * <p>Note, that regardless of whether a texture image was originally provided or not, both the
+     * input and output textures are owned by the caller. That is, the caller is responsible for
+     * calling glDeleteTextures() to deallocate the input and output textures.</p>
+     *
+     * @param inputTexId The GL texture name of a valid and bound input texture.
+     * @param width The width of the input texture in pixels.
+     * @param height The height of the input texture in pixels.
+     * @param outputTexId The GL texture name of the output texture.
+     */
+    public abstract void apply(int inputTexId, int width, int height, int outputTexId);
+
+    /**
+     * Set a filter parameter.
+     *
+     * Consult the effect documentation for a list of supported parameter keys for each effect.
+     *
+     * @param parameterKey The name of the parameter to adjust.
+     * @param value The new value to set the parameter to.
+     * @throws IllegalArgumentException if parameterKey is not a recognized name, or the value is
+     *         not a valid value for this parameter.
+     */
+    public abstract void setParameter(String parameterKey, Object value);
+
+    /**
+     * Set an effect listener.
+     *
+     * Some effects may report state changes back to the host, if a listener is set. Consult the
+     * individual effect documentation for more details.
+     *
+     * @param listener The listener to receive update callbacks on.
+     */
+    public void setUpdateListener(EffectUpdateListener listener) {
+        // Default implementation: effects that never report state changes ignore the listener.
+    }
+
+    /**
+     * Release an effect.
+     *
+     * <p>Releases the effect and any resources associated with it. You may call this if you need to
+     * make sure acquired resources are no longer held by the effect. Releasing an effect makes it
+     * invalid for reuse.</p>
+     *
+     * <p>Note that this method must be called with the EffectContext and EGL context current, as
+     * the effect may release internal GL resources.</p>
+     */
+    public abstract void release();
+}
+
diff --git a/android/media/effect/EffectContext.java b/android/media/effect/EffectContext.java
new file mode 100644
index 00000000..a11b9c48
--- /dev/null
+++ b/android/media/effect/EffectContext.java
@@ -0,0 +1,128 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.effect;
+
+import android.filterfw.core.CachedFrameManager;
+import android.filterfw.core.FilterContext;
+import android.filterfw.core.GLEnvironment;
+import android.opengl.GLES20;
+
+/**
+ * <p>An EffectContext keeps all necessary state information to run Effects within a Open GL ES 2.0
+ * context.</p>
+ *
+ * <p>Every EffectContext is bound to one GL context. The application is responsible for creating
+ * this EGL context, and making it current before applying any effect. If your EGL context is
+ * destroyed, the EffectContext becomes invalid and any effects bound to this context can no longer
+ * be used. If you switch to another EGL context, you must create a new EffectContext. Each Effect
+ * is bound to a single EffectContext, and can only be executed in that context.</p>
+ */
+public class EffectContext {
+
+    // Indices into mOldState for the GL bindings captured by saveGLState().
+    // These are per-class constants, so they are declared static final
+    // (the original declared them as instance fields).
+    private static final int GL_STATE_FBO = 0;
+    private static final int GL_STATE_PROGRAM = 1;
+    private static final int GL_STATE_ARRAYBUFFER = 2;
+    private static final int GL_STATE_COUNT = 3;
+
+    FilterContext mFilterContext;
+
+    private EffectFactory mFactory;
+
+    // Scratch buffer holding the caller's GL bindings while an effect runs.
+    private int[] mOldState = new int[GL_STATE_COUNT];
+
+    /**
+     * Creates a context within the current GL context.
+     *
+     * <p>Binds the EffectContext to the current OpenGL context. All subsequent calls to the
+     * EffectContext must be made in the GL context that was active during creation.
+     * When you have finished using a context, you must call {@link #release()} to dispose of all
+     * resources associated with this context.</p>
+     */
+    public static EffectContext createWithCurrentGlContext() {
+        EffectContext result = new EffectContext();
+        result.initInCurrentGlContext();
+        return result;
+    }
+
+    /**
+     * Returns the EffectFactory for this context.
+     *
+     * <p>The EffectFactory returned from this method allows instantiating new effects within this
+     * context.</p>
+     *
+     * @return The EffectFactory instance for this context.
+     */
+    public EffectFactory getFactory() {
+        return mFactory;
+    }
+
+    /**
+     * Releases the context.
+     *
+     * <p>Releases all the resources and effects associated with the EffectContext. This renders the
+     * context and all the effects bound to this context invalid. You must no longer use the context
+     * or any of its bound effects after calling release().</p>
+     *
+     * <p>Note that this method must be called with the proper EGL context made current, as the
+     * EffectContext and its effects may release internal GL resources.</p>
+     */
+    public void release() {
+        mFilterContext.tearDown();
+        mFilterContext = null;
+    }
+
+    private EffectContext() {
+        mFilterContext = new FilterContext();
+        mFilterContext.setFrameManager(new CachedFrameManager());
+        mFactory = new EffectFactory(this);
+    }
+
+    private void initInCurrentGlContext() {
+        if (!GLEnvironment.isAnyContextActive()) {
+            throw new RuntimeException("Attempting to initialize EffectContext with no active "
+                    + "GL context!");
+        }
+        GLEnvironment glEnvironment = new GLEnvironment();
+        glEnvironment.initWithCurrentContext();
+        mFilterContext.initGLEnvironment(glEnvironment);
+    }
+
+    /** Throws if the calling thread's current GL context is not this context's environment. */
+    final void assertValidGLState() {
+        GLEnvironment glEnv = mFilterContext.getGLEnvironment();
+        if (glEnv == null || !glEnv.isContextActive()) {
+            if (GLEnvironment.isAnyContextActive()) {
+                throw new RuntimeException("Applying effect in wrong GL context!");
+            } else {
+                throw new RuntimeException("Attempting to apply effect without valid GL context!");
+            }
+        }
+    }
+
+    /** Snapshots the caller's FBO, program, and array-buffer bindings into mOldState. */
+    final void saveGLState() {
+        GLES20.glGetIntegerv(GLES20.GL_FRAMEBUFFER_BINDING, mOldState, GL_STATE_FBO);
+        GLES20.glGetIntegerv(GLES20.GL_CURRENT_PROGRAM, mOldState, GL_STATE_PROGRAM);
+        GLES20.glGetIntegerv(GLES20.GL_ARRAY_BUFFER_BINDING, mOldState, GL_STATE_ARRAYBUFFER);
+    }
+
+    /** Restores the GL bindings captured by the matching saveGLState() call. */
+    final void restoreGLState() {
+        GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, mOldState[GL_STATE_FBO]);
+        GLES20.glUseProgram(mOldState[GL_STATE_PROGRAM]);
+        GLES20.glBindBuffer(GLES20.GL_ARRAY_BUFFER, mOldState[GL_STATE_ARRAYBUFFER]);
+    }
+}
+
diff --git a/android/media/effect/EffectFactory.java b/android/media/effect/EffectFactory.java
new file mode 100644
index 00000000..f6fcba71
--- /dev/null
+++ b/android/media/effect/EffectFactory.java
@@ -0,0 +1,516 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.effect;
+
+import java.lang.reflect.Constructor;
+
+/**
+ * <p>The EffectFactory class defines the list of available Effects, and provides functionality to
+ * inspect and instantiate them. Some effects may not be available on all platforms, so before
+ * creating a certain effect, the application should confirm that the effect is supported on this
+ * platform by calling {@link #isEffectSupported(String)}.</p>
+ */
+public class EffectFactory {
+
+    // The context this factory creates effects in; every Effect it builds is bound to it.
+    private EffectContext mEffectContext;
+
+    // Package prefixes tried, in order, when resolving an effect name to a class.
+    private final static String[] EFFECT_PACKAGES = {
+        "android.media.effect.effects.",  // Default effect package
+        ""                                // Allows specifying a fully qualified class name
+    };
+
+ /** List of Effects */
+ /**
+ * <p>Copies the input texture to the output.</p>
+ * <p>Available parameters: None</p>
+ * @hide
+ */
+ public final static String EFFECT_IDENTITY = "IdentityEffect";
+
+ /**
+ * <p>Adjusts the brightness of the image.</p>
+ * <p>Available parameters:</p>
+ * <table>
+ * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
+ * <tr><td><code>brightness</code></td>
+ * <td>The brightness multiplier.</td>
+ * <td>Positive float. 1.0 means no change;
+ larger values will increase brightness.</td>
+ * </tr>
+ * </table>
+ */
+ public final static String EFFECT_BRIGHTNESS =
+ "android.media.effect.effects.BrightnessEffect";
+
+ /**
+ * <p>Adjusts the contrast of the image.</p>
+ * <p>Available parameters:</p>
+ * <table>
+ * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
+ * <tr><td><code>contrast</code></td>
+ * <td>The contrast multiplier.</td>
+ * <td>Float. 1.0 means no change;
+ larger values will increase contrast.</td>
+ * </tr>
+ * </table>
+ */
+ public final static String EFFECT_CONTRAST =
+ "android.media.effect.effects.ContrastEffect";
+
+ /**
+ * <p>Applies a fisheye lens distortion to the image.</p>
+ * <p>Available parameters:</p>
+ * <table>
+ * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
+ * <tr><td><code>scale</code></td>
+ * <td>The scale of the distortion.</td>
+ * <td>Float, between 0 and 1. Zero means no distortion.</td>
+ * </tr>
+ * </table>
+ */
+ public final static String EFFECT_FISHEYE =
+ "android.media.effect.effects.FisheyeEffect";
+
+ /**
+ * <p>Replaces the background of the input frames with frames from a
+ * selected video. Requires an initial learning period with only the
+ * background visible before the effect becomes active. The effect will wait
+ * until it does not see any motion in the scene before learning the
+ * background and starting the effect.</p>
+ *
+ * <p>Available parameters:</p>
+ * <table>
+ * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
+ * <tr><td><code>source</code></td>
+ * <td>A URI for the background video to use. This parameter must be
+ * supplied before calling apply() for the first time.</td>
+ * <td>String, such as from
+ * {@link android.net.Uri#toString Uri.toString()}</td>
+ * </tr>
+ * </table>
+ *
+ * <p>If the update listener is set for this effect using
+ * {@link Effect#setUpdateListener}, it will be called when the effect has
+ * finished learning the background, with a null value for the info
+ * parameter.</p>
+ */
+ public final static String EFFECT_BACKDROPPER =
+ "android.media.effect.effects.BackDropperEffect";
+
+ /**
+ * <p>Attempts to auto-fix the image based on histogram equalization.</p>
+ * <p>Available parameters:</p>
+ * <table>
+ * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
+ * <tr><td><code>scale</code></td>
+ * <td>The scale of the adjustment.</td>
+ * <td>Float, between 0 and 1. Zero means no adjustment, while 1 indicates the maximum
+ * amount of adjustment.</td>
+ * </tr>
+ * </table>
+ */
+ public final static String EFFECT_AUTOFIX =
+ "android.media.effect.effects.AutoFixEffect";
+
+ /**
+ * <p>Adjusts the range of minimal and maximal color pixel intensities.</p>
+ * <p>Available parameters:</p>
+ * <table>
+ * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
+ * <tr><td><code>black</code></td>
+ * <td>The value of the minimal pixel.</td>
+ * <td>Float, between 0 and 1.</td>
+ * </tr>
+ * <tr><td><code>white</code></td>
+ * <td>The value of the maximal pixel.</td>
+ * <td>Float, between 0 and 1.</td>
+ * </tr>
+ * </table>
+ */
+ public final static String EFFECT_BLACKWHITE =
+ "android.media.effect.effects.BlackWhiteEffect";
+
+ /**
+ * <p>Crops an upright rectangular area from the image. If the crop region falls outside of
+ * the image bounds, the results are undefined.</p>
+ * <p>Available parameters:</p>
+ * <table>
+ * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
+ * <tr><td><code>xorigin</code></td>
+ * <td>The origin's x-value.</td>
+ * <td>Integer, between 0 and width of the image.</td>
+ * </tr>
+ * <tr><td><code>yorigin</code></td>
+ * <td>The origin's y-value.</td>
+ * <td>Integer, between 0 and height of the image.</td>
+ * </tr>
+ * <tr><td><code>width</code></td>
+ * <td>The width of the cropped image.</td>
+ * <td>Integer, between 1 and the width of the image minus xorigin.</td>
+ * </tr>
+ * <tr><td><code>height</code></td>
+ * <td>The height of the cropped image.</td>
+ * <td>Integer, between 1 and the height of the image minus yorigin.</td>
+ * </tr>
+ * </table>
+ */
+ public final static String EFFECT_CROP =
+ "android.media.effect.effects.CropEffect";
+
+ /**
+ * <p>Applies a cross process effect on image, in which the red and green channels are
+ * enhanced while the blue channel is restricted.</p>
+ * <p>Available parameters: None</p>
+ */
+ public final static String EFFECT_CROSSPROCESS =
+ "android.media.effect.effects.CrossProcessEffect";
+
+ /**
+ * <p>Applies black and white documentary style effect on image..</p>
+ * <p>Available parameters: None</p>
+ */
+ public final static String EFFECT_DOCUMENTARY =
+ "android.media.effect.effects.DocumentaryEffect";
+
+
+ /**
+ * <p>Overlays a bitmap (with premultiplied alpha channel) onto the input image. The bitmap
+ * is stretched to fit the input image.</p>
+ * <p>Available parameters:</p>
+ * <table>
+ * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
+ * <tr><td><code>bitmap</code></td>
+ * <td>The overlay bitmap.</td>
+ * <td>A non-null Bitmap instance.</td>
+ * </tr>
+ * </table>
+ */
+ public final static String EFFECT_BITMAPOVERLAY =
+ "android.media.effect.effects.BitmapOverlayEffect";
+
+ /**
+ * <p>Representation of photo using only two color tones.</p>
+ * <p>Available parameters:</p>
+ * <table>
+ * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
+ * <tr><td><code>first_color</code></td>
+ * <td>The first color tone.</td>
+ * <td>Integer, representing an ARGB color with 8 bits per channel. May be created using
+ * {@link android.graphics.Color Color} class.</td>
+ * </tr>
+ * <tr><td><code>second_color</code></td>
+ * <td>The second color tone.</td>
+ * <td>Integer, representing an ARGB color with 8 bits per channel. May be created using
+ * {@link android.graphics.Color Color} class.</td>
+ * </tr>
+ * </table>
+ */
+ public final static String EFFECT_DUOTONE =
+ "android.media.effect.effects.DuotoneEffect";
+
+ /**
+ * <p>Applies back-light filling to the image.</p>
+ * <p>Available parameters:</p>
+ * <table>
+ * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
+ * <tr><td><code>strength</code></td>
+ * <td>The strength of the backlight.</td>
+ * <td>Float, between 0 and 1. Zero means no change.</td>
+ * </tr>
+ * </table>
+ */
+ public final static String EFFECT_FILLLIGHT =
+ "android.media.effect.effects.FillLightEffect";
+
+ /**
+ * <p>Flips image vertically and/or horizontally.</p>
+ * <p>Available parameters:</p>
+ * <table>
+ * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
+ * <tr><td><code>vertical</code></td>
+ * <td>Whether to flip image vertically.</td>
+ * <td>Boolean</td>
+ * </tr>
+ * <tr><td><code>horizontal</code></td>
+ * <td>Whether to flip image horizontally.</td>
+ * <td>Boolean</td>
+ * </tr>
+ * </table>
+ */
+ public final static String EFFECT_FLIP =
+ "android.media.effect.effects.FlipEffect";
+
+ /**
+ * <p>Applies film grain effect to image.</p>
+ * <p>Available parameters:</p>
+ * <table>
+ * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
+ * <tr><td><code>strength</code></td>
+ * <td>The strength of the grain effect.</td>
+ * <td>Float, between 0 and 1. Zero means no change.</td>
+ * </tr>
+ * </table>
+ */
+ public final static String EFFECT_GRAIN =
+ "android.media.effect.effects.GrainEffect";
+
+ /**
+ * <p>Converts image to grayscale.</p>
+ * <p>Available parameters: None</p>
+ */
+ public final static String EFFECT_GRAYSCALE =
+ "android.media.effect.effects.GrayscaleEffect";
+
+ /**
+ * <p>Applies lomo-camera style effect to image.</p>
+ * <p>Available parameters: None</p>
+ */
+ public final static String EFFECT_LOMOISH =
+ "android.media.effect.effects.LomoishEffect";
+
+ /**
+ * <p>Inverts the image colors.</p>
+ * <p>Available parameters: None</p>
+ */
+ public final static String EFFECT_NEGATIVE =
+ "android.media.effect.effects.NegativeEffect";
+
+ /**
+ * <p>Applies posterization effect to image.</p>
+ * <p>Available parameters: None</p>
+ */
+ public final static String EFFECT_POSTERIZE =
+ "android.media.effect.effects.PosterizeEffect";
+
+ /**
+ * <p>Removes red eyes on specified region.</p>
+ * <p>Available parameters:</p>
+ * <table>
+ * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
+ * <tr><td><code>centers</code></td>
+ * <td>Multiple center points (x, y) of the red eye regions.</td>
+ * <td>An array of floats, where (f[2*i], f[2*i+1]) specifies the center of the i'th eye.
+ * Coordinate values are expected to be normalized between 0 and 1.</td>
+ * </tr>
+ * </table>
+ */
+ public final static String EFFECT_REDEYE =
+ "android.media.effect.effects.RedEyeEffect";
+
+ /**
+ * <p>Rotates the image. The output frame size must be able to fit the rotated version of
+ * the input image. Note that the rotation snaps to a the closest multiple of 90 degrees.</p>
+ * <p>Available parameters:</p>
+ * <table>
+ * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
+ * <tr><td><code>angle</code></td>
+ * <td>The angle of rotation in degrees.</td>
+ * <td>Integer value. This will be rounded to the nearest multiple of 90.</td>
+ * </tr>
+ * </table>
+ */
+ public final static String EFFECT_ROTATE =
+ "android.media.effect.effects.RotateEffect";
+
+ /**
+ * <p>Adjusts color saturation of image.</p>
+ * <p>Available parameters:</p>
+ * <table>
+ * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
+ * <tr><td><code>scale</code></td>
+ * <td>The scale of color saturation.</td>
+ * <td>Float, between -1 and 1. 0 means no change, while -1 indicates full desaturation,
+ * i.e. grayscale.</td>
+ * </tr>
+ * </table>
+ */
+ public final static String EFFECT_SATURATE =
+ "android.media.effect.effects.SaturateEffect";
+
+ /**
+ * <p>Converts image to sepia tone.</p>
+ * <p>Available parameters: None</p>
+ */
+ public final static String EFFECT_SEPIA =
+ "android.media.effect.effects.SepiaEffect";
+
+ /**
+ * <p>Sharpens the image.</p>
+ * <p>Available parameters:</p>
+ * <table>
+ * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
+ * <tr><td><code>scale</code></td>
+ * <td>The degree of sharpening.</td>
+ * <td>Float, between 0 and 1. 0 means no change.</td>
+ * </tr>
+ * </table>
+ */
+ public final static String EFFECT_SHARPEN =
+ "android.media.effect.effects.SharpenEffect";
+
+ /**
+ * <p>Rotates the image according to the specified angle, and crops the image so that no
+ * non-image portions are visible.</p>
+ * <p>Available parameters:</p>
+ * <table>
+ * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
+ * <tr><td><code>angle</code></td>
+ * <td>The angle of rotation.</td>
+ * <td>Float, between -45 and +45.</td>
+ * </tr>
+ * </table>
+ */
+ public final static String EFFECT_STRAIGHTEN =
+ "android.media.effect.effects.StraightenEffect";
+
+ /**
+ * <p>Adjusts color temperature of the image.</p>
+ * <p>Available parameters:</p>
+ * <table>
+ * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
+ * <tr><td><code>scale</code></td>
+ * <td>The value of color temperature.</td>
+ * <td>Float, between 0 and 1, with 0 indicating cool, and 1 indicating warm. A value of
+ * of 0.5 indicates no change.</td>
+ * </tr>
+ * </table>
+ */
+ public final static String EFFECT_TEMPERATURE =
+ "android.media.effect.effects.ColorTemperatureEffect";
+
+ /**
+ * <p>Tints the photo with specified color.</p>
+ * <p>Available parameters:</p>
+ * <table>
+ * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
+ * <tr><td><code>tint</code></td>
+ * <td>The color of the tint.</td>
+ * <td>Integer, representing an ARGB color with 8 bits per channel. May be created using
+ * {@link android.graphics.Color Color} class.</td>
+ * </tr>
+ * </table>
+ */
+ public final static String EFFECT_TINT =
+ "android.media.effect.effects.TintEffect";
+
+ /**
+ * <p>Adds a vignette effect to image, i.e. fades away the outer image edges.</p>
+ * <p>Available parameters:</p>
+ * <table>
+ * <tr><td>Parameter name</td><td>Meaning</td><td>Valid values</td></tr>
+ * <tr><td><code>scale</code></td>
+ * <td>The scale of vignetting.</td>
+ * <td>Float, between 0 and 1. 0 means no change.</td>
+ * </tr>
+ * </table>
+ */
+ public final static String EFFECT_VIGNETTE =
+ "android.media.effect.effects.VignetteEffect";
+
+    /** Package-private: factories are obtained via EffectContext.getFactory(), not constructed directly. */
+    EffectFactory(EffectContext effectContext) {
+        mEffectContext = effectContext;
+    }
+
+ /**
+ * Instantiate a new effect with the given effect name.
+ *
+ * <p>The effect's parameters will be set to their default values.</p>
+ *
+ * <p>Note that the EGL context associated with the current EffectContext need not be made
+ * current when creating an effect. This allows the host application to instantiate effects
+ * before any EGL context has become current.</p>
+ *
+ * @param effectName The name of the effect to create.
+ * @return A new Effect instance.
+ * @throws IllegalArgumentException if the effect with the specified name is not supported or
+ * not known.
+ */
+ public Effect createEffect(String effectName) {
+ Class effectClass = getEffectClassByName(effectName);
+ if (effectClass == null) {
+ throw new IllegalArgumentException("Cannot instantiate unknown effect '" +
+ effectName + "'!");
+ }
+ return instantiateEffect(effectClass, effectName);
+ }
+
+    /**
+     * Check if an effect is supported on this platform.
+     *
+     * <p>Some effects may only be available on certain platforms. Use this method before
+     * instantiating an effect to make sure it is supported.</p>
+     *
+     * @param effectName The name of the effect.
+     * @return true, if the effect is supported on this platform.
+     */
+    public static boolean isEffectSupported(String effectName) {
+        // Unknown names simply fail to resolve to a class and yield false; this
+        // method never throws (the original @throws claim did not match the code).
+        return getEffectClassByName(effectName) != null;
+    }
+
+ private static Class getEffectClassByName(String className) {
+ Class effectClass = null;
+
+ // Get context's classloader; otherwise cannot load non-framework effects
+ ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
+
+ // Look for the class in the imported packages
+ for (String packageName : EFFECT_PACKAGES) {
+ try {
+ effectClass = contextClassLoader.loadClass(packageName + className);
+ } catch (ClassNotFoundException e) {
+ continue;
+ }
+ // Exit loop if class was found.
+ if (effectClass != null) {
+ break;
+ }
+ }
+ return effectClass;
+ }
+
+    /**
+     * Constructs an effect instance via its (EffectContext, String) constructor.
+     *
+     * @param effectClass The class to instantiate; must be a subclass of Effect.
+     * @param name The effect name passed through to the constructor.
+     * @return The new Effect instance.
+     * @throws IllegalArgumentException if effectClass is not an Effect subclass.
+     */
+    private Effect instantiateEffect(Class effectClass, String name) {
+        // Make sure this is an Effect subclass; keep the narrowed class so the
+        // constructor below is typed and no unchecked cast is needed.
+        Class<? extends Effect> fxClass;
+        try {
+            fxClass = effectClass.asSubclass(Effect.class);
+        } catch (ClassCastException e) {
+            throw new IllegalArgumentException("Attempting to allocate effect '" + effectClass
+                    + "' which is not a subclass of Effect!", e);
+        }
+
+        // Look for the required (EffectContext, String) constructor.
+        Constructor<? extends Effect> effectConstructor;
+        try {
+            effectConstructor = fxClass.getConstructor(EffectContext.class, String.class);
+        } catch (NoSuchMethodException e) {
+            throw new RuntimeException("The effect class '" + effectClass + "' does not have "
+                    + "the required constructor.", e);
+        }
+
+        // Construct the effect, wrapping any reflective failure with context.
+        try {
+            return effectConstructor.newInstance(mEffectContext, name);
+        } catch (Throwable t) {
+            throw new RuntimeException("There was an error constructing the effect '" + effectClass
+                    + "'!", t);
+        }
+    }
+}
diff --git a/android/media/effect/EffectUpdateListener.java b/android/media/effect/EffectUpdateListener.java
new file mode 100644
index 00000000..155fe49d
--- /dev/null
+++ b/android/media/effect/EffectUpdateListener.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.effect;
+
+/**
+ * Some effects may issue callbacks to inform the host of changes to the effect state. This is the
+ * listener interface for receiving those callbacks.
+ */
+public interface EffectUpdateListener {
+
+    /**
+     * Called when the effect state is updated.
+     *
+     * <p>Consult the documentation of the effect you registered this listener on for the
+     * meaning of the update and of {@code info} (it may be null, e.g. the backdropper
+     * effect signals end-of-learning with a null info value).</p>
+     *
+     * @param effect The effect that has been updated.
+     * @param info A value that gives more information about the update. See the effect's
+     *             documentation for more details on what this object is.
+     */
+    public void onEffectUpdated(Effect effect, Object info);
+
+}
+
diff --git a/android/media/effect/FilterEffect.java b/android/media/effect/FilterEffect.java
new file mode 100644
index 00000000..34b35496
--- /dev/null
+++ b/android/media/effect/FilterEffect.java
@@ -0,0 +1,98 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.effect;
+
+import android.filterfw.core.FilterContext;
+import android.filterfw.core.GLFrame;
+import android.filterfw.core.Frame;
+import android.filterfw.core.FrameFormat;
+import android.filterfw.core.FrameManager;
+import android.filterfw.format.ImageFormat;
+
+/**
+ * The FilterEffect class is the base class for all Effects based on Filters from the Mobile
+ * Filter Framework (MFF). It provides helpers for saving/restoring GL state around effect
+ * application and for wrapping existing GL textures in MFF frames.
+ * @hide
+ */
+public abstract class FilterEffect extends Effect {
+
+ // Effect context this effect lives in; protected so subclasses can reach the filter context.
+ protected EffectContext mEffectContext;
+ // Unique effect name, matching the name used by the EffectFactory to instantiate this effect.
+ private String mName;
+
+ /**
+ * Protected constructor as FilterEffects should be created by Factory.
+ *
+ * @param context The effect context this effect belongs to.
+ * @param name The unique name of the effect.
+ */
+ protected FilterEffect(EffectContext context, String name) {
+ mEffectContext = context;
+ mName = name;
+ }
+
+ /**
+ * Get the effect name.
+ *
+ * Returns the unique name of the effect, which matches the name used for instantiating this
+ * effect by the EffectFactory.
+ *
+ * @return The name of the effect.
+ */
+ @Override
+ public String getName() {
+ return mName;
+ }
+
+ // Helper Methods for subclasses ///////////////////////////////////////////////////////////////
+ /**
+ * Call this before manipulating the GL context. Will assert that the GL environment is in a
+ * valid state, and save it.
+ */
+ protected void beginGLEffect() {
+ mEffectContext.assertValidGLState();
+ mEffectContext.saveGLState();
+ }
+
+ /**
+ * Call this after manipulating the GL context. Restores the previous GL state.
+ */
+ protected void endGLEffect() {
+ mEffectContext.restoreGLState();
+ }
+
+ /**
+ * Returns the active filter context for this effect.
+ */
+ protected FilterContext getFilterContext() {
+ return mEffectContext.mFilterContext;
+ }
+
+ /**
+ * Converts a texture into a Frame.
+ *
+ * @param texId The id of the existing GL texture to wrap.
+ * @param width The width of the texture in pixels.
+ * @param height The height of the texture in pixels.
+ * @return A GPU-backed RGBA frame bound to the existing texture. Its timestamp is set to
+ * Frame.TIMESTAMP_UNKNOWN. The caller is responsible for releasing the frame.
+ */
+ protected Frame frameFromTexture(int texId, int width, int height) {
+ FrameManager manager = getFilterContext().getFrameManager();
+ FrameFormat format = ImageFormat.create(width, height,
+ ImageFormat.COLORSPACE_RGBA,
+ FrameFormat.TARGET_GPU);
+ Frame frame = manager.newBoundFrame(format,
+ GLFrame.EXISTING_TEXTURE_BINDING,
+ texId);
+ frame.setTimestamp(Frame.TIMESTAMP_UNKNOWN);
+ return frame;
+ }
+
+}
+
diff --git a/android/media/effect/FilterGraphEffect.java b/android/media/effect/FilterGraphEffect.java
new file mode 100644
index 00000000..80c695bd
--- /dev/null
+++ b/android/media/effect/FilterGraphEffect.java
@@ -0,0 +1,116 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.effect;
+
+import android.filterfw.core.Filter;
+import android.filterfw.core.FilterGraph;
+import android.filterfw.core.GraphRunner;
+import android.filterfw.core.SyncRunner;
+import android.media.effect.FilterEffect;
+import android.media.effect.EffectContext;
+import android.filterfw.io.GraphIOException;
+import android.filterfw.io.GraphReader;
+import android.filterfw.io.TextGraphReader;
+
+/**
+ * Effect subclass for effects based on a whole Filter graph. Subclasses need only invoke the
+ * constructor with the correct arguments to obtain an Effect implementation.
+ *
+ * @hide
+ */
+public class FilterGraphEffect extends FilterEffect {
+
+ private static final String TAG = "FilterGraphEffect";
+
+ protected String mInputName; // name of the source filter in the graph
+ protected String mOutputName; // name of the target filter in the graph
+ protected GraphRunner mRunner; // synchronous runner used to execute mGraph
+ protected FilterGraph mGraph; // graph parsed from the supplied graph string
+ protected Class mSchedulerClass; // scheduler class handed to the SyncRunner
+
+ /**
+ * Constructs a new FilterGraphEffect.
+ *
+ * @param context The effect context this effect belongs to.
+ * @param name The name of this effect (used to create it in the EffectFactory).
+ * @param graphString The graph string to create the graph.
+ * @param inputName The name of the input GLTextureSource filter.
+ * @param outputName The name of the output GLTextureTarget filter.
+ * @param scheduler The scheduler class to run the graph with.
+ */
+ public FilterGraphEffect(EffectContext context,
+ String name,
+ String graphString,
+ String inputName,
+ String outputName,
+ Class scheduler) {
+ super(context, name);
+
+ mInputName = inputName;
+ mOutputName = outputName;
+ mSchedulerClass = scheduler;
+ createGraph(graphString);
+
+ }
+
+ /**
+ * Parses the graph string and sets up a SyncRunner for it.
+ *
+ * @throws RuntimeException if the graph string cannot be parsed or yields no graph.
+ */
+ private void createGraph(String graphString) {
+ GraphReader reader = new TextGraphReader();
+ try {
+ mGraph = reader.readGraphString(graphString);
+ } catch (GraphIOException e) {
+ throw new RuntimeException("Could not setup effect", e);
+ }
+
+ if (mGraph == null) {
+ throw new RuntimeException("Could not setup effect");
+ }
+ mRunner = new SyncRunner(getFilterContext(), mGraph, mSchedulerClass);
+ }
+
+ /**
+ * Runs the graph once: binds the input texture and size to the source filter, the output
+ * texture to the target filter, and executes the runner between begin/endGLEffect.
+ */
+ @Override
+ public void apply(int inputTexId, int width, int height, int outputTexId) {
+ beginGLEffect();
+ Filter src = mGraph.getFilter(mInputName);
+ if (src != null) {
+ src.setInputValue("texId", inputTexId);
+ src.setInputValue("width", width);
+ src.setInputValue("height", height);
+ } else {
+ throw new RuntimeException("Internal error applying effect");
+ }
+ Filter dest = mGraph.getFilter(mOutputName);
+ if (dest != null) {
+ dest.setInputValue("texId", outputTexId);
+ } else {
+ throw new RuntimeException("Internal error applying effect");
+ }
+ try {
+ mRunner.run();
+ } catch (RuntimeException e) {
+ throw new RuntimeException("Internal error applying effect: ", e);
+ }
+ endGLEffect();
+ }
+
+ // No-op by default: graph-based effects expose no parameters unless a subclass overrides
+ // this (see e.g. BackDropperEffect).
+ @Override
+ public void setParameter(String parameterKey, Object value) {
+ }
+
+ /**
+ * Tears the graph down; the effect must not be applied after this call.
+ */
+ @Override
+ public void release() {
+ mGraph.tearDown(getFilterContext());
+ mGraph = null;
+ }
+}
diff --git a/android/media/effect/SingleFilterEffect.java b/android/media/effect/SingleFilterEffect.java
new file mode 100644
index 00000000..47900dfc
--- /dev/null
+++ b/android/media/effect/SingleFilterEffect.java
@@ -0,0 +1,95 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.effect;
+
+import android.filterfw.core.Filter;
+import android.filterfw.core.FilterFactory;
+import android.filterfw.core.FilterFunction;
+import android.filterfw.core.Frame;
+import android.media.effect.EffectContext;
+
+/**
+ * Effect subclass for effects based on a single Filter. Subclasses need only invoke the
+ * constructor with the correct arguments to obtain an Effect implementation.
+ *
+ * @hide
+ */
+public class SingleFilterEffect extends FilterEffect {
+
+ protected FilterFunction mFunction; // wraps the single filter for one-shot execution
+ protected String mInputName; // name of the filter's input image port
+ protected String mOutputName; // name of the filter's output image port
+
+ /**
+ * Constructs a new SingleFilterEffect.
+ *
+ * @param context The effect context this effect belongs to.
+ * @param name The name of this effect (used to create it in the EffectFactory).
+ * @param filterClass The class of the filter to wrap.
+ * @param inputName The name of the input image port.
+ * @param outputName The name of the output image port.
+ * @param finalParameters Key-value pairs of final input port assignments.
+ */
+ public SingleFilterEffect(EffectContext context,
+ String name,
+ Class filterClass,
+ String inputName,
+ String outputName,
+ Object... finalParameters) {
+ super(context, name);
+
+ mInputName = inputName;
+ mOutputName = outputName;
+
+ String filterName = filterClass.getSimpleName();
+ FilterFactory factory = FilterFactory.sharedFactory();
+ Filter filter = factory.createFilterByClass(filterClass, filterName);
+ filter.initWithAssignmentList(finalParameters);
+
+ mFunction = new FilterFunction(getFilterContext(), filter);
+ }
+
+ /**
+ * Wraps both textures as frames, runs the filter function once, copies the result into the
+ * output frame, and releases all temporary frames.
+ */
+ @Override
+ public void apply(int inputTexId, int width, int height, int outputTexId) {
+ beginGLEffect();
+
+ Frame inputFrame = frameFromTexture(inputTexId, width, height);
+ Frame outputFrame = frameFromTexture(outputTexId, width, height);
+
+ Frame resultFrame = mFunction.executeWithArgList(mInputName, inputFrame);
+
+ outputFrame.setDataFromFrame(resultFrame);
+
+ // Release all temporary frames back to the frame manager.
+ inputFrame.release();
+ outputFrame.release();
+ resultFrame.release();
+
+ endGLEffect();
+ }
+
+ // Parameters are forwarded directly to the wrapped filter's input ports.
+ @Override
+ public void setParameter(String parameterKey, Object value) {
+ mFunction.setInputValue(parameterKey, value);
+ }
+
+ /**
+ * Tears the filter function down; the effect must not be applied after this call.
+ */
+ @Override
+ public void release() {
+ mFunction.tearDown();
+ mFunction = null;
+ }
+}
+
diff --git a/android/media/effect/SizeChangeEffect.java b/android/media/effect/SizeChangeEffect.java
new file mode 100644
index 00000000..1bf7d400
--- /dev/null
+++ b/android/media/effect/SizeChangeEffect.java
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.effect;
+
+import android.filterfw.core.Frame;
+import android.media.effect.EffectContext;
+
+/**
+ * Effect subclass for effects based on a single Filter with output size different
+ * from input. Subclasses need only invoke the constructor with the correct arguments
+ * to obtain an Effect implementation.
+ *
+ * @hide
+ */
+public class SizeChangeEffect extends SingleFilterEffect {
+
+ public SizeChangeEffect(EffectContext context,
+ String name,
+ Class filterClass,
+ String inputName,
+ String outputName,
+ Object... finalParameters) {
+ super(context, name, filterClass, inputName, outputName, finalParameters);
+ }
+
+ /**
+ * Applies the wrapped filter. Unlike SingleFilterEffect, the output frame is sized from the
+ * filter's result format rather than from the input dimensions.
+ */
+ @Override
+ public void apply(int inputTexId, int width, int height, int outputTexId) {
+ beginGLEffect();
+
+ Frame inputFrame = frameFromTexture(inputTexId, width, height);
+ Frame resultFrame = mFunction.executeWithArgList(mInputName, inputFrame);
+
+ // Query the result's dimensions so the output texture frame matches the filter's output.
+ int outputWidth = resultFrame.getFormat().getWidth();
+ int outputHeight = resultFrame.getFormat().getHeight();
+
+ Frame outputFrame = frameFromTexture(outputTexId, outputWidth, outputHeight);
+ outputFrame.setDataFromFrame(resultFrame);
+
+ // Release all temporary frames back to the frame manager.
+ inputFrame.release();
+ outputFrame.release();
+ resultFrame.release();
+
+ endGLEffect();
+ }
+}
diff --git a/android/media/effect/effects/AutoFixEffect.java b/android/media/effect/effects/AutoFixEffect.java
new file mode 100644
index 00000000..44a141b6
--- /dev/null
+++ b/android/media/effect/effects/AutoFixEffect.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.effect.effects;
+
+import android.media.effect.EffectContext;
+import android.media.effect.SingleFilterEffect;
+import android.filterpacks.imageproc.AutoFixFilter;
+
+/**
+ * Single-filter effect backed by {@code AutoFixFilter}; input and output are both bound to the
+ * filter's "image" port.
+ * @hide
+ */
+public class AutoFixEffect extends SingleFilterEffect {
+ public AutoFixEffect(EffectContext context, String name) {
+ super(context, name, AutoFixFilter.class, "image", "image");
+ }
+}
diff --git a/android/media/effect/effects/BackDropperEffect.java b/android/media/effect/effects/BackDropperEffect.java
new file mode 100644
index 00000000..f977e600
--- /dev/null
+++ b/android/media/effect/effects/BackDropperEffect.java
@@ -0,0 +1,105 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.effect.effects;
+
+import android.filterfw.core.Filter;
+import android.filterfw.core.OneShotScheduler;
+import android.media.effect.EffectContext;
+import android.media.effect.FilterGraphEffect;
+import android.media.effect.EffectUpdateListener;
+
+import android.filterpacks.videoproc.BackDropperFilter;
+import android.filterpacks.videoproc.BackDropperFilter.LearningDoneListener;
+
+/**
+ * Background replacement Effect.
+ *
+ * Replaces the background of the input video stream with a selected video
+ * Learns the background when it first starts up;
+ * needs unobstructed view of background when this happens.
+ *
+ * Effect parameters:
+ * source: A URI for the background video
+ * Listener: Called when learning period is complete
+ *
+ * @hide
+ */
+public class BackDropperEffect extends FilterGraphEffect {
+ // Text graph: foreground GLTextureSource and background MediaSource both feed the
+ // BackDropperFilter ("replacer"), whose output goes to a GLTextureTarget ("output").
+ private static final String mGraphDefinition =
+ "@import android.filterpacks.base;\n" +
+ "@import android.filterpacks.videoproc;\n" +
+ "@import android.filterpacks.videosrc;\n" +
+ "\n" +
+ "@filter GLTextureSource foreground {\n" +
+ " texId = 0;\n" + // Will be set by base class
+ " width = 0;\n" +
+ " height = 0;\n" +
+ " repeatFrame = true;\n" +
+ "}\n" +
+ "\n" +
+ "@filter MediaSource background {\n" +
+ " sourceUrl = \"no_file_specified\";\n" +
+ " waitForNewFrame = false;\n" +
+ " sourceIsUrl = true;\n" +
+ "}\n" +
+ "\n" +
+ "@filter BackDropperFilter replacer {\n" +
+ " autowbToggle = 1;\n" +
+ "}\n" +
+ "\n" +
+ "@filter GLTextureTarget output {\n" +
+ " texId = 0;\n" +
+ "}\n" +
+ "\n" +
+ "@connect foreground[frame] => replacer[video];\n" +
+ "@connect background[video] => replacer[background];\n" +
+ "@connect replacer[video] => output[frame];\n";
+
+ // Host listener, notified (with null info) once background learning is done. May be null.
+ private EffectUpdateListener mEffectListener = null;
+
+ // Bridges the BackDropperFilter's learning-done callback to the host's EffectUpdateListener.
+ private LearningDoneListener mLearningListener = new LearningDoneListener() {
+ public void onLearningDone(BackDropperFilter filter) {
+ if (mEffectListener != null) {
+ mEffectListener.onEffectUpdated(BackDropperEffect.this, null);
+ }
+ }
+ };
+
+ public BackDropperEffect(EffectContext context, String name) {
+ super(context, name, mGraphDefinition, "foreground", "output", OneShotScheduler.class);
+
+ // Hook our learning listener into the replacer filter created by the graph.
+ Filter replacer = mGraph.getFilter("replacer");
+ replacer.setInputValue("learningDoneListener", mLearningListener);
+ }
+
+ /**
+ * Supported keys: "source" (URI of the background video) and "context" (context used by the
+ * background MediaSource). Other keys are silently ignored.
+ */
+ @Override
+ public void setParameter(String parameterKey, Object value) {
+ if (parameterKey.equals("source")) {
+ Filter background = mGraph.getFilter("background");
+ background.setInputValue("sourceUrl", value);
+ } else if (parameterKey.equals("context")) {
+ Filter background = mGraph.getFilter("background");
+ background.setInputValue("context", value);
+ }
+ }
+
+ /**
+ * Sets the listener notified when the background learning period completes.
+ */
+ @Override
+ public void setUpdateListener(EffectUpdateListener listener) {
+ mEffectListener = listener;
+ }
+
+} \ No newline at end of file
diff --git a/android/media/effect/effects/BitmapOverlayEffect.java b/android/media/effect/effects/BitmapOverlayEffect.java
new file mode 100644
index 00000000..43f461c8
--- /dev/null
+++ b/android/media/effect/effects/BitmapOverlayEffect.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+package android.media.effect.effects;
+
+import android.media.effect.EffectContext;
+import android.media.effect.SingleFilterEffect;
+import android.filterpacks.imageproc.BitmapOverlayFilter;
+
+/**
+ * Single-filter effect backed by {@code BitmapOverlayFilter}; input and output are both bound to
+ * the filter's "image" port.
+ * @hide
+ */
+public class BitmapOverlayEffect extends SingleFilterEffect {
+ public BitmapOverlayEffect(EffectContext context, String name) {
+ super(context, name, BitmapOverlayFilter.class, "image", "image");
+ }
+}
diff --git a/android/media/effect/effects/BlackWhiteEffect.java b/android/media/effect/effects/BlackWhiteEffect.java
new file mode 100644
index 00000000..771afff8
--- /dev/null
+++ b/android/media/effect/effects/BlackWhiteEffect.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.effect.effects;
+
+import android.media.effect.EffectContext;
+import android.media.effect.SingleFilterEffect;
+import android.filterpacks.imageproc.BlackWhiteFilter;
+
+/**
+ * Single-filter effect backed by {@code BlackWhiteFilter}; input and output are both bound to
+ * the filter's "image" port.
+ * @hide
+ */
+public class BlackWhiteEffect extends SingleFilterEffect {
+ public BlackWhiteEffect(EffectContext context, String name) {
+ super(context, name, BlackWhiteFilter.class, "image", "image");
+ }
+}
diff --git a/android/media/effect/effects/BrightnessEffect.java b/android/media/effect/effects/BrightnessEffect.java
new file mode 100644
index 00000000..774e72f7
--- /dev/null
+++ b/android/media/effect/effects/BrightnessEffect.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.effect.effects;
+
+import android.media.effect.EffectContext;
+import android.media.effect.SingleFilterEffect;
+import android.filterpacks.imageproc.BrightnessFilter;
+
+/**
+ * Single-filter effect backed by {@code BrightnessFilter}; input and output are both bound to
+ * the filter's "image" port.
+ * @hide
+ */
+public class BrightnessEffect extends SingleFilterEffect {
+ public BrightnessEffect(EffectContext context, String name) {
+ super(context, name, BrightnessFilter.class, "image", "image");
+ }
+}
+
diff --git a/android/media/effect/effects/ColorTemperatureEffect.java b/android/media/effect/effects/ColorTemperatureEffect.java
new file mode 100644
index 00000000..62d98ced
--- /dev/null
+++ b/android/media/effect/effects/ColorTemperatureEffect.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.effect.effects;
+
+import android.media.effect.EffectContext;
+import android.media.effect.SingleFilterEffect;
+import android.filterpacks.imageproc.ColorTemperatureFilter;
+
+/**
+ * Single-filter effect backed by {@code ColorTemperatureFilter}; input and output are both bound
+ * to the filter's "image" port.
+ * @hide
+ */
+public class ColorTemperatureEffect extends SingleFilterEffect {
+ public ColorTemperatureEffect(EffectContext context, String name) {
+ super(context, name, ColorTemperatureFilter.class, "image", "image");
+ }
+}
diff --git a/android/media/effect/effects/ContrastEffect.java b/android/media/effect/effects/ContrastEffect.java
new file mode 100644
index 00000000..d5bfc21f
--- /dev/null
+++ b/android/media/effect/effects/ContrastEffect.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.effect.effects;
+
+import android.media.effect.EffectContext;
+import android.media.effect.SingleFilterEffect;
+import android.filterpacks.imageproc.ContrastFilter;
+
+/**
+ * Single-filter effect backed by {@code ContrastFilter}; input and output are both bound to
+ * the filter's "image" port.
+ * @hide
+ */
+public class ContrastEffect extends SingleFilterEffect {
+ public ContrastEffect(EffectContext context, String name) {
+ super(context, name, ContrastFilter.class, "image", "image");
+ }
+}
+
diff --git a/android/media/effect/effects/CropEffect.java b/android/media/effect/effects/CropEffect.java
new file mode 100644
index 00000000..7e1c495a
--- /dev/null
+++ b/android/media/effect/effects/CropEffect.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.effect.effects;
+
+import android.media.effect.EffectContext;
+import android.media.effect.SizeChangeEffect;
+import android.filterpacks.imageproc.CropRectFilter;
+
+/**
+ * Effect backed by {@code CropRectFilter}, bound to the filter's "image" ports. Extends
+ * SizeChangeEffect because cropping produces an output sized differently from the input.
+ * @hide
+ */
+public class CropEffect extends SizeChangeEffect {
+ public CropEffect(EffectContext context, String name) {
+ super(context, name, CropRectFilter.class, "image", "image");
+ }
+}
diff --git a/android/media/effect/effects/CrossProcessEffect.java b/android/media/effect/effects/CrossProcessEffect.java
new file mode 100644
index 00000000..d7a7df58
--- /dev/null
+++ b/android/media/effect/effects/CrossProcessEffect.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.effect.effects;
+
+import android.media.effect.EffectContext;
+import android.media.effect.SingleFilterEffect;
+import android.filterpacks.imageproc.CrossProcessFilter;
+
+/**
+ * Single-filter effect backed by {@code CrossProcessFilter}; input and output are both bound to
+ * the filter's "image" port.
+ * @hide
+ */
+public class CrossProcessEffect extends SingleFilterEffect {
+ public CrossProcessEffect(EffectContext context, String name) {
+ super(context, name, CrossProcessFilter.class, "image", "image");
+ }
+}
diff --git a/android/media/effect/effects/DocumentaryEffect.java b/android/media/effect/effects/DocumentaryEffect.java
new file mode 100644
index 00000000..1a5ea351
--- /dev/null
+++ b/android/media/effect/effects/DocumentaryEffect.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.effect.effects;
+
+import android.media.effect.EffectContext;
+import android.media.effect.SingleFilterEffect;
+import android.filterpacks.imageproc.DocumentaryFilter;
+
+/**
+ * Single-filter effect backed by {@code DocumentaryFilter}; input and output are both bound to
+ * the filter's "image" port.
+ * @hide
+ */
+public class DocumentaryEffect extends SingleFilterEffect {
+ public DocumentaryEffect(EffectContext context, String name) {
+ super(context, name, DocumentaryFilter.class, "image", "image");
+ }
+}
diff --git a/android/media/effect/effects/DuotoneEffect.java b/android/media/effect/effects/DuotoneEffect.java
new file mode 100644
index 00000000..1391b1f2
--- /dev/null
+++ b/android/media/effect/effects/DuotoneEffect.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.effect.effects;
+
+import android.media.effect.EffectContext;
+import android.media.effect.SingleFilterEffect;
+import android.filterpacks.imageproc.DuotoneFilter;
+
+/**
+ * Single-filter effect backed by {@code DuotoneFilter}; input and output are both bound to
+ * the filter's "image" port.
+ * @hide
+ */
+public class DuotoneEffect extends SingleFilterEffect {
+ public DuotoneEffect(EffectContext context, String name) {
+ super(context, name, DuotoneFilter.class, "image", "image");
+ }
+}
diff --git a/android/media/effect/effects/FillLightEffect.java b/android/media/effect/effects/FillLightEffect.java
new file mode 100644
index 00000000..5260de34
--- /dev/null
+++ b/android/media/effect/effects/FillLightEffect.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.effect.effects;
+
+import android.media.effect.EffectContext;
+import android.media.effect.SingleFilterEffect;
+import android.filterpacks.imageproc.FillLightFilter;
+
+/**
+ * Single-filter effect backed by {@code FillLightFilter}; input and output are both bound to
+ * the filter's "image" port.
+ * @hide
+ */
+public class FillLightEffect extends SingleFilterEffect {
+ public FillLightEffect(EffectContext context, String name) {
+ super(context, name, FillLightFilter.class, "image", "image");
+ }
+}
diff --git a/android/media/effect/effects/FisheyeEffect.java b/android/media/effect/effects/FisheyeEffect.java
new file mode 100644
index 00000000..6abfe420
--- /dev/null
+++ b/android/media/effect/effects/FisheyeEffect.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.effect.effects;
+
+import android.media.effect.EffectContext;
+import android.media.effect.SingleFilterEffect;
+import android.filterpacks.imageproc.FisheyeFilter;
+
+/**
+ * Single-filter effect backed by {@code FisheyeFilter}; input and output are both bound to
+ * the filter's "image" port.
+ * @hide
+ */
+public class FisheyeEffect extends SingleFilterEffect {
+ public FisheyeEffect(EffectContext context, String name) {
+ super(context, name, FisheyeFilter.class, "image", "image");
+ }
+}
+
diff --git a/android/media/effect/effects/FlipEffect.java b/android/media/effect/effects/FlipEffect.java
new file mode 100644
index 00000000..0f5c4212
--- /dev/null
+++ b/android/media/effect/effects/FlipEffect.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.effect.effects;
+
+import android.media.effect.EffectContext;
+import android.media.effect.SingleFilterEffect;
+import android.filterpacks.imageproc.FlipFilter;
+
+/**
+ * Single-filter effect backed by {@code FlipFilter}; input and output are both bound to
+ * the filter's "image" port.
+ * @hide
+ */
+public class FlipEffect extends SingleFilterEffect {
+ public FlipEffect(EffectContext context, String name) {
+ super(context, name, FlipFilter.class, "image", "image");
+ }
+}
diff --git a/android/media/effect/effects/GrainEffect.java b/android/media/effect/effects/GrainEffect.java
new file mode 100644
index 00000000..2fda7e90
--- /dev/null
+++ b/android/media/effect/effects/GrainEffect.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.effect.effects;
+
+import android.media.effect.EffectContext;
+import android.media.effect.SingleFilterEffect;
+import android.filterpacks.imageproc.GrainFilter;
+
+/**
+ * @hide
+ */
+public class GrainEffect extends SingleFilterEffect {
+ public GrainEffect(EffectContext context, String name) {
+ super(context, name, GrainFilter.class, "image", "image");
+ }
+}
diff --git a/android/media/effect/effects/GrayscaleEffect.java b/android/media/effect/effects/GrayscaleEffect.java
new file mode 100644
index 00000000..26ca081f
--- /dev/null
+++ b/android/media/effect/effects/GrayscaleEffect.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.effect.effects;
+
+import android.media.effect.EffectContext;
+import android.media.effect.SingleFilterEffect;
+import android.filterpacks.imageproc.ToGrayFilter;
+
+/**
+ * @hide
+ */
+public class GrayscaleEffect extends SingleFilterEffect {
+ public GrayscaleEffect(EffectContext context, String name) {
+ super(context, name, ToGrayFilter.class, "image", "image");
+ }
+}
diff --git a/android/media/effect/effects/IdentityEffect.java b/android/media/effect/effects/IdentityEffect.java
new file mode 100644
index 00000000..d07779ee
--- /dev/null
+++ b/android/media/effect/effects/IdentityEffect.java
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.effect.effects;
+
+import android.filterfw.core.Frame;
+import android.media.effect.EffectContext;
+import android.media.effect.FilterEffect;
+
+/**
+ * @hide
+ */
+public class IdentityEffect extends FilterEffect {
+
+ public IdentityEffect(EffectContext context, String name) {
+ super(context, name);
+ }
+
+ @Override
+ public void apply(int inputTexId, int width, int height, int outputTexId) {
+ beginGLEffect();
+
+ Frame inputFrame = frameFromTexture(inputTexId, width, height);
+ Frame outputFrame = frameFromTexture(outputTexId, width, height);
+
+ outputFrame.setDataFromFrame(inputFrame);
+
+ inputFrame.release();
+ outputFrame.release();
+
+ endGLEffect();
+ }
+
+ @Override
+ public void setParameter(String parameterKey, Object value) {
+ throw new IllegalArgumentException("Unknown parameter " + parameterKey
+ + " for IdentityEffect!");
+ }
+
+ @Override
+ public void release() {
+ }
+}
+
diff --git a/android/media/effect/effects/LomoishEffect.java b/android/media/effect/effects/LomoishEffect.java
new file mode 100644
index 00000000..776e53c5
--- /dev/null
+++ b/android/media/effect/effects/LomoishEffect.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.effect.effects;
+
+import android.media.effect.EffectContext;
+import android.media.effect.SingleFilterEffect;
+import android.filterpacks.imageproc.LomoishFilter;
+
+/**
+ * @hide
+ */
+public class LomoishEffect extends SingleFilterEffect {
+ public LomoishEffect(EffectContext context, String name) {
+ super(context, name, LomoishFilter.class, "image", "image");
+ }
+}
diff --git a/android/media/effect/effects/NegativeEffect.java b/android/media/effect/effects/NegativeEffect.java
new file mode 100644
index 00000000..29fc94a1
--- /dev/null
+++ b/android/media/effect/effects/NegativeEffect.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.effect.effects;
+
+import android.media.effect.EffectContext;
+import android.media.effect.SingleFilterEffect;
+import android.filterpacks.imageproc.NegativeFilter;
+
+/**
+ * @hide
+ */
+public class NegativeEffect extends SingleFilterEffect {
+ public NegativeEffect(EffectContext context, String name) {
+ super(context, name, NegativeFilter.class, "image", "image");
+ }
+}
diff --git a/android/media/effect/effects/PosterizeEffect.java b/android/media/effect/effects/PosterizeEffect.java
new file mode 100644
index 00000000..20a8a37b
--- /dev/null
+++ b/android/media/effect/effects/PosterizeEffect.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.effect.effects;
+
+import android.media.effect.EffectContext;
+import android.media.effect.SingleFilterEffect;
+import android.filterpacks.imageproc.PosterizeFilter;
+
+/**
+ * @hide
+ */
+public class PosterizeEffect extends SingleFilterEffect {
+ public PosterizeEffect(EffectContext context, String name) {
+ super(context, name, PosterizeFilter.class, "image", "image");
+ }
+}
diff --git a/android/media/effect/effects/RedEyeEffect.java b/android/media/effect/effects/RedEyeEffect.java
new file mode 100644
index 00000000..8ed9909c
--- /dev/null
+++ b/android/media/effect/effects/RedEyeEffect.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+
+package android.media.effect.effects;
+
+import android.media.effect.EffectContext;
+import android.media.effect.SingleFilterEffect;
+import android.filterpacks.imageproc.RedEyeFilter;
+
+/**
+ * @hide
+ */
+public class RedEyeEffect extends SingleFilterEffect {
+ public RedEyeEffect(EffectContext context, String name) {
+ super(context, name, RedEyeFilter.class, "image", "image");
+ }
+}
diff --git a/android/media/effect/effects/RotateEffect.java b/android/media/effect/effects/RotateEffect.java
new file mode 100644
index 00000000..23400152
--- /dev/null
+++ b/android/media/effect/effects/RotateEffect.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.effect.effects;
+
+import android.media.effect.EffectContext;
+import android.media.effect.SizeChangeEffect;
+import android.filterpacks.imageproc.RotateFilter;
+
+/**
+ * @hide
+ */
+public class RotateEffect extends SizeChangeEffect {
+ public RotateEffect(EffectContext context, String name) {
+ super(context, name, RotateFilter.class, "image", "image");
+ }
+}
diff --git a/android/media/effect/effects/SaturateEffect.java b/android/media/effect/effects/SaturateEffect.java
new file mode 100644
index 00000000..fe9250a7
--- /dev/null
+++ b/android/media/effect/effects/SaturateEffect.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.effect.effects;
+
+import android.media.effect.EffectContext;
+import android.media.effect.SingleFilterEffect;
+import android.filterpacks.imageproc.SaturateFilter;
+
+/**
+ * @hide
+ */
+public class SaturateEffect extends SingleFilterEffect {
+ public SaturateEffect(EffectContext context, String name) {
+ super(context, name, SaturateFilter.class, "image", "image");
+ }
+}
diff --git a/android/media/effect/effects/SepiaEffect.java b/android/media/effect/effects/SepiaEffect.java
new file mode 100644
index 00000000..de85b2d4
--- /dev/null
+++ b/android/media/effect/effects/SepiaEffect.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.effect.effects;
+
+import android.media.effect.EffectContext;
+import android.media.effect.SingleFilterEffect;
+import android.filterpacks.imageproc.SepiaFilter;
+
+/**
+ * @hide
+ */
+public class SepiaEffect extends SingleFilterEffect {
+ public SepiaEffect(EffectContext context, String name) {
+ super(context, name, SepiaFilter.class, "image", "image");
+ }
+}
diff --git a/android/media/effect/effects/SharpenEffect.java b/android/media/effect/effects/SharpenEffect.java
new file mode 100644
index 00000000..46776ebf
--- /dev/null
+++ b/android/media/effect/effects/SharpenEffect.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.effect.effects;
+
+import android.media.effect.EffectContext;
+import android.media.effect.SingleFilterEffect;
+import android.filterpacks.imageproc.SharpenFilter;
+
+/**
+ * @hide
+ */
+public class SharpenEffect extends SingleFilterEffect {
+ public SharpenEffect(EffectContext context, String name) {
+ super(context, name, SharpenFilter.class, "image", "image");
+ }
+}
diff --git a/android/media/effect/effects/StraightenEffect.java b/android/media/effect/effects/StraightenEffect.java
new file mode 100644
index 00000000..49253a00
--- /dev/null
+++ b/android/media/effect/effects/StraightenEffect.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.effect.effects;
+
+import android.media.effect.EffectContext;
+import android.media.effect.SingleFilterEffect;
+import android.filterpacks.imageproc.StraightenFilter;
+
+/**
+ * @hide
+ */
+public class StraightenEffect extends SingleFilterEffect {
+ public StraightenEffect(EffectContext context, String name) {
+ super(context, name, StraightenFilter.class, "image", "image");
+ }
+}
diff --git a/android/media/effect/effects/TintEffect.java b/android/media/effect/effects/TintEffect.java
new file mode 100644
index 00000000..6de9ea8f
--- /dev/null
+++ b/android/media/effect/effects/TintEffect.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.effect.effects;
+
+import android.media.effect.EffectContext;
+import android.media.effect.SingleFilterEffect;
+import android.filterpacks.imageproc.TintFilter;
+
+/**
+ * @hide
+ */
+public class TintEffect extends SingleFilterEffect {
+ public TintEffect(EffectContext context, String name) {
+ super(context, name, TintFilter.class, "image", "image");
+ }
+}
diff --git a/android/media/effect/effects/VignetteEffect.java b/android/media/effect/effects/VignetteEffect.java
new file mode 100644
index 00000000..b143d775
--- /dev/null
+++ b/android/media/effect/effects/VignetteEffect.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.media.effect.effects;
+
+import android.media.effect.EffectContext;
+import android.media.effect.SingleFilterEffect;
+import android.filterpacks.imageproc.VignetteFilter;
+
+/**
+ * @hide
+ */
+public class VignetteEffect extends SingleFilterEffect {
+ public VignetteEffect(EffectContext context, String name) {
+ super(context, name, VignetteFilter.class, "image", "image");
+ }
+}
diff --git a/android/media/midi/MidiDevice.java b/android/media/midi/MidiDevice.java
new file mode 100644
index 00000000..a9957369
--- /dev/null
+++ b/android/media/midi/MidiDevice.java
@@ -0,0 +1,308 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.midi;
+
+import android.os.Binder;
+import android.os.IBinder;
+import android.os.Process;
+import android.os.RemoteException;
+import android.util.Log;
+
+import dalvik.system.CloseGuard;
+
+import libcore.io.IoUtils;
+
+import java.io.Closeable;
+import java.io.FileDescriptor;
+import java.io.IOException;
+
+import java.util.HashSet;
+
+/**
+ * This class is used for sending and receiving data to and from a MIDI device
+ * Instances of this class are created by {@link MidiManager#openDevice}.
+ */
+public final class MidiDevice implements Closeable {
+ static {
+ System.loadLibrary("media_jni");
+ }
+
+ private static final String TAG = "MidiDevice";
+
+ private final MidiDeviceInfo mDeviceInfo;
+ private final IMidiDeviceServer mDeviceServer;
+ private final IMidiManager mMidiManager;
+ private final IBinder mClientToken;
+ private final IBinder mDeviceToken;
+ private boolean mIsDeviceClosed;
+
+ // Native API Helpers
+ /**
+ * Keep a static list of MidiDevice objects that are mirrorToNative()'d so they
+ * don't get inadvertantly garbage collected.
+ */
+ private static HashSet<MidiDevice> mMirroredDevices = new HashSet<MidiDevice>();
+
+ /**
+ * If this device is mirrorToNatived(), this is the native device handler.
+ */
+ private long mNativeHandle;
+
+ private final CloseGuard mGuard = CloseGuard.get();
+
+ /**
+ * This class represents a connection between the output port of one device
+ * and the input port of another. Created by {@link #connectPorts}.
+ * Close this object to terminate the connection.
+ */
+ public class MidiConnection implements Closeable {
+ private final IMidiDeviceServer mInputPortDeviceServer;
+ private final IBinder mInputPortToken;
+ private final IBinder mOutputPortToken;
+ private final CloseGuard mGuard = CloseGuard.get();
+ private boolean mIsClosed;
+
+ MidiConnection(IBinder outputPortToken, MidiInputPort inputPort) {
+ mInputPortDeviceServer = inputPort.getDeviceServer();
+ mInputPortToken = inputPort.getToken();
+ mOutputPortToken = outputPortToken;
+ mGuard.open("close");
+ }
+
+ @Override
+ public void close() throws IOException {
+ synchronized (mGuard) {
+ if (mIsClosed) return;
+ mGuard.close();
+ try {
+ // close input port
+ mInputPortDeviceServer.closePort(mInputPortToken);
+ // close output port
+ mDeviceServer.closePort(mOutputPortToken);
+ } catch (RemoteException e) {
+ Log.e(TAG, "RemoteException in MidiConnection.close");
+ }
+ mIsClosed = true;
+ }
+ }
+
+ @Override
+ protected void finalize() throws Throwable {
+ try {
+ if (mGuard != null) {
+ mGuard.warnIfOpen();
+ }
+
+ close();
+ } finally {
+ super.finalize();
+ }
+ }
+ }
+
+ /* package */ MidiDevice(MidiDeviceInfo deviceInfo, IMidiDeviceServer server,
+ IMidiManager midiManager, IBinder clientToken, IBinder deviceToken) {
+ mDeviceInfo = deviceInfo;
+ mDeviceServer = server;
+ mMidiManager = midiManager;
+ mClientToken = clientToken;
+ mDeviceToken = deviceToken;
+ mGuard.open("close");
+ }
+
+ /**
+ * Returns a {@link MidiDeviceInfo} object, which describes this device.
+ *
+ * @return the {@link MidiDeviceInfo} object
+ */
+ public MidiDeviceInfo getInfo() {
+ return mDeviceInfo;
+ }
+
+ /**
+ * Called to open a {@link MidiInputPort} for the specified port number.
+ *
+ * An input port can only be used by one sender at a time.
+ * Opening an input port will fail if another application has already opened it for use.
+ * A {@link MidiDeviceStatus} can be used to determine if an input port is already open.
+ *
+ * @param portNumber the number of the input port to open
+ * @return the {@link MidiInputPort} if the open is successful,
+ * or null in case of failure.
+ */
+ public MidiInputPort openInputPort(int portNumber) {
+ if (mIsDeviceClosed) {
+ return null;
+ }
+ try {
+ IBinder token = new Binder();
+ FileDescriptor fd = mDeviceServer.openInputPort(token, portNumber);
+ if (fd == null) {
+ return null;
+ }
+ return new MidiInputPort(mDeviceServer, token, fd, portNumber);
+ } catch (RemoteException e) {
+ Log.e(TAG, "RemoteException in openInputPort");
+ return null;
+ }
+ }
+
+ /**
+ * Called to open a {@link MidiOutputPort} for the specified port number.
+ *
+ * An output port may be opened by multiple applications.
+ *
+ * @param portNumber the number of the output port to open
+ * @return the {@link MidiOutputPort} if the open is successful,
+ * or null in case of failure.
+ */
+ public MidiOutputPort openOutputPort(int portNumber) {
+ if (mIsDeviceClosed) {
+ return null;
+ }
+ try {
+ IBinder token = new Binder();
+ FileDescriptor fd = mDeviceServer.openOutputPort(token, portNumber);
+ if (fd == null) {
+ return null;
+ }
+ return new MidiOutputPort(mDeviceServer, token, fd, portNumber);
+ } catch (RemoteException e) {
+ Log.e(TAG, "RemoteException in openOutputPort");
+ return null;
+ }
+ }
+
+ /**
+ * Connects the supplied {@link MidiInputPort} to the output port of this device
+ * with the specified port number. Once the connection is made, the MidiInput port instance
+ * can no longer receive data via its {@link MidiReceiver#onSend} method.
+ * This method returns a {@link MidiDevice.MidiConnection} object, which can be used
+ * to close the connection.
+ *
+ * @param inputPort the inputPort to connect
+ * @param outputPortNumber the port number of the output port to connect inputPort to.
+ * @return {@link MidiDevice.MidiConnection} object if the connection is successful,
+ * or null in case of failure.
+ */
+ public MidiConnection connectPorts(MidiInputPort inputPort, int outputPortNumber) {
+ if (outputPortNumber < 0 || outputPortNumber >= mDeviceInfo.getOutputPortCount()) {
+ throw new IllegalArgumentException("outputPortNumber out of range");
+ }
+ if (mIsDeviceClosed) {
+ return null;
+ }
+
+ FileDescriptor fd = inputPort.claimFileDescriptor();
+ if (fd == null) {
+ return null;
+ }
+ try {
+ IBinder token = new Binder();
+ int calleePid = mDeviceServer.connectPorts(token, fd, outputPortNumber);
+ // If the service is a different Process then it will duplicate the fd
+ // and we can safely close this one.
+ // But if the service is in the same Process then closing the fd will
+ // kill the connection. So don't do that.
+ if (calleePid != Process.myPid()) {
+ // close our copy of the file descriptor
+ IoUtils.closeQuietly(fd);
+ }
+
+ return new MidiConnection(token, inputPort);
+ } catch (RemoteException e) {
+ Log.e(TAG, "RemoteException in connectPorts");
+ return null;
+ }
+ }
+
+ /**
+ * Makes Midi Device available to the Native API
+ * @hide
+ */
+ public long mirrorToNative() throws IOException {
+ if (mIsDeviceClosed || mNativeHandle != 0) {
+ return 0;
+ }
+
+ mNativeHandle = native_mirrorToNative(mDeviceServer.asBinder(), mDeviceInfo.getId());
+ if (mNativeHandle == 0) {
+ throw new IOException("Failed mirroring to native");
+ }
+
+ synchronized (mMirroredDevices) {
+ mMirroredDevices.add(this);
+ }
+ return mNativeHandle;
+ }
+
+ /**
+ * Makes Midi Device no longer available to the Native API
+ * @hide
+ */
+ public void removeFromNative() {
+ if (mNativeHandle == 0) {
+ return;
+ }
+
+ synchronized (mGuard) {
+ native_removeFromNative(mNativeHandle);
+ mNativeHandle = 0;
+ }
+
+ synchronized (mMirroredDevices) {
+ mMirroredDevices.remove(this);
+ }
+ }
+
+ @Override
+ public void close() throws IOException {
+ synchronized (mGuard) {
+ if (!mIsDeviceClosed) {
+ removeFromNative();
+ mGuard.close();
+ mIsDeviceClosed = true;
+ try {
+ mMidiManager.closeDevice(mClientToken, mDeviceToken);
+ } catch (RemoteException e) {
+ Log.e(TAG, "RemoteException in closeDevice");
+ }
+ }
+ }
+ }
+
+ @Override
+ protected void finalize() throws Throwable {
+ try {
+ if (mGuard != null) {
+ mGuard.warnIfOpen();
+ }
+
+ close();
+ } finally {
+ super.finalize();
+ }
+ }
+
+ @Override
+ public String toString() {
+ return ("MidiDevice: " + mDeviceInfo.toString());
+ }
+
+ private native long native_mirrorToNative(IBinder deviceServerBinder, int id);
+ private native void native_removeFromNative(long deviceHandle);
+}
diff --git a/android/media/midi/MidiDeviceInfo.java b/android/media/midi/MidiDeviceInfo.java
new file mode 100644
index 00000000..5fd9006d
--- /dev/null
+++ b/android/media/midi/MidiDeviceInfo.java
@@ -0,0 +1,390 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.midi;
+
+import android.os.Bundle;
+import android.os.Parcel;
+import android.os.Parcelable;
+
+import android.util.Log;
+
+/**
+ * This class contains information to describe a MIDI device.
+ * For now we only have information that can be retrieved easily for USB devices,
+ * but we will probably expand this in the future.
+ *
+ * This class is just an immutable object to encapsulate the MIDI device description.
+ * Use the MidiDevice class to actually communicate with devices.
+ */
+public final class MidiDeviceInfo implements Parcelable {
+
+ private static final String TAG = "MidiDeviceInfo";
+
+ /*
+ * Please note that constants and (un)marshalling code need to be kept in sync
+ * with the native implementation (MidiDeviceInfo.h|cpp)
+ */
+
+ /**
+ * Constant representing USB MIDI devices for {@link #getType}
+ */
+ public static final int TYPE_USB = 1;
+
+ /**
+ * Constant representing virtual (software based) MIDI devices for {@link #getType}
+ */
+ public static final int TYPE_VIRTUAL = 2;
+
+ /**
+ * Constant representing Bluetooth MIDI devices for {@link #getType}
+ */
+ public static final int TYPE_BLUETOOTH = 3;
+
+ /**
+ * Bundle key for the device's user visible name property.
+ * The value for this property is of type {@link java.lang.String}.
+ * Used with the {@link android.os.Bundle} returned by {@link #getProperties}.
+ * For USB devices, this is a concatenation of the manufacturer and product names.
+ */
+ public static final String PROPERTY_NAME = "name";
+
+ /**
+ * Bundle key for the device's manufacturer name property.
+ * The value for this property is of type {@link java.lang.String}.
+ * Used with the {@link android.os.Bundle} returned by {@link #getProperties}.
+ * Matches the USB device manufacturer name string for USB MIDI devices.
+ */
+ public static final String PROPERTY_MANUFACTURER = "manufacturer";
+
+ /**
+ * Bundle key for the device's product name property.
+ * The value for this property is of type {@link java.lang.String}.
+ * Used with the {@link android.os.Bundle} returned by {@link #getProperties}
+ * Matches the USB device product name string for USB MIDI devices.
+ */
+ public static final String PROPERTY_PRODUCT = "product";
+
+ /**
+ * Bundle key for the device's version property.
+ * The value for this property is of type {@link java.lang.String}.
+ * Used with the {@link android.os.Bundle} returned by {@link #getProperties}
+ * Matches the USB device version number for USB MIDI devices.
+ */
+ public static final String PROPERTY_VERSION = "version";
+
+ /**
+ * Bundle key for the device's serial number property.
+ * The value for this property is of type {@link java.lang.String}.
+ * Used with the {@link android.os.Bundle} returned by {@link #getProperties}
+ * Matches the USB device serial number for USB MIDI devices.
+ */
+ public static final String PROPERTY_SERIAL_NUMBER = "serial_number";
+
+ /**
+ * Bundle key for the device's corresponding USB device.
+ * The value for this property is of type {@link android.hardware.usb.UsbDevice}.
+ * Only set for USB MIDI devices.
+ * Used with the {@link android.os.Bundle} returned by {@link #getProperties}
+ */
+ public static final String PROPERTY_USB_DEVICE = "usb_device";
+
+ /**
+ * Bundle key for the device's corresponding Bluetooth device.
+ * The value for this property is of type {@link android.bluetooth.BluetoothDevice}.
+ * Only set for Bluetooth MIDI devices.
+ * Used with the {@link android.os.Bundle} returned by {@link #getProperties}
+ */
+ public static final String PROPERTY_BLUETOOTH_DEVICE = "bluetooth_device";
+
+ /**
+ * Bundle key for the device's ALSA card number.
+ * The value for this property is an integer.
+ * Only set for USB MIDI devices.
+ * Used with the {@link android.os.Bundle} returned by {@link #getProperties}
+ *
+ * @hide
+ */
+ public static final String PROPERTY_ALSA_CARD = "alsa_card";
+
+ /**
+ * Bundle key for the device's ALSA device number.
+ * The value for this property is an integer.
+ * Only set for USB MIDI devices.
+ * Used with the {@link android.os.Bundle} returned by {@link #getProperties}
+ *
+ * @hide
+ */
+ public static final String PROPERTY_ALSA_DEVICE = "alsa_device";
+
+ /**
+ * ServiceInfo for the service hosting the device implementation.
+ * The value for this property is of type {@link android.content.pm.ServiceInfo}.
+ * Only set for Virtual MIDI devices.
+ * Used with the {@link android.os.Bundle} returned by {@link #getProperties}
+ *
+ * @hide
+ */
+ public static final String PROPERTY_SERVICE_INFO = "service_info";
+
+ /**
+ * Contains information about an input or output port.
+ */
+ public static final class PortInfo {
+ /**
+ * Port type for input ports
+ */
+ public static final int TYPE_INPUT = 1;
+
+ /**
+ * Port type for output ports
+ */
+ public static final int TYPE_OUTPUT = 2;
+
+ private final int mPortType;
+ private final int mPortNumber;
+ private final String mName;
+
+ PortInfo(int type, int portNumber, String name) {
+ mPortType = type;
+ mPortNumber = portNumber;
+ mName = (name == null ? "" : name);
+ }
+
+ /**
+ * Returns the port type of the port (either {@link #TYPE_INPUT} or {@link #TYPE_OUTPUT})
+ * @return the port type
+ */
+ public int getType() {
+ return mPortType;
+ }
+
+ /**
+ * Returns the port number of the port
+ * @return the port number
+ */
+ public int getPortNumber() {
+ return mPortNumber;
+ }
+
+ /**
+ * Returns the name of the port, or empty string if the port has no name
+ * @return the port name
+ */
+ public String getName() {
+ return mName;
+ }
+ }
+
+ private final int mType; // USB or virtual
+ private final int mId; // unique ID generated by MidiService
+ private final int mInputPortCount;
+ private final int mOutputPortCount;
+ private final String[] mInputPortNames;
+ private final String[] mOutputPortNames;
+ private final Bundle mProperties;
+ private final boolean mIsPrivate;
+
+ /**
+ * MidiDeviceInfo should only be instantiated by MidiService implementation
+ * @hide
+ */
+ public MidiDeviceInfo(int type, int id, int numInputPorts, int numOutputPorts,
+ String[] inputPortNames, String[] outputPortNames, Bundle properties,
+ boolean isPrivate) {
+ mType = type;
+ mId = id;
+ mInputPortCount = numInputPorts;
+ mOutputPortCount = numOutputPorts;
+ if (inputPortNames == null) {
+ mInputPortNames = new String[numInputPorts];
+ } else {
+ mInputPortNames = inputPortNames;
+ }
+ if (outputPortNames == null) {
+ mOutputPortNames = new String[numOutputPorts];
+ } else {
+ mOutputPortNames = outputPortNames;
+ }
+ mProperties = properties;
+ mIsPrivate = isPrivate;
+ }
+
+ /**
+ * Returns the type of the device.
+ *
+ * @return the device's type
+ */
+ public int getType() {
+ return mType;
+ }
+
+ /**
+ * Returns the ID of the device.
+ * This ID is generated by the MIDI service and is not persistent across device unplugs.
+ *
+ * @return the device's ID
+ */
+ public int getId() {
+ return mId;
+ }
+
+ /**
+ * Returns the device's number of input ports.
+ *
+ * @return the number of input ports
+ */
+ public int getInputPortCount() {
+ return mInputPortCount;
+ }
+
+ /**
+ * Returns the device's number of output ports.
+ *
+ * @return the number of output ports
+ */
+ public int getOutputPortCount() {
+ return mOutputPortCount;
+ }
+
+ /**
+ * Returns information about the device's ports.
+ * The ports are in unspecified order.
+ *
+ * @return array of {@link PortInfo}
+ */
+ public PortInfo[] getPorts() {
+ PortInfo[] ports = new PortInfo[mInputPortCount + mOutputPortCount];
+
+ int index = 0;
+ for (int i = 0; i < mInputPortCount; i++) {
+ ports[index++] = new PortInfo(PortInfo.TYPE_INPUT, i, mInputPortNames[i]);
+ }
+ for (int i = 0; i < mOutputPortCount; i++) {
+ ports[index++] = new PortInfo(PortInfo.TYPE_OUTPUT, i, mOutputPortNames[i]);
+ }
+
+ return ports;
+ }
+
+ /**
+ * Returns the {@link android.os.Bundle} containing the device's properties.
+ *
+ * @return the device's properties
+ */
+ public Bundle getProperties() {
+ return mProperties;
+ }
+
+ /**
+ * Returns true if the device is private. Private devices are only visible and accessible
+ * to clients with the same UID as the application that is hosting the device.
+ *
+ * @return true if the device is private
+ */
+ public boolean isPrivate() {
+ return mIsPrivate;
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o instanceof MidiDeviceInfo) {
+ return (((MidiDeviceInfo)o).mId == mId);
+ } else {
+ return false;
+ }
+ }
+
+ @Override
+ public int hashCode() {
+ return mId;
+ }
+
+ @Override
+ public String toString() {
+ // This is a hack to force the mProperties Bundle to unparcel so we can
+ // print all the names and values.
+ mProperties.getString(PROPERTY_NAME);
+ return ("MidiDeviceInfo[mType=" + mType +
+ ",mInputPortCount=" + mInputPortCount +
+ ",mOutputPortCount=" + mOutputPortCount +
+ ",mProperties=" + mProperties +
+ ",mIsPrivate=" + mIsPrivate);
+ }
+
+ public static final Parcelable.Creator<MidiDeviceInfo> CREATOR =
+ new Parcelable.Creator<MidiDeviceInfo>() {
+ public MidiDeviceInfo createFromParcel(Parcel in) {
+ // Needs to be kept in sync with code in MidiDeviceInfo.cpp
+ int type = in.readInt();
+ int id = in.readInt();
+ int inputPortCount = in.readInt();
+ int outputPortCount = in.readInt();
+ String[] inputPortNames = in.createStringArray();
+ String[] outputPortNames = in.createStringArray();
+ boolean isPrivate = (in.readInt() == 1);
+ Bundle basicPropertiesIgnored = in.readBundle();
+ Bundle properties = in.readBundle();
+ return new MidiDeviceInfo(type, id, inputPortCount, outputPortCount,
+ inputPortNames, outputPortNames, properties, isPrivate);
+ }
+
+ public MidiDeviceInfo[] newArray(int size) {
+ return new MidiDeviceInfo[size];
+ }
+ };
+
+ public int describeContents() {
+ return 0;
+ }
+
+ private Bundle getBasicProperties(String[] keys) {
+ Bundle basicProperties = new Bundle();
+ for (String key : keys) {
+ Object val = mProperties.get(key);
+ if (val != null) {
+ if (val instanceof String) {
+ basicProperties.putString(key, (String) val);
+ } else if (val instanceof Integer) {
+ basicProperties.putInt(key, (Integer) val);
+ } else {
+ Log.w(TAG, "Unsupported property type: " + val.getClass().getName());
+ }
+ }
+ }
+ return basicProperties;
+ }
+
+ public void writeToParcel(Parcel parcel, int flags) {
+ // Needs to be kept in sync with code in MidiDeviceInfo.cpp
+ parcel.writeInt(mType);
+ parcel.writeInt(mId);
+ parcel.writeInt(mInputPortCount);
+ parcel.writeInt(mOutputPortCount);
+ parcel.writeStringArray(mInputPortNames);
+ parcel.writeStringArray(mOutputPortNames);
+ parcel.writeInt(mIsPrivate ? 1 : 0);
+ // "Basic" properties only contain properties of primitive types
+ // and thus can be read back by native code. "Extra" properties is
+ // a superset that contains all properties.
+ parcel.writeBundle(getBasicProperties(new String[] {
+ PROPERTY_NAME, PROPERTY_MANUFACTURER, PROPERTY_PRODUCT, PROPERTY_VERSION,
+ PROPERTY_SERIAL_NUMBER, PROPERTY_ALSA_CARD, PROPERTY_ALSA_DEVICE
+ }));
+ // Must be serialized last so native code can safely ignore it.
+ parcel.writeBundle(mProperties);
+ }
+}
diff --git a/android/media/midi/MidiDeviceServer.java b/android/media/midi/MidiDeviceServer.java
new file mode 100644
index 00000000..51d55206
--- /dev/null
+++ b/android/media/midi/MidiDeviceServer.java
@@ -0,0 +1,452 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.midi;
+
+import android.os.Binder;
+import android.os.IBinder;
+import android.os.Process;
+import android.os.RemoteException;
+import android.system.ErrnoException;
+import android.system.Os;
+import android.system.OsConstants;
+import android.util.Log;
+
+import com.android.internal.midi.MidiDispatcher;
+
+import dalvik.system.CloseGuard;
+
+import libcore.io.IoUtils;
+
+import java.io.Closeable;
+import java.io.FileDescriptor;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.concurrent.CopyOnWriteArrayList;
+
/**
 * Internal class used for providing an implementation for a MIDI device.
 *
 * <p>Bridges a device implementation to its clients via the
 * {@code IMidiDeviceServer} binder interface: for each opened port the client
 * receives one end of a SOCK_SEQPACKET socket pair over which MIDI data flows,
 * while this server keeps the other end wrapped in a MidiInputPort/MidiOutputPort.
 *
 * @hide
 */
public final class MidiDeviceServer implements Closeable {
    private static final String TAG = "MidiDeviceServer";

    private final IMidiManager mMidiManager;

    // MidiDeviceInfo for the device implemented by this server.
    // Assigned once: either by the MidiDeviceService constructor or later via
    // IMidiDeviceServer.setDeviceInfo() (system UID only).
    private MidiDeviceInfo mDeviceInfo;
    private final int mInputPortCount;
    private final int mOutputPortCount;

    // MidiReceivers for receiving data on our input ports
    private final MidiReceiver[] mInputPortReceivers;

    // MidiDispatchers for sending data on our output ports
    private MidiDispatcher[] mOutputPortDispatchers;

    // MidiOutputPorts for clients connected to our input ports.
    // Also serves as the lock guarding per-input-port open state.
    private final MidiOutputPort[] mInputPortOutputPorts;

    // List of all MidiInputPorts we created
    private final CopyOnWriteArrayList<MidiInputPort> mInputPorts
            = new CopyOnWriteArrayList<MidiInputPort>();


    // for reporting device status
    private final boolean[] mInputPortOpen;
    private final int[] mOutputPortOpenCount;

    private final CloseGuard mGuard = CloseGuard.get();
    private boolean mIsClosed;

    private final Callback mCallback;

    // Port clients keyed by their death-watch binder token; guarded by itself.
    private final HashMap<IBinder, PortClient> mPortClients = new HashMap<IBinder, PortClient>();
    // Reverse lookup from MidiInputPort to its client, used by the send-failure
    // handler; guarded by itself.
    private final HashMap<MidiInputPort, PortClient> mInputPortClients =
            new HashMap<MidiInputPort, PortClient>();

    public interface Callback {
        /**
         * Called to notify when our device status has changed
         * @param server the {@link MidiDeviceServer} that changed
         * @param status the {@link MidiDeviceStatus} for the device
         */
        public void onDeviceStatusChanged(MidiDeviceServer server, MidiDeviceStatus status);

        /**
         * Called to notify when the device is closed
         */
        public void onClose();
    }

    // Tracks one client that has a port open, and releases the port's
    // resources if that client's process dies.
    abstract private class PortClient implements IBinder.DeathRecipient {
        final IBinder mToken;

        PortClient(IBinder token) {
            mToken = token;

            try {
                token.linkToDeath(this, 0);
            } catch (RemoteException e) {
                // Client already dead; clean up immediately.
                close();
            }
        }

        abstract void close();

        // Overridden by OutputPortClient; null for clients of input ports.
        MidiInputPort getInputPort() {
            return null;
        }

        @Override
        public void binderDied() {
            close();
        }
    }

    // Client holding one of our input ports open (we write to it via a
    // MidiOutputPort wrapping our side of the socket pair).
    private class InputPortClient extends PortClient {
        private final MidiOutputPort mOutputPort;

        InputPortClient(IBinder token, MidiOutputPort outputPort) {
            super(token);
            mOutputPort = outputPort;
        }

        @Override
        void close() {
            mToken.unlinkToDeath(this, 0);
            synchronized (mInputPortOutputPorts) {
                int portNumber = mOutputPort.getPortNumber();
                mInputPortOutputPorts[portNumber] = null;
                mInputPortOpen[portNumber] = false;
                updateDeviceStatus();
            }
            IoUtils.closeQuietly(mOutputPort);
        }
    }

    // Client holding one of our output ports open (we feed it via a
    // MidiInputPort connected to the port's dispatcher).
    private class OutputPortClient extends PortClient {
        private final MidiInputPort mInputPort;

        OutputPortClient(IBinder token, MidiInputPort inputPort) {
            super(token);
            mInputPort = inputPort;
        }

        @Override
        void close() {
            mToken.unlinkToDeath(this, 0);
            int portNumber = mInputPort.getPortNumber();
            MidiDispatcher dispatcher = mOutputPortDispatchers[portNumber];
            synchronized (dispatcher) {
                dispatcher.getSender().disconnect(mInputPort);
                int openCount = dispatcher.getReceiverCount();
                mOutputPortOpenCount[portNumber] = openCount;
                updateDeviceStatus();
            }

            mInputPorts.remove(mInputPort);
            IoUtils.closeQuietly(mInputPort);
        }

        @Override
        MidiInputPort getInputPort() {
            return mInputPort;
        }
    }

    /**
     * Creates a connected pair of SOCK_SEQPACKET sockets; SEQPACKET preserves
     * message boundaries so each MIDI packet is delivered intact.
     */
    private static FileDescriptor[] createSeqPacketSocketPair() throws IOException {
        try {
            final FileDescriptor fd0 = new FileDescriptor();
            final FileDescriptor fd1 = new FileDescriptor();
            Os.socketpair(OsConstants.AF_UNIX, OsConstants.SOCK_SEQPACKET, 0, fd0, fd1);
            return new FileDescriptor[] { fd0, fd1 };
        } catch (ErrnoException e) {
            throw e.rethrowAsIOException();
        }
    }

    // Binder interface stub for receiving connection requests from clients
    private final IMidiDeviceServer mServer = new IMidiDeviceServer.Stub() {

        /**
         * Opens input port {@code portNumber} for the calling client and
         * returns the client's end of a socket pair, or null if the port
         * number is invalid or the port is already open.
         */
        @Override
        public FileDescriptor openInputPort(IBinder token, int portNumber) {
            if (mDeviceInfo.isPrivate()) {
                if (Binder.getCallingUid() != Process.myUid()) {
                    throw new SecurityException("Can't access private device from different UID");
                }
            }

            if (portNumber < 0 || portNumber >= mInputPortCount) {
                Log.e(TAG, "portNumber out of range in openInputPort: " + portNumber);
                return null;
            }

            synchronized (mInputPortOutputPorts) {
                // Input ports are exclusive: only one client at a time.
                if (mInputPortOutputPorts[portNumber] != null) {
                    Log.d(TAG, "port " + portNumber + " already open");
                    return null;
                }

                try {
                    FileDescriptor[] pair = createSeqPacketSocketPair();
                    MidiOutputPort outputPort = new MidiOutputPort(pair[0], portNumber);
                    mInputPortOutputPorts[portNumber] = outputPort;
                    outputPort.connect(mInputPortReceivers[portNumber]);
                    InputPortClient client = new InputPortClient(token, outputPort);
                    synchronized (mPortClients) {
                        mPortClients.put(token, client);
                    }
                    mInputPortOpen[portNumber] = true;
                    updateDeviceStatus();
                    return pair[1];
                } catch (IOException e) {
                    Log.e(TAG, "unable to create FileDescriptors in openInputPort");
                    return null;
                }
            }
        }

        /**
         * Opens output port {@code portNumber} for the calling client and
         * returns the client's end of a socket pair, or null on error.
         * Unlike input ports, multiple clients may share an output port.
         */
        @Override
        public FileDescriptor openOutputPort(IBinder token, int portNumber) {
            if (mDeviceInfo.isPrivate()) {
                if (Binder.getCallingUid() != Process.myUid()) {
                    throw new SecurityException("Can't access private device from different UID");
                }
            }

            if (portNumber < 0 || portNumber >= mOutputPortCount) {
                Log.e(TAG, "portNumber out of range in openOutputPort: " + portNumber);
                return null;
            }

            try {
                FileDescriptor[] pair = createSeqPacketSocketPair();
                MidiInputPort inputPort = new MidiInputPort(pair[0], portNumber);
                // Undo the default blocking-mode of the server-side socket for
                // physical devices to avoid stalling the Java device handler if
                // client app code gets stuck inside 'onSend' handler.
                if (mDeviceInfo.getType() != MidiDeviceInfo.TYPE_VIRTUAL) {
                    IoUtils.setBlocking(pair[0], false);
                }
                MidiDispatcher dispatcher = mOutputPortDispatchers[portNumber];
                synchronized (dispatcher) {
                    dispatcher.getSender().connect(inputPort);
                    int openCount = dispatcher.getReceiverCount();
                    mOutputPortOpenCount[portNumber] = openCount;
                    updateDeviceStatus();
                }

                mInputPorts.add(inputPort);
                OutputPortClient client = new OutputPortClient(token, inputPort);
                synchronized (mPortClients) {
                    mPortClients.put(token, client);
                }
                synchronized (mInputPortClients) {
                    mInputPortClients.put(inputPort, client);
                }
                return pair[1];
            } catch (IOException e) {
                Log.e(TAG, "unable to create FileDescriptors in openOutputPort");
                return null;
            }
        }

        /**
         * Closes the port associated with the given token and removes its
         * bookkeeping from both client maps.
         */
        @Override
        public void closePort(IBinder token) {
            MidiInputPort inputPort = null;
            synchronized (mPortClients) {
                PortClient client = mPortClients.remove(token);
                if (client != null) {
                    inputPort = client.getInputPort();
                    client.close();
                }
            }
            // Remove from the reverse map outside the mPortClients lock to
            // keep lock acquisition order consistent with other paths.
            if (inputPort != null) {
                synchronized (mInputPortClients) {
                    mInputPortClients.remove(inputPort);
                }
            }
        }

        @Override
        public void closeDevice() {
            if (mCallback != null) {
                mCallback.onClose();
            }
            IoUtils.closeQuietly(MidiDeviceServer.this);
        }

        /**
         * Connects a caller-supplied file descriptor directly to one of our
         * output ports. Returns our PID so the caller can detect whether it
         * shares our process (see return comment below).
         */
        @Override
        public int connectPorts(IBinder token, FileDescriptor fd,
                int outputPortNumber) {
            MidiInputPort inputPort = new MidiInputPort(fd, outputPortNumber);
            MidiDispatcher dispatcher = mOutputPortDispatchers[outputPortNumber];
            synchronized (dispatcher) {
                dispatcher.getSender().connect(inputPort);
                int openCount = dispatcher.getReceiverCount();
                mOutputPortOpenCount[outputPortNumber] = openCount;
                updateDeviceStatus();
            }

            mInputPorts.add(inputPort);
            OutputPortClient client = new OutputPortClient(token, inputPort);
            synchronized (mPortClients) {
                mPortClients.put(token, client);
            }
            synchronized (mInputPortClients) {
                mInputPortClients.put(inputPort, client);
            }
            return Process.myPid(); // for caller to detect same process ID
        }

        @Override
        public MidiDeviceInfo getDeviceInfo() {
            return mDeviceInfo;
        }

        /**
         * One-shot setter for the device info; restricted to the system UID
         * (i.e. MidiService) and rejected once already set.
         */
        @Override
        public void setDeviceInfo(MidiDeviceInfo deviceInfo) {
            if (Binder.getCallingUid() != Process.SYSTEM_UID) {
                throw new SecurityException("setDeviceInfo should only be called by MidiService");
            }
            if (mDeviceInfo != null) {
                throw new IllegalStateException("setDeviceInfo should only be called once");
            }
            mDeviceInfo = deviceInfo;
        }
    };

    // Constructor for MidiManager.createDeviceServer()
    /* package */ MidiDeviceServer(IMidiManager midiManager, MidiReceiver[] inputPortReceivers,
            int numOutputPorts, Callback callback) {
        mMidiManager = midiManager;
        mInputPortReceivers = inputPortReceivers;
        mInputPortCount = inputPortReceivers.length;
        mOutputPortCount = numOutputPorts;
        mCallback = callback;

        mInputPortOutputPorts = new MidiOutputPort[mInputPortCount];

        mOutputPortDispatchers = new MidiDispatcher[numOutputPorts];
        for (int i = 0; i < numOutputPorts; i++) {
            mOutputPortDispatchers[i] = new MidiDispatcher(mInputPortFailureHandler);
        }

        mInputPortOpen = new boolean[mInputPortCount];
        mOutputPortOpenCount = new int[numOutputPorts];

        // Arm the CloseGuard so a leaked (never-closed) server is reported.
        mGuard.open("close");
    }

    // Invoked by a dispatcher when writing to a client's input port fails;
    // treats the failure as a disconnect and closes that client's port.
    private final MidiDispatcher.MidiReceiverFailureHandler mInputPortFailureHandler =
            new MidiDispatcher.MidiReceiverFailureHandler() {
                public void onReceiverFailure(MidiReceiver receiver, IOException failure) {
                    Log.e(TAG, "MidiInputPort failed to send data", failure);
                    PortClient client = null;
                    synchronized (mInputPortClients) {
                        client = mInputPortClients.remove(receiver);
                    }
                    if (client != null) {
                        client.close();
                    }
                }
            };

    // Constructor for MidiDeviceService.onCreate()
    /* package */ MidiDeviceServer(IMidiManager midiManager, MidiReceiver[] inputPortReceivers,
            MidiDeviceInfo deviceInfo, Callback callback) {
        this(midiManager, inputPortReceivers, deviceInfo.getOutputPortCount(), callback);
        mDeviceInfo = deviceInfo;
    }

    /* package */ IMidiDeviceServer getBinderInterface() {
        return mServer;
    }

    public IBinder asBinder() {
        return mServer.asBinder();
    }

    /**
     * Publishes the current port-open state to the callback and MidiService.
     * Called with various port locks held by the open/close paths.
     */
    private void updateDeviceStatus() {
        // clear calling identity, since we may be in a Binder call from one of our clients
        long identityToken = Binder.clearCallingIdentity();

        MidiDeviceStatus status = new MidiDeviceStatus(mDeviceInfo, mInputPortOpen,
                mOutputPortOpenCount);
        if (mCallback != null) {
            mCallback.onDeviceStatusChanged(this, status);
        }
        try {
            mMidiManager.setDeviceStatus(mServer, status);
        } catch (RemoteException e) {
            Log.e(TAG, "RemoteException in updateDeviceStatus");
        } finally {
            Binder.restoreCallingIdentity(identityToken);
        }
    }

    /**
     * Closes all open ports, unregisters from MidiService, and releases the
     * CloseGuard. Idempotent: returns immediately if already closed.
     */
    @Override
    public void close() throws IOException {
        synchronized (mGuard) {
            if (mIsClosed) return;
            mGuard.close();

            for (int i = 0; i < mInputPortCount; i++) {
                MidiOutputPort outputPort = mInputPortOutputPorts[i];
                if (outputPort != null) {
                    IoUtils.closeQuietly(outputPort);
                    mInputPortOutputPorts[i] = null;
                }
            }
            for (MidiInputPort inputPort : mInputPorts) {
                IoUtils.closeQuietly(inputPort);
            }
            mInputPorts.clear();
            try {
                mMidiManager.unregisterDeviceServer(mServer);
            } catch (RemoteException e) {
                Log.e(TAG, "RemoteException in unregisterDeviceServer");
            }
            mIsClosed = true;
        }
    }

    /**
     * Last-chance cleanup: warns via CloseGuard if the server was leaked
     * without close(), then closes it.
     */
    @Override
    protected void finalize() throws Throwable {
        try {
            if (mGuard != null) {
                mGuard.warnIfOpen();
            }

            close();
        } finally {
            super.finalize();
        }
    }

    /**
     * Returns an array of {@link MidiReceiver} for the device's output ports.
     * Clients can use these receivers to send data out the device's output ports.
     * @return array of MidiReceivers
     */
    public MidiReceiver[] getOutputPortReceivers() {
        // Copy into a fresh array so callers cannot mutate our dispatcher table.
        MidiReceiver[] receivers = new MidiReceiver[mOutputPortCount];
        System.arraycopy(mOutputPortDispatchers, 0, receivers, 0, mOutputPortCount);
        return receivers;
    }
}
diff --git a/android/media/midi/MidiDeviceService.java b/android/media/midi/MidiDeviceService.java
new file mode 100644
index 00000000..388d95bb
--- /dev/null
+++ b/android/media/midi/MidiDeviceService.java
@@ -0,0 +1,145 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.midi;
+
+import android.app.Service;
+import android.content.Context;
+import android.content.Intent;
+import android.os.IBinder;
+import android.os.RemoteException;
+import android.os.ServiceManager;
+import android.util.Log;
+
+/**
+ * A service that implements a virtual MIDI device.
+ * Subclasses must implement the {@link #onGetInputPortReceivers} method to provide a
+ * list of {@link MidiReceiver}s to receive data sent to the device's input ports.
+ * Similarly, subclasses can call {@link #getOutputPortReceivers} to fetch a list
+ * of {@link MidiReceiver}s for sending data out the output ports.
+ *
+ * <p>To extend this class, you must declare the service in your manifest file with
+ * an intent filter with the {@link #SERVICE_INTERFACE} action
+ * and meta-data to describe the virtual device.
+ For example:</p>
+ * <pre>
+ * &lt;service android:name=".VirtualDeviceService"
+ * android:label="&#64;string/service_name">
+ * &lt;intent-filter>
+ * &lt;action android:name="android.media.midi.MidiDeviceService" />
+ * &lt;/intent-filter>
+ * &lt;meta-data android:name="android.media.midi.MidiDeviceService"
+ android:resource="@xml/device_info" />
+ * &lt;/service></pre>
+ */
+abstract public class MidiDeviceService extends Service {
+ private static final String TAG = "MidiDeviceService";
+
+ public static final String SERVICE_INTERFACE = "android.media.midi.MidiDeviceService";
+
+ private IMidiManager mMidiManager;
+ private MidiDeviceServer mServer;
+ private MidiDeviceInfo mDeviceInfo;
+
+ private final MidiDeviceServer.Callback mCallback = new MidiDeviceServer.Callback() {
+ @Override
+ public void onDeviceStatusChanged(MidiDeviceServer server, MidiDeviceStatus status) {
+ MidiDeviceService.this.onDeviceStatusChanged(status);
+ }
+
+ @Override
+ public void onClose() {
+ MidiDeviceService.this.onClose();
+ }
+ };
+
+ @Override
+ public void onCreate() {
+ mMidiManager = IMidiManager.Stub.asInterface(
+ ServiceManager.getService(Context.MIDI_SERVICE));
+ MidiDeviceServer server;
+ try {
+ MidiDeviceInfo deviceInfo = mMidiManager.getServiceDeviceInfo(getPackageName(),
+ this.getClass().getName());
+ if (deviceInfo == null) {
+ Log.e(TAG, "Could not find MidiDeviceInfo for MidiDeviceService " + this);
+ return;
+ }
+ mDeviceInfo = deviceInfo;
+ MidiReceiver[] inputPortReceivers = onGetInputPortReceivers();
+ if (inputPortReceivers == null) {
+ inputPortReceivers = new MidiReceiver[0];
+ }
+ server = new MidiDeviceServer(mMidiManager, inputPortReceivers, deviceInfo, mCallback);
+ } catch (RemoteException e) {
+ Log.e(TAG, "RemoteException in IMidiManager.getServiceDeviceInfo");
+ server = null;
+ }
+ mServer = server;
+ }
+
+ /**
+ * Returns an array of {@link MidiReceiver} for the device's input ports.
+ * Subclasses must override this to provide the receivers which will receive
+ * data sent to the device's input ports. An empty array should be returned if
+ * the device has no input ports.
+ * @return array of MidiReceivers
+ */
+ abstract public MidiReceiver[] onGetInputPortReceivers();
+
+ /**
+ * Returns an array of {@link MidiReceiver} for the device's output ports.
+ * These can be used to send data out the device's output ports.
+ * @return array of MidiReceivers
+ */
+ public final MidiReceiver[] getOutputPortReceivers() {
+ if (mServer == null) {
+ return null;
+ } else {
+ return mServer.getOutputPortReceivers();
+ }
+ }
+
+ /**
+ * returns the {@link MidiDeviceInfo} instance for this service
+ * @return our MidiDeviceInfo
+ */
+ public final MidiDeviceInfo getDeviceInfo() {
+ return mDeviceInfo;
+ }
+
+ /**
+ * Called to notify when an our {@link MidiDeviceStatus} has changed
+ * @param status the number of the port that was opened
+ */
+ public void onDeviceStatusChanged(MidiDeviceStatus status) {
+ }
+
+ /**
+ * Called to notify when our device has been closed by all its clients
+ */
+ public void onClose() {
+ }
+
+ @Override
+ public IBinder onBind(Intent intent) {
+ if (SERVICE_INTERFACE.equals(intent.getAction()) && mServer != null) {
+ return mServer.getBinderInterface().asBinder();
+ } else {
+ return null;
+ }
+ }
+}
diff --git a/android/media/midi/MidiDeviceStatus.java b/android/media/midi/MidiDeviceStatus.java
new file mode 100644
index 00000000..acb54de0
--- /dev/null
+++ b/android/media/midi/MidiDeviceStatus.java
@@ -0,0 +1,138 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.midi;
+
+import android.os.Parcel;
+import android.os.Parcelable;
+
+/**
+ * This is an immutable class that describes the current status of a MIDI device's ports.
+ */
+public final class MidiDeviceStatus implements Parcelable {
+
+    private static final String TAG = "MidiDeviceStatus";
+
+    private final MidiDeviceInfo mDeviceInfo;
+    // Per-port flags: true if the input port at that index is open.
+    // An input port can only be opened by one client at a time.
+    private final boolean mInputPortOpen[];
+    // Per-port counts of how many clients have each output port open.
+    private final int mOutputPortOpenCount[];
+
+    /**
+     * Constructs a status snapshot from explicit port-state arrays.
+     * @hide
+     */
+    public MidiDeviceStatus(MidiDeviceInfo deviceInfo, boolean inputPortOpen[],
+            int outputPortOpenCount[]) {
+        // MidiDeviceInfo is immutable so we can share references
+        mDeviceInfo = deviceInfo;
+
+        // make copies of the arrays so this instance stays immutable
+        mInputPortOpen = new boolean[inputPortOpen.length];
+        System.arraycopy(inputPortOpen, 0, mInputPortOpen, 0, inputPortOpen.length);
+        mOutputPortOpenCount = new int[outputPortOpenCount.length];
+        System.arraycopy(outputPortOpenCount, 0, mOutputPortOpenCount, 0,
+                outputPortOpenCount.length);
+    }
+
+    /**
+     * Creates a MidiDeviceStatus with zero for all port open counts
+     * @hide
+     */
+    public MidiDeviceStatus(MidiDeviceInfo deviceInfo) {
+        mDeviceInfo = deviceInfo;
+        // arrays default to all-false / all-zero, i.e. no ports open
+        mInputPortOpen = new boolean[deviceInfo.getInputPortCount()];
+        mOutputPortOpenCount = new int[deviceInfo.getOutputPortCount()];
+    }
+
+    /**
+     * Returns the {@link MidiDeviceInfo} of the device.
+     *
+     * @return the device info
+     */
+    public MidiDeviceInfo getDeviceInfo() {
+        return mDeviceInfo;
+    }
+
+    /**
+     * Returns true if an input port is open.
+     * An input port can only be opened by one client at a time.
+     *
+     * @param portNumber the input port's port number
+     * @return input port open status
+     */
+    public boolean isInputPortOpen(int portNumber) {
+        return mInputPortOpen[portNumber];
+    }
+
+    /**
+     * Returns the number of clients currently connected to the specified output port.
+     * Unlike input ports, an output port can be opened by multiple clients at the same time.
+     *
+     * @param portNumber the output port's port number
+     * @return output port open count
+     */
+    public int getOutputPortOpenCount(int portNumber) {
+        return mOutputPortOpenCount[portNumber];
+    }
+
+    // Debug representation listing the open state of every port.
+    @Override
+    public String toString() {
+        int inputPortCount = mDeviceInfo.getInputPortCount();
+        int outputPortCount = mDeviceInfo.getOutputPortCount();
+        StringBuilder builder = new StringBuilder("mInputPortOpen=[");
+        for (int i = 0; i < inputPortCount; i++) {
+            builder.append(mInputPortOpen[i]);
+            if (i < inputPortCount -1) {
+                builder.append(",");
+            }
+        }
+        builder.append("] mOutputPortOpenCount=[");
+        for (int i = 0; i < outputPortCount; i++) {
+            builder.append(mOutputPortOpenCount[i]);
+            if (i < outputPortCount -1) {
+                builder.append(",");
+            }
+        }
+        builder.append("]");
+        return builder.toString();
+    }
+
+    // Parcelable plumbing: fields are written/read in the order
+    // deviceInfo, inputPortOpen[], outputPortOpenCount[].
+    public static final Parcelable.Creator<MidiDeviceStatus> CREATOR =
+        new Parcelable.Creator<MidiDeviceStatus>() {
+        public MidiDeviceStatus createFromParcel(Parcel in) {
+            ClassLoader classLoader = MidiDeviceInfo.class.getClassLoader();
+            MidiDeviceInfo deviceInfo = in.readParcelable(classLoader);
+            boolean[] inputPortOpen = in.createBooleanArray();
+            int[] outputPortOpenCount = in.createIntArray();
+            return new MidiDeviceStatus(deviceInfo, inputPortOpen, outputPortOpenCount);
+        }
+
+        public MidiDeviceStatus[] newArray(int size) {
+            return new MidiDeviceStatus[size];
+        }
+    };
+
+    public int describeContents() {
+        // no special content (e.g. file descriptors) in the parcel
+        return 0;
+    }
+
+    public void writeToParcel(Parcel parcel, int flags) {
+        parcel.writeParcelable(mDeviceInfo, flags);
+        parcel.writeBooleanArray(mInputPortOpen);
+        parcel.writeIntArray(mOutputPortOpenCount);
+    }
+}
diff --git a/android/media/midi/MidiInputPort.java b/android/media/midi/MidiInputPort.java
new file mode 100644
index 00000000..a300886e
--- /dev/null
+++ b/android/media/midi/MidiInputPort.java
@@ -0,0 +1,173 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.midi;
+
+import android.os.IBinder;
+import android.os.RemoteException;
+import android.util.Log;
+
+import dalvik.system.CloseGuard;
+
+import libcore.io.IoUtils;
+
+import java.io.Closeable;
+import java.io.FileDescriptor;
+import java.io.FileOutputStream;
+import java.io.IOException;
+
+/**
+ * This class is used for sending data to a port on a MIDI device
+ */
+public final class MidiInputPort extends MidiReceiver implements Closeable {
+    private static final String TAG = "MidiInputPort";
+
+    // Cleared in finalize() since binder calls are unsafe there; may be null
+    // for ports created with the package-private fd-only constructor.
+    private IMidiDeviceServer mDeviceServer;
+    private final IBinder mToken;
+    private final int mPortNumber;
+    // mFileDescriptor and mOutputStream are guarded by mBuffer; both are set
+    // to null once the port is closed or the fd has been claimed.
+    private FileDescriptor mFileDescriptor;
+    private FileOutputStream mOutputStream;
+
+    private final CloseGuard mGuard = CloseGuard.get();
+    private boolean mIsClosed;
+
+    // scratch buffer used for packing data written to our output stream;
+    // also serves as the lock protecting the stream and file descriptor
+    private final byte[] mBuffer = new byte[MidiPortImpl.MAX_PACKET_SIZE];
+
+    /* package */ MidiInputPort(IMidiDeviceServer server, IBinder token,
+            FileDescriptor fd, int portNumber) {
+        // limit message size to what fits in one packed packet
+        super(MidiPortImpl.MAX_PACKET_DATA_SIZE);
+
+        mDeviceServer = server;
+        mToken = token;
+        mFileDescriptor = fd;
+        mPortNumber = portNumber;
+        mOutputStream = new FileOutputStream(fd);
+        mGuard.open("close");
+    }
+
+    /* package */ MidiInputPort(FileDescriptor fd, int portNumber) {
+        this(null, null, fd, portNumber);
+    }
+
+    /**
+     * Returns the port number of this port
+     *
+     * @return the port's port number
+     */
+    public final int getPortNumber() {
+        return mPortNumber;
+    }
+
+    @Override
+    public void onSend(byte[] msg, int offset, int count, long timestamp) throws IOException {
+        // validate arguments before taking the lock
+        if (offset < 0 || count < 0 || offset + count > msg.length) {
+            throw new IllegalArgumentException("offset or count out of range");
+        }
+        if (count > MidiPortImpl.MAX_PACKET_DATA_SIZE) {
+            throw new IllegalArgumentException("count exceeds max message size");
+        }
+
+        synchronized (mBuffer) {
+            if (mOutputStream == null) {
+                throw new IOException("MidiInputPort is closed");
+            }
+            // pack the message plus its timestamp into one packet and write it
+            int length = MidiPortImpl.packData(msg, offset, count, timestamp, mBuffer);
+            mOutputStream.write(mBuffer, 0, length);
+        }
+    }
+
+    @Override
+    public void onFlush() throws IOException {
+        synchronized (mBuffer) {
+            if (mOutputStream == null) {
+                throw new IOException("MidiInputPort is closed");
+            }
+            // a flush is a one-byte control packet with no data payload
+            int length = MidiPortImpl.packFlush(mBuffer);
+            mOutputStream.write(mBuffer, 0, length);
+        }
+    }
+
+    // used by MidiDevice.connectInputPort() to connect our socket directly to another device.
+    // Transfers ownership of the file descriptor to the caller and leaves this
+    // port unusable for further sends; returns null if already closed/claimed.
+    /* package */ FileDescriptor claimFileDescriptor() {
+        synchronized (mGuard) {
+            FileDescriptor fd;
+            synchronized (mBuffer) {
+                fd = mFileDescriptor;
+                if (fd == null) return null;
+                IoUtils.closeQuietly(mOutputStream);
+                mFileDescriptor = null;
+                mOutputStream = null;
+            }
+
+            // Set mIsClosed = true so we will not call mDeviceServer.closePort() in close().
+            // MidiDevice.MidiConnection.close() will do the cleanup instead.
+            mIsClosed = true;
+            return fd;
+        }
+    }
+
+    // used by MidiDevice.MidiConnection to close this port after the connection is closed
+    /* package */ IBinder getToken() {
+        return mToken;
+    }
+
+    // used by MidiDevice.MidiConnection to close this port after the connection is closed
+    /* package */ IMidiDeviceServer getDeviceServer() {
+        return mDeviceServer;
+    }
+
+    @Override
+    public void close() throws IOException {
+        synchronized (mGuard) {
+            // idempotent: subsequent calls are no-ops
+            if (mIsClosed) return;
+            mGuard.close();
+            synchronized (mBuffer) {
+                if (mFileDescriptor != null) {
+                    IoUtils.closeQuietly(mFileDescriptor);
+                    mFileDescriptor = null;
+                }
+                if (mOutputStream != null) {
+                    mOutputStream.close();
+                    mOutputStream = null;
+                }
+            }
+            // tell the device server we are done with the port
+            if (mDeviceServer != null) {
+                try {
+                    mDeviceServer.closePort(mToken);
+                } catch (RemoteException e) {
+                    Log.e(TAG, "RemoteException in MidiInputPort.close()");
+                }
+            }
+            mIsClosed = true;
+        }
+    }
+
+    @Override
+    protected void finalize() throws Throwable {
+        try {
+            if (mGuard != null) {
+                // warn if the client leaked this port without closing it
+                mGuard.warnIfOpen();
+            }
+
+            // not safe to make binder calls from finalize()
+            mDeviceServer = null;
+            close();
+        } finally {
+            super.finalize();
+        }
+    }
+}
diff --git a/android/media/midi/MidiManager.java b/android/media/midi/MidiManager.java
new file mode 100644
index 00000000..a015732d
--- /dev/null
+++ b/android/media/midi/MidiManager.java
@@ -0,0 +1,327 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.midi;
+
+import android.annotation.SystemService;
+import android.bluetooth.BluetoothDevice;
+import android.content.Context;
+import android.os.Binder;
+import android.os.IBinder;
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.RemoteException;
+import android.util.Log;
+
+import java.util.concurrent.ConcurrentHashMap;
+
+/**
+ * This class is the public application interface to the MIDI service.
+ */
+@SystemService(Context.MIDI_SERVICE)
+public final class MidiManager {
+    private static final String TAG = "MidiManager";
+
+    /**
+     * Intent for starting BluetoothMidiService
+     * @hide
+     */
+    public static final String BLUETOOTH_MIDI_SERVICE_INTENT =
+            "android.media.midi.BluetoothMidiService";
+
+    /**
+     * BluetoothMidiService package name
+     * @hide
+     */
+    public static final String BLUETOOTH_MIDI_SERVICE_PACKAGE = "com.android.bluetoothmidiservice";
+
+    /**
+     * BluetoothMidiService class name
+     * @hide
+     */
+    public static final String BLUETOOTH_MIDI_SERVICE_CLASS =
+            "com.android.bluetoothmidiservice.BluetoothMidiService";
+
+    private final IMidiManager mService;
+    private final IBinder mToken = new Binder();
+
+    // Maps each client DeviceCallback to the binder stub registered with the
+    // MidiService, so unregisterDeviceCallback() can find and remove it.
+    private ConcurrentHashMap<DeviceCallback,DeviceListener> mDeviceListeners =
+            new ConcurrentHashMap<DeviceCallback,DeviceListener>();
+
+    // Binder stub for receiving device notifications from MidiService
+    private class DeviceListener extends IMidiDeviceListener.Stub {
+        private final DeviceCallback mCallback;
+        private final Handler mHandler;
+
+        public DeviceListener(DeviceCallback callback, Handler handler) {
+            mCallback = callback;
+            mHandler = handler;
+        }
+
+        @Override
+        public void onDeviceAdded(MidiDeviceInfo device) {
+            // Dispatch on the client's handler if one was supplied; otherwise
+            // invoke the callback directly on the incoming binder thread.
+            if (mHandler != null) {
+                final MidiDeviceInfo deviceF = device;
+                mHandler.post(new Runnable() {
+                    @Override public void run() {
+                        mCallback.onDeviceAdded(deviceF);
+                    }
+                });
+            } else {
+                mCallback.onDeviceAdded(device);
+            }
+        }
+
+        @Override
+        public void onDeviceRemoved(MidiDeviceInfo device) {
+            if (mHandler != null) {
+                final MidiDeviceInfo deviceF = device;
+                mHandler.post(new Runnable() {
+                    @Override public void run() {
+                        mCallback.onDeviceRemoved(deviceF);
+                    }
+                });
+            } else {
+                mCallback.onDeviceRemoved(device);
+            }
+        }
+
+        @Override
+        public void onDeviceStatusChanged(MidiDeviceStatus status) {
+            if (mHandler != null) {
+                final MidiDeviceStatus statusF = status;
+                mHandler.post(new Runnable() {
+                    @Override public void run() {
+                        mCallback.onDeviceStatusChanged(statusF);
+                    }
+                });
+            } else {
+                mCallback.onDeviceStatusChanged(status);
+            }
+        }
+    }
+
+    /**
+     * Callback class used for clients to receive MIDI device added and removed notifications
+     */
+    public static class DeviceCallback {
+        /**
+         * Called to notify when a new MIDI device has been added
+         *
+         * @param device a {@link MidiDeviceInfo} for the newly added device
+         */
+        public void onDeviceAdded(MidiDeviceInfo device) {
+        }
+
+        /**
+         * Called to notify when a MIDI device has been removed
+         *
+         * @param device a {@link MidiDeviceInfo} for the removed device
+         */
+        public void onDeviceRemoved(MidiDeviceInfo device) {
+        }
+
+        /**
+         * Called to notify when the status of a MIDI device has changed
+         *
+         * @param status a {@link MidiDeviceStatus} for the changed device
+         */
+        public void onDeviceStatusChanged(MidiDeviceStatus status) {
+        }
+    }
+
+    /**
+     * Listener class used for receiving the results of {@link #openDevice} and
+     * {@link #openBluetoothDevice}
+     */
+    public interface OnDeviceOpenedListener {
+        /**
+         * Called to respond to a {@link #openDevice} request
+         *
+         * @param device a {@link MidiDevice} for opened device, or null if opening failed
+         */
+        abstract public void onDeviceOpened(MidiDevice device);
+    }
+
+    /**
+     * @hide
+     */
+    public MidiManager(IMidiManager service) {
+        mService = service;
+    }
+
+    /**
+     * Registers a callback to receive notifications when MIDI devices are added and removed.
+     *
+     * The {@link DeviceCallback#onDeviceStatusChanged} method will be called immediately
+     * for any devices that have open ports. This allows applications to know which input
+     * ports are already in use and, therefore, unavailable.
+     *
+     * Applications should call {@link #getDevices} before registering the callback
+     * to get a list of devices already added.
+     *
+     * @param callback a {@link DeviceCallback} for MIDI device notifications
+     * @param handler The {@link android.os.Handler Handler} that will be used for delivering the
+     *                device notifications. If handler is null, then the thread used for the
+     *                callback is unspecified.
+     */
+    public void registerDeviceCallback(DeviceCallback callback, Handler handler) {
+        DeviceListener deviceListener = new DeviceListener(callback, handler);
+        try {
+            mService.registerListener(mToken, deviceListener);
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+        // remember the stub so the callback can be unregistered later
+        mDeviceListeners.put(callback, deviceListener);
+    }
+
+    /**
+     * Unregisters a {@link DeviceCallback}.
+     *
+     * @param callback a {@link DeviceCallback} to unregister
+     */
+    public void unregisterDeviceCallback(DeviceCallback callback) {
+        // silently ignores callbacks that were never registered
+        DeviceListener deviceListener = mDeviceListeners.remove(callback);
+        if (deviceListener != null) {
+            try {
+                mService.unregisterListener(mToken, deviceListener);
+            } catch (RemoteException e) {
+                throw e.rethrowFromSystemServer();
+            }
+        }
+    }
+
+    /**
+     * Gets the list of all connected MIDI devices.
+     *
+     * @return an array of all MIDI devices
+     */
+    public MidiDeviceInfo[] getDevices() {
+        try {
+            return mService.getDevices();
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+    // Delivers an open-device result on the requested handler,
+    // or inline on the current thread if no handler was supplied.
+    private void sendOpenDeviceResponse(final MidiDevice device,
+            final OnDeviceOpenedListener listener, Handler handler) {
+        if (handler != null) {
+            handler.post(new Runnable() {
+                @Override public void run() {
+                    listener.onDeviceOpened(device);
+                }
+            });
+        } else {
+            listener.onDeviceOpened(device);
+        }
+    }
+
+    /**
+     * Opens a MIDI device for reading and writing.
+     *
+     * @param deviceInfo a {@link android.media.midi.MidiDeviceInfo} to open
+     * @param listener a {@link MidiManager.OnDeviceOpenedListener} to be called
+     *                 to receive the result
+     * @param handler the {@link android.os.Handler Handler} that will be used for delivering
+     *                the result. If handler is null, then the thread used for the
+     *                listener is unspecified.
+     */
+    public void openDevice(MidiDeviceInfo deviceInfo, OnDeviceOpenedListener listener,
+            Handler handler) {
+        final MidiDeviceInfo deviceInfoF = deviceInfo;
+        final OnDeviceOpenedListener listenerF = listener;
+        final Handler handlerF = handler;
+
+        IMidiDeviceOpenCallback callback = new IMidiDeviceOpenCallback.Stub() {
+            @Override
+            public void onDeviceOpened(IMidiDeviceServer server, IBinder deviceToken) {
+                // a null server means the service failed to open the device
+                MidiDevice device;
+                if (server != null) {
+                    device = new MidiDevice(deviceInfoF, server, mService, mToken, deviceToken);
+                } else {
+                    device = null;
+                }
+                sendOpenDeviceResponse(device, listenerF, handlerF);
+            }
+        };
+
+        try {
+            mService.openDevice(mToken, deviceInfo, callback);
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+    /**
+     * Opens a Bluetooth MIDI device for reading and writing.
+     *
+     * @param bluetoothDevice a {@link android.bluetooth.BluetoothDevice} to open as a MIDI device
+     * @param listener a {@link MidiManager.OnDeviceOpenedListener} to be called to receive the
+     *                 result
+     * @param handler the {@link android.os.Handler Handler} that will be used for delivering
+     *                the result. If handler is null, then the thread used for the
+     *                listener is unspecified.
+     */
+    public void openBluetoothDevice(BluetoothDevice bluetoothDevice,
+            OnDeviceOpenedListener listener, Handler handler) {
+        final OnDeviceOpenedListener listenerF = listener;
+        final Handler handlerF = handler;
+
+        IMidiDeviceOpenCallback callback = new IMidiDeviceOpenCallback.Stub() {
+            @Override
+            public void onDeviceOpened(IMidiDeviceServer server, IBinder deviceToken) {
+                MidiDevice device = null;
+                if (server != null) {
+                    try {
+                        // fetch MidiDeviceInfo from the server
+                        MidiDeviceInfo deviceInfo = server.getDeviceInfo();
+                        device = new MidiDevice(deviceInfo, server, mService, mToken, deviceToken);
+                    } catch (RemoteException e) {
+                        Log.e(TAG, "remote exception in getDeviceInfo()");
+                    }
+                }
+                sendOpenDeviceResponse(device, listenerF, handlerF);
+            }
+        };
+
+        try {
+            mService.openBluetoothDevice(mToken, bluetoothDevice, callback);
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+    /** @hide */
+    public MidiDeviceServer createDeviceServer(MidiReceiver[] inputPortReceivers,
+            int numOutputPorts, String[] inputPortNames, String[] outputPortNames,
+            Bundle properties, int type, MidiDeviceServer.Callback callback) {
+        try {
+            MidiDeviceServer server = new MidiDeviceServer(mService, inputPortReceivers,
+                    numOutputPorts, callback);
+            MidiDeviceInfo deviceInfo = mService.registerDeviceServer(server.getBinderInterface(),
+                    inputPortReceivers.length, numOutputPorts, inputPortNames, outputPortNames,
+                    properties, type);
+            if (deviceInfo == null) {
+                // NOTE(review): message says "registerVirtualDevice" but the
+                // call above is registerDeviceServer — confirm intended wording
+                Log.e(TAG, "registerVirtualDevice failed");
+                return null;
+            }
+            return server;
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+}
diff --git a/android/media/midi/MidiOutputPort.java b/android/media/midi/MidiOutputPort.java
new file mode 100644
index 00000000..511f6cd5
--- /dev/null
+++ b/android/media/midi/MidiOutputPort.java
@@ -0,0 +1,159 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.midi;
+
+import android.os.IBinder;
+import android.os.ParcelFileDescriptor;
+import android.os.RemoteException;
+import android.util.Log;
+
+import com.android.internal.midi.MidiDispatcher;
+
+import dalvik.system.CloseGuard;
+
+import libcore.io.IoUtils;
+
+import java.io.Closeable;
+import java.io.FileDescriptor;
+import java.io.FileInputStream;
+import java.io.IOException;
+
+/**
+ * This class is used for receiving data from a port on a MIDI device
+ */
<br>
+public final class MidiOutputPort extends MidiSender implements Closeable {
+    private static final String TAG = "MidiOutputPort";
+
+    // Cleared in finalize() since binder calls are unsafe there; may be null
+    // for ports created with the package-private fd-only constructor.
+    private IMidiDeviceServer mDeviceServer;
+    private final IBinder mToken;
+    private final int mPortNumber;
+    private final FileInputStream mInputStream;
+    private final MidiDispatcher mDispatcher = new MidiDispatcher();
+
+    private final CloseGuard mGuard = CloseGuard.get();
+    private boolean mIsClosed;
+
+    // This thread reads MIDI events from a socket and distributes them to the list of
+    // MidiReceivers attached to this device.
+    private final Thread mThread = new Thread() {
+        @Override
+        public void run() {
+            byte[] buffer = new byte[MidiPortImpl.MAX_PACKET_SIZE];
+
+            try {
+                while (true) {
+                    // read next event; a negative count means end-of-stream
+                    int count = mInputStream.read(buffer);
+                    if (count < 0) {
+                        break;
+                        // FIXME - inform receivers here?
+                    }
+
+                    int packetType = MidiPortImpl.getPacketType(buffer, count);
+                    switch (packetType) {
+                        case MidiPortImpl.PACKET_TYPE_DATA: {
+                            int offset = MidiPortImpl.getDataOffset(buffer, count);
+                            int size = MidiPortImpl.getDataSize(buffer, count);
+                            long timestamp = MidiPortImpl.getPacketTimestamp(buffer, count);
+
+                            // dispatch to all our receivers
+                            mDispatcher.send(buffer, offset, size, timestamp);
+                            break;
+                        }
+                        case MidiPortImpl.PACKET_TYPE_FLUSH:
+                            mDispatcher.flush();
+                            break;
+                        default:
+                            Log.e(TAG, "Unknown packet type " + packetType);
+                            break;
+                    }
+                }
+            } catch (IOException e) {
+                // FIXME report I/O failure?
+                Log.e(TAG, "read failed", e);
+            } finally {
+                // the thread owns the stream once started; make sure it closes
+                IoUtils.closeQuietly(mInputStream);
+            }
+        }
+    };
+
+    /* package */ MidiOutputPort(IMidiDeviceServer server, IBinder token,
+            FileDescriptor fd, int portNumber) {
+        mDeviceServer = server;
+        mToken = token;
+        mPortNumber = portNumber;
+        // AutoCloseInputStream closes the fd when the stream itself is closed
+        mInputStream = new ParcelFileDescriptor.AutoCloseInputStream(new ParcelFileDescriptor(fd));
+        mThread.start();
+        mGuard.open("close");
+    }
+
+    /* package */ MidiOutputPort(FileDescriptor fd, int portNumber) {
+        this(null, null, fd, portNumber);
+    }
+
+    /**
+     * Returns the port number of this port
+     *
+     * @return the port's port number
+     */
+    public final int getPortNumber() {
+        return mPortNumber;
+    }
+
+    @Override
+    public void onConnect(MidiReceiver receiver) {
+        mDispatcher.getSender().connect(receiver);
+    }
+
+    @Override
+    public void onDisconnect(MidiReceiver receiver) {
+        mDispatcher.getSender().disconnect(receiver);
+    }
+
+    @Override
+    public void close() throws IOException {
+        synchronized (mGuard) {
+            // idempotent: subsequent calls are no-ops
+            if (mIsClosed) return;
+
+            mGuard.close();
+            // closing the stream also ends the reader thread's loop
+            mInputStream.close();
+            if (mDeviceServer != null) {
+                try {
+                    mDeviceServer.closePort(mToken);
+                } catch (RemoteException e) {
+                    Log.e(TAG, "RemoteException in MidiOutputPort.close()");
+                }
+            }
+            mIsClosed = true;
+        }
+    }
+
+    @Override
+    protected void finalize() throws Throwable {
+        try {
+            if (mGuard != null) {
+                // warn if the client leaked this port without closing it
+                mGuard.warnIfOpen();
+            }
+
+            // not safe to make binder calls from finalize()
+            mDeviceServer = null;
+            close();
+        } finally {
+            super.finalize();
+        }
+    }
+}
diff --git a/android/media/midi/MidiPortImpl.java b/android/media/midi/MidiPortImpl.java
new file mode 100644
index 00000000..1cd9ed22
--- /dev/null
+++ b/android/media/midi/MidiPortImpl.java
@@ -0,0 +1,134 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.midi;
+
+/**
+ * This class contains utilities for socket communication between a
+ * MidiInputPort and MidiOutputPort
+ */
+/* package */ class MidiPortImpl {
+    private static final String TAG = "MidiPort";
+
+    /**
+     * Packet type for data packet
+     */
+    public static final int PACKET_TYPE_DATA = 1;
+
+    /**
+     * Packet type for flush packet
+     */
+    public static final int PACKET_TYPE_FLUSH = 2;
+
+    /**
+     * Maximum size of a packet that can be passed between processes.
+     */
+    public static final int MAX_PACKET_SIZE = 1024;
+
+    /**
+     * size of message timestamp in bytes
+     */
+    private static final int TIMESTAMP_SIZE = 8;
+
+    /**
+     * Data packet overhead is timestamp size plus packet type byte
+     */
+    private static final int DATA_PACKET_OVERHEAD = TIMESTAMP_SIZE + 1;
+
+    /**
+     * Maximum amount of MIDI data that can be included in a packet
+     */
+    public static final int MAX_PACKET_DATA_SIZE = MAX_PACKET_SIZE - DATA_PACKET_OVERHEAD;
+
+    /**
+     * Utility function for packing MIDI data to be passed between processes
+     *
+     * message byte array contains variable length MIDI message.
+     * messageSize is size of variable length MIDI message
+     * timestamp is message timestamp to pack
+     * dest is buffer to pack into
+     * returns size of packed message
+     *
+     * Packet layout: [type byte][data][8-byte little-endian timestamp].
+     * Note: if size exceeds MAX_PACKET_DATA_SIZE the data is silently truncated.
+     */
+    public static int packData(byte[] message, int offset, int size, long timestamp,
+            byte[] dest) {
+        if (size > MAX_PACKET_DATA_SIZE) {
+            size = MAX_PACKET_DATA_SIZE;
+        }
+        int length = 0;
+        // packet type goes first
+        dest[length++] = PACKET_TYPE_DATA;
+        // data goes next
+        System.arraycopy(message, offset, dest, length, size);
+        length += size;
+
+        // followed by timestamp, least significant byte first
+        for (int i = 0; i < TIMESTAMP_SIZE; i++) {
+            dest[length++] = (byte)timestamp;
+            timestamp >>= 8;
+        }
+
+        return length;
+    }
+
+    /**
+     * Utility function for packing a flush command to be passed between processes
+     */
+    public static int packFlush(byte[] dest) {
+        dest[0] = PACKET_TYPE_FLUSH;
+        return 1;
+    }
+
+    /**
+     * Returns the packet type (PACKET_TYPE_DATA or PACKET_TYPE_FLUSH)
+     */
+    public static int getPacketType(byte[] buffer, int bufferLength) {
+        // type is always the first byte; bufferLength is unused here
+        return buffer[0];
+    }
+
+    /**
+     * Utility function for unpacking MIDI data received from other process
+     * returns the offset of the MIDI message in packed buffer
+     */
+    public static int getDataOffset(byte[] buffer, int bufferLength) {
+        // data follows packet type byte
+        return 1;
+    }
+
+    /**
+     * Utility function for unpacking MIDI data received from other process
+     * returns size of MIDI data in packed buffer
+     */
+    public static int getDataSize(byte[] buffer, int bufferLength) {
+        // message length is total buffer length minus size of the timestamp
+        // and the packet type byte (DATA_PACKET_OVERHEAD)
+        return bufferLength - DATA_PACKET_OVERHEAD;
+    }
+
+    /**
+     * Utility function for unpacking MIDI data received from other process
+     * unpacks timestamp from packed buffer
+     */
+    public static long getPacketTimestamp(byte[] buffer, int bufferLength) {
+        // timestamp is at end of the packet, reassembled most significant byte first
+        int offset = bufferLength;
+        long timestamp = 0;
+
+        for (int i = 0; i < TIMESTAMP_SIZE; i++) {
+            int b = (int)buffer[--offset] & 0xFF;
+            timestamp = (timestamp << 8) | b;
+        }
+        return timestamp;
+    }
+}
diff --git a/android/media/midi/MidiReceiver.java b/android/media/midi/MidiReceiver.java
new file mode 100644
index 00000000..12a5f044
--- /dev/null
+++ b/android/media/midi/MidiReceiver.java
@@ -0,0 +1,133 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.midi;
+
+import java.io.IOException;
+
+/**
+ * Interface for sending and receiving data to and from a MIDI device.
+ */
+abstract public class MidiReceiver {
+
+    // upper bound on the count accepted by a single onSend() call
+    private final int mMaxMessageSize;
+
+    /**
+     * Default MidiReceiver constructor. Maximum message size is set to
+     * {@link java.lang.Integer#MAX_VALUE}
+     */
+    public MidiReceiver() {
+        mMaxMessageSize = Integer.MAX_VALUE;
+    }
+
+    /**
+     * MidiReceiver constructor.
+     * @param maxMessageSize the maximum size of a message this receiver can receive
+     */
+    public MidiReceiver(int maxMessageSize) {
+        mMaxMessageSize = maxMessageSize;
+    }
+
+    /**
+     * Called whenever the receiver is passed new MIDI data.
+     * Subclasses override this method to receive MIDI data.
+     * May fail if count exceeds {@link #getMaxMessageSize}.
+     *
+     * NOTE: the msg array parameter is only valid within the context of this call.
+     * The msg bytes should be copied by the receiver rather than retaining a reference
+     * to this parameter.
+     * Also, modifying the contents of the msg array parameter may result in other receivers
+     * in the same application receiving incorrect values in their {link #onSend} method.
+     *
+     * @param msg a byte array containing the MIDI data
+     * @param offset the offset of the first byte of the data in the array to be processed
+     * @param count the number of bytes of MIDI data in the array to be processed
+     * @param timestamp the timestamp of the message (based on {@link java.lang.System#nanoTime})
+     * @throws IOException
+     */
+    abstract public void onSend(byte[] msg, int offset, int count, long timestamp)
+            throws IOException;
+
+    /**
+     * Instructs the receiver to discard all pending MIDI data.
+     * @throws IOException
+     */
+    public void flush() throws IOException {
+        onFlush();
+    }
+
+    /**
+     * Called when the receiver is instructed to discard all pending MIDI data.
+     * Subclasses should override this method if they maintain a list or queue of MIDI data
+     * to be processed in the future.
+     * @throws IOException
+     */
+    public void onFlush() throws IOException {
+    }
+
+    /**
+     * Returns the maximum size of a message this receiver can receive.
+     * @return maximum message size
+     */
+    public final int getMaxMessageSize() {
+        return mMaxMessageSize;
+    }
+
+    /**
+     * Called to send MIDI data to the receiver without a timestamp.
+     * Data will be processed by receiver in the order sent.
+     * Data will get split into multiple calls to {@link #onSend} if count exceeds
+     * {@link #getMaxMessageSize}. Blocks until all the data is sent or an exception occurs.
+     * In the latter case, the amount of data sent prior to the exception is not provided to caller.
+     * The communication should be considered corrupt. The sender should reestablish
+     * communication, reset all controllers and send all notes off.
+     *
+     * @param msg a byte array containing the MIDI data
+     * @param offset the offset of the first byte of the data in the array to be sent
+     * @param count the number of bytes of MIDI data in the array to be sent
+     * @throws IOException if the data could not be sent in entirety
+     */
+    public void send(byte[] msg, int offset, int count) throws IOException {
+        // TODO add public static final TIMESTAMP_NONE = 0L
+        send(msg, offset, count, 0L);
+    }
+
+    /**
+     * Called to send MIDI data to the receiver with a specified timestamp.
+     * Data will be processed by receiver in order first by timestamp, then in the order sent.
+     * Data will get split into multiple calls to {@link #onSend} if count exceeds
+     * {@link #getMaxMessageSize}. Blocks until all the data is sent or an exception occurs.
+     * In the latter case, the amount of data sent prior to the exception is not provided to caller.
+     * The communication should be considered corrupt. The sender should reestablish
+     * communication, reset all controllers and send all notes off.
+     *
+     * @param msg a byte array containing the MIDI data
+     * @param offset the offset of the first byte of the data in the array to be sent
+     * @param count the number of bytes of MIDI data in the array to be sent
+     * @param timestamp the timestamp of the message, based on {@link java.lang.System#nanoTime}
+     * @throws IOException if the data could not be sent in entirety
+     */
+    public void send(byte[] msg, int offset, int count, long timestamp)
+            throws IOException {
+        // chunk the data so each onSend() call respects the size limit
+        int messageSize = getMaxMessageSize();
+        while (count > 0) {
+            int length = (count > messageSize ? messageSize : count);
+            onSend(msg, offset, length, timestamp);
+            offset += length;
+            count -= length;
+        }
+    }
+}
diff --git a/android/media/midi/MidiSender.java b/android/media/midi/MidiSender.java
new file mode 100644
index 00000000..c5f1edc4
--- /dev/null
+++ b/android/media/midi/MidiSender.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.midi;
+
+/**
+ * Interface provided by a device to allow attaching
+ * MidiReceivers to a MIDI device.
+ */
+public abstract class MidiSender {
+
+    /**
+     * Connects a {@link MidiReceiver} to the sender.
+     *
+     * @param receiver the receiver to connect
+     * @throws NullPointerException if {@code receiver} is null
+     */
+    public void connect(MidiReceiver receiver) {
+        if (receiver != null) {
+            onConnect(receiver);
+            return;
+        }
+        throw new NullPointerException("receiver null in MidiSender.connect");
+    }
+
+    /**
+     * Disconnects a {@link MidiReceiver} from the sender.
+     *
+     * @param receiver the receiver to disconnect
+     * @throws NullPointerException if {@code receiver} is null
+     */
+    public void disconnect(MidiReceiver receiver) {
+        if (receiver != null) {
+            onDisconnect(receiver);
+            return;
+        }
+        throw new NullPointerException("receiver null in MidiSender.disconnect");
+    }
+
+    /**
+     * Subclass hook invoked by {@link #connect} with a non-null receiver.
+     *
+     * @param receiver the receiver to connect
+     */
+    public abstract void onConnect(MidiReceiver receiver);
+
+    /**
+     * Subclass hook invoked by {@link #disconnect} with a non-null receiver.
+     *
+     * @param receiver the receiver to disconnect
+     */
+    public abstract void onDisconnect(MidiReceiver receiver);
+}
diff --git a/android/media/projection/MediaProjection.java b/android/media/projection/MediaProjection.java
new file mode 100644
index 00000000..f9c5b8d7
--- /dev/null
+++ b/android/media/projection/MediaProjection.java
@@ -0,0 +1,212 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.projection;
+
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.content.Context;
+import android.hardware.display.DisplayManager;
+import android.hardware.display.VirtualDisplay;
+import android.media.AudioRecord;
+import android.media.projection.IMediaProjection;
+import android.media.projection.IMediaProjectionCallback;
+import android.os.Handler;
+import android.os.RemoteException;
+import android.util.ArrayMap;
+import android.util.Log;
+import android.view.Surface;
+
+import java.util.Map;
+
+/**
+ * A token granting applications the ability to capture screen contents and/or
+ * record system audio. The exact capabilities granted depend on the type of
+ * MediaProjection.
+ *
+ * <p>
+ * A screen capture session can be started through {@link
+ * MediaProjectionManager#createScreenCaptureIntent}. This grants the ability to
+ * capture screen contents, but not system audio.
+ * </p>
+ */
+public final class MediaProjection {
+    private static final String TAG = "MediaProjection";
+
+    // Binder interface to the projection service; started in the constructor.
+    private final IMediaProjection mImpl;
+    private final Context mContext;
+    // Maps each app-registered Callback to its handler-dispatching record.
+    // NOTE(review): mutated from app threads and iterated from the binder thread
+    // (MediaProjectionCallback.onStop) without synchronization — looks racy;
+    // confirm the intended threading model.
+    private final Map<Callback, CallbackRecord> mCallbacks;
+
+    /** @hide */
+    public MediaProjection(Context context, IMediaProjection impl) {
+        mCallbacks = new ArrayMap<Callback, CallbackRecord>();
+        mContext = context;
+        mImpl = impl;
+        try {
+            // Start the projection immediately; onStop notifications arrive via
+            // the MediaProjectionCallback stub below.
+            mImpl.start(new MediaProjectionCallback());
+        } catch (RemoteException e) {
+            // A dead projection service makes this token useless — fail construction.
+            throw new RuntimeException("Failed to start media projection", e);
+        }
+    }
+
+    /** Register a listener to receive notifications about when the {@link
+     * MediaProjection} changes state.
+     *
+     * @param callback The callback to call.
+     * @param handler The handler on which the callback should be invoked, or
+     * null if the callback should be invoked on the calling thread's looper.
+     *
+     * @see #unregisterCallback
+     */
+    public void registerCallback(Callback callback, Handler handler) {
+        if (callback == null) {
+            throw new IllegalArgumentException("callback should not be null");
+        }
+        if (handler == null) {
+            // new Handler() binds to the calling thread's Looper.
+            handler = new Handler();
+        }
+        // Re-registering the same callback replaces its handler.
+        mCallbacks.put(callback, new CallbackRecord(callback, handler));
+    }
+
+    /** Unregister a MediaProjection listener.
+     *
+     * @param callback The callback to unregister.
+     *
+     * @see #registerCallback
+     */
+    public void unregisterCallback(Callback callback) {
+        if (callback == null) {
+            throw new IllegalArgumentException("callback should not be null");
+        }
+        mCallbacks.remove(callback);
+    }
+
+    /**
+     * System-only variant that maps {@code isSecure} onto display flags and always
+     * adds auto-mirror and presentation behavior.
+     * @hide
+     */
+    public VirtualDisplay createVirtualDisplay(@NonNull String name,
+            int width, int height, int dpi, boolean isSecure, @Nullable Surface surface,
+            @Nullable VirtualDisplay.Callback callback, @Nullable Handler handler) {
+        DisplayManager dm = (DisplayManager) mContext.getSystemService(Context.DISPLAY_SERVICE);
+        int flags = isSecure ? DisplayManager.VIRTUAL_DISPLAY_FLAG_SECURE : 0;
+        return dm.createVirtualDisplay(this, name, width, height, dpi, surface,
+                flags | DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR |
+                DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION, callback, handler,
+                null /* uniqueId */);
+    }
+
+    /**
+     * Creates a {@link android.hardware.display.VirtualDisplay} to capture the
+     * contents of the screen.
+     *
+     * @param name The name of the virtual display, must be non-empty.
+     * @param width The width of the virtual display in pixels. Must be
+     * greater than 0.
+     * @param height The height of the virtual display in pixels. Must be
+     * greater than 0.
+     * @param dpi The density of the virtual display in dpi. Must be greater
+     * than 0.
+     * @param surface The surface to which the content of the virtual display
+     * should be rendered, or null if there is none initially.
+     * @param flags A combination of virtual display flags. See {@link DisplayManager} for the full
+     * list of flags.
+     * @param callback Callback to call when the virtual display's state
+     * changes, or null if none.
+     * @param handler The {@link android.os.Handler} on which the callback should be
+     * invoked, or null if the callback should be invoked on the calling
+     * thread's main {@link android.os.Looper}.
+     *
+     * @see android.hardware.display.VirtualDisplay
+     */
+    public VirtualDisplay createVirtualDisplay(@NonNull String name,
+            int width, int height, int dpi, int flags, @Nullable Surface surface,
+            @Nullable VirtualDisplay.Callback callback, @Nullable Handler handler) {
+        DisplayManager dm = (DisplayManager) mContext.getSystemService(Context.DISPLAY_SERVICE);
+        return dm.createVirtualDisplay(this, name, width, height, dpi, surface, flags, callback,
+                handler, null /* uniqueId */);
+    }
+
+    /**
+     * Creates an AudioRecord to capture audio played back by the system.
+     * Currently unimplemented: always returns null.
+     * @hide
+     */
+    public AudioRecord createAudioRecord(
+            int sampleRateInHz, int channelConfig,
+            int audioFormat, int bufferSizeInBytes) {
+        return null;
+    }
+
+    /**
+     * Stops projection.
+     */
+    public void stop() {
+        try {
+            mImpl.stop();
+        } catch (RemoteException e) {
+            // Best-effort: if the service is gone the projection is effectively stopped.
+            Log.e(TAG, "Unable to stop projection", e);
+        }
+    }
+
+    /**
+     * Get the underlying IMediaProjection.
+     * @hide
+     */
+    public IMediaProjection getProjection() {
+        return mImpl;
+    }
+
+    /**
+     * Callbacks for the projection session.
+     */
+    public static abstract class Callback {
+        /**
+         * Called when the MediaProjection session is no longer valid.
+         * <p>
+         * Once a MediaProjection has been stopped, it's up to the application to release any
+         * resources it may be holding (e.g. {@link android.hardware.display.VirtualDisplay}s).
+         * </p>
+         */
+        public void onStop() { }
+    }
+
+    // Receives the service-side stop notification (on a binder thread) and fans it
+    // out to every registered callback record.
+    private final class MediaProjectionCallback extends IMediaProjectionCallback.Stub {
+        @Override
+        public void onStop() {
+            for (CallbackRecord cbr : mCallbacks.values()) {
+                cbr.onStop();
+            }
+        }
+    }
+
+    // Pairs a Callback with the Handler it must be invoked on; posts rather than
+    // calling directly so the app never runs callback code on the binder thread.
+    private final static class CallbackRecord {
+        private final Callback mCallback;
+        private final Handler mHandler;
+
+        public CallbackRecord(Callback callback, Handler handler) {
+            mCallback = callback;
+            mHandler = handler;
+        }
+
+        public void onStop() {
+            mHandler.post(new Runnable() {
+                @Override
+                public void run() {
+                    mCallback.onStop();
+                }
+            });
+        }
+    }
+}
diff --git a/android/media/projection/MediaProjectionInfo.java b/android/media/projection/MediaProjectionInfo.java
new file mode 100644
index 00000000..5a65e65b
--- /dev/null
+++ b/android/media/projection/MediaProjectionInfo.java
@@ -0,0 +1,93 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.projection;
+
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.os.UserHandle;
+
+import java.util.Objects;
+
+/** @hide */
+public final class MediaProjectionInfo implements Parcelable {
+    // Identifies which app, for which user, owns the active projection.
+    private final String mPackageName;
+    private final UserHandle mUserHandle;
+
+    public MediaProjectionInfo(String packageName, UserHandle handle) {
+        mPackageName = packageName;
+        mUserHandle = handle;
+    }
+
+    public MediaProjectionInfo(Parcel in) {
+        // Must read fields in the same order writeToParcel() writes them.
+        mPackageName = in.readString();
+        mUserHandle = UserHandle.readFromParcel(in);
+    }
+
+    public String getPackageName() {
+        return mPackageName;
+    }
+
+    public UserHandle getUserHandle() {
+        return mUserHandle;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (!(o instanceof MediaProjectionInfo)) {
+            return false;
+        }
+        final MediaProjectionInfo that = (MediaProjectionInfo) o;
+        return Objects.equals(mPackageName, that.mPackageName)
+                && Objects.equals(mUserHandle, that.mUserHandle);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(mPackageName, mUserHandle);
+    }
+
+    @Override
+    public String toString() {
+        return "MediaProjectionInfo{mPackageName=" + mPackageName
+                + ", mUserHandle=" + mUserHandle + "}";
+    }
+
+    @Override
+    public int describeContents() {
+        return 0;
+    }
+
+    @Override
+    public void writeToParcel(Parcel out, int flags) {
+        out.writeString(mPackageName);
+        UserHandle.writeToParcel(mUserHandle, out);
+    }
+
+    public static final Parcelable.Creator<MediaProjectionInfo> CREATOR =
+            new Parcelable.Creator<MediaProjectionInfo>() {
+                @Override
+                public MediaProjectionInfo createFromParcel(Parcel in) {
+                    return new MediaProjectionInfo(in);
+                }
+
+                @Override
+                public MediaProjectionInfo[] newArray(int size) {
+                    return new MediaProjectionInfo[size];
+                }
+            };
+}
diff --git a/android/media/projection/MediaProjectionManager.java b/android/media/projection/MediaProjectionManager.java
new file mode 100644
index 00000000..9f2c08e5
--- /dev/null
+++ b/android/media/projection/MediaProjectionManager.java
@@ -0,0 +1,200 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.projection;
+
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.annotation.SystemService;
+import android.app.Activity;
+import android.content.Context;
+import android.content.Intent;
+import android.media.projection.IMediaProjection;
+import android.os.Handler;
+import android.os.IBinder;
+import android.os.RemoteException;
+import android.os.ServiceManager;
+import android.util.ArrayMap;
+import android.util.Log;
+
+import java.util.Map;
+
+/**
+ * Manages the retrieval of certain types of {@link MediaProjection} tokens.
+ */
+@SystemService(Context.MEDIA_PROJECTION_SERVICE)
+public final class MediaProjectionManager {
+    private static final String TAG = "MediaProjectionManager";
+    /** @hide */
+    public static final String EXTRA_APP_TOKEN = "android.media.projection.extra.EXTRA_APP_TOKEN";
+    /** @hide */
+    public static final String EXTRA_MEDIA_PROJECTION =
+            "android.media.projection.extra.EXTRA_MEDIA_PROJECTION";
+
+    /** @hide */
+    public static final int TYPE_SCREEN_CAPTURE = 0;
+    /** @hide */
+    public static final int TYPE_MIRRORING = 1;
+    /** @hide */
+    public static final int TYPE_PRESENTATION = 2;
+
+    private Context mContext;
+    // Maps each app Callback to the binder delegate actually registered with the service.
+    private Map<Callback, CallbackDelegate> mCallbacks;
+    private IMediaProjectionManager mService;
+
+    /** @hide */
+    public MediaProjectionManager(Context context) {
+        mContext = context;
+        IBinder b = ServiceManager.getService(Context.MEDIA_PROJECTION_SERVICE);
+        mService = IMediaProjectionManager.Stub.asInterface(b);
+        mCallbacks = new ArrayMap<>();
+    }
+
+    /**
+     * Returns an Intent that <b>must</b> be passed to startActivityForResult()
+     * in order to start screen capture. The activity will prompt
+     * the user whether to allow screen capture. The result of this
+     * activity should be passed to getMediaProjection.
+     */
+    public Intent createScreenCaptureIntent() {
+        Intent i = new Intent();
+        // Launches SystemUI's consent dialog; the grant comes back in the result extras.
+        i.setClassName("com.android.systemui",
+                "com.android.systemui.media.MediaProjectionPermissionActivity");
+        return i;
+    }
+
+    /**
+     * Retrieve the MediaProjection obtained from a successful screen
+     * capture request. Will be null if the result from the
+     * startActivityForResult() is anything other than RESULT_OK.
+     *
+     * @param resultCode The result code from {@link android.app.Activity#onActivityResult(int,
+     * int, android.content.Intent)}
+     * @param resultData The resulting data from {@link android.app.Activity#onActivityResult(int,
+     * int, android.content.Intent)}
+     */
+    public MediaProjection getMediaProjection(int resultCode, @NonNull Intent resultData) {
+        // Defensive null check despite @NonNull: callers forward activity results directly.
+        if (resultCode != Activity.RESULT_OK || resultData == null) {
+            return null;
+        }
+        IBinder projection = resultData.getIBinderExtra(EXTRA_MEDIA_PROJECTION);
+        if (projection == null) {
+            return null;
+        }
+        return new MediaProjection(mContext, IMediaProjection.Stub.asInterface(projection));
+    }
+
+    /**
+     * Get the {@link MediaProjectionInfo} for the active {@link MediaProjection}.
+     * Returns null if there is no active projection or the service is unreachable.
+     * @hide
+     */
+    public MediaProjectionInfo getActiveProjectionInfo() {
+        try {
+            return mService.getActiveProjectionInfo();
+        } catch (RemoteException e) {
+            Log.e(TAG, "Unable to get the active projection info", e);
+        }
+        return null;
+    }
+
+    /**
+     * Stop the current projection if there is one. Best-effort: failures are logged.
+     * @hide
+     */
+    public void stopActiveProjection() {
+        try {
+            mService.stopActiveProjection();
+        } catch (RemoteException e) {
+            Log.e(TAG, "Unable to stop the currently active media projection", e);
+        }
+    }
+
+    /**
+     * Add a callback to monitor all of the {@link MediaProjection}s activity.
+     * Not for use by regular applications, must have the MANAGE_MEDIA_PROJECTION permission.
+     * @hide
+     */
+    public void addCallback(@NonNull Callback callback, @Nullable Handler handler) {
+        if (callback == null) {
+            throw new IllegalArgumentException("callback must not be null");
+        }
+        CallbackDelegate delegate = new CallbackDelegate(callback, handler);
+        mCallbacks.put(callback, delegate);
+        try {
+            mService.addCallback(delegate);
+        } catch (RemoteException e) {
+            Log.e(TAG, "Unable to add callbacks to MediaProjection service", e);
+        }
+    }
+
+    /**
+     * Remove a MediaProjection monitoring callback. A no-op for callbacks that
+     * were never added.
+     * @hide
+     */
+    public void removeCallback(@NonNull Callback callback) {
+        if (callback == null) {
+            throw new IllegalArgumentException("callback must not be null");
+        }
+        CallbackDelegate delegate = mCallbacks.remove(callback);
+        try {
+            if (delegate != null) {
+                mService.removeCallback(delegate);
+            }
+        } catch (RemoteException e) {
+            // Fixed copy-paste: this path removes callbacks, it does not add them.
+            Log.e(TAG, "Unable to remove callbacks from MediaProjection service", e);
+        }
+    }
+
+    /** @hide */
+    public static abstract class Callback {
+        public abstract void onStart(MediaProjectionInfo info);
+        public abstract void onStop(MediaProjectionInfo info);
+    }
+
+    /** @hide */
+    private final static class CallbackDelegate extends IMediaProjectionWatcherCallback.Stub {
+        private Callback mCallback;
+        private Handler mHandler;
+
+        public CallbackDelegate(Callback callback, Handler handler) {
+            mCallback = callback;
+            if (handler == null) {
+                // Default to a handler bound to the registering thread's Looper.
+                handler = new Handler();
+            }
+            mHandler = handler;
+        }
+
+        @Override
+        public void onStart(final MediaProjectionInfo info) {
+            mHandler.post(new Runnable() {
+                @Override
+                public void run() {
+                    mCallback.onStart(info);
+                }
+            });
+        }
+
+        @Override
+        public void onStop(final MediaProjectionInfo info) {
+            mHandler.post(new Runnable() {
+                @Override
+                public void run() {
+                    mCallback.onStop(info);
+                }
+            });
+        }
+    }
+}
diff --git a/android/media/session/MediaController.java b/android/media/session/MediaController.java
new file mode 100644
index 00000000..622900f5
--- /dev/null
+++ b/android/media/session/MediaController.java
@@ -0,0 +1,1116 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.session;
+
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.app.PendingIntent;
+import android.content.Context;
+import android.content.pm.ParceledListSlice;
+import android.media.AudioAttributes;
+import android.media.AudioManager;
+import android.media.MediaMetadata;
+import android.media.Rating;
+import android.media.VolumeProvider;
+import android.net.Uri;
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Message;
+import android.os.RemoteException;
+import android.os.ResultReceiver;
+import android.text.TextUtils;
+import android.util.Log;
+import android.view.KeyEvent;
+
+import java.lang.ref.WeakReference;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Allows an app to interact with an ongoing media session. Media buttons and
+ * other commands can be sent to the session. A callback may be registered to
+ * receive updates from the session, such as metadata and play state changes.
+ * <p>
+ * A MediaController can be created through {@link MediaSessionManager} if you
+ * hold the "android.permission.MEDIA_CONTENT_CONTROL" permission or are an
+ * enabled notification listener or by getting a {@link MediaSession.Token}
+ * directly from the session owner.
+ * <p>
+ * MediaController objects are thread-safe.
+ */
+public final class MediaController {
+ private static final String TAG = "MediaController";
+
+ private static final int MSG_EVENT = 1;
+ private static final int MSG_UPDATE_PLAYBACK_STATE = 2;
+ private static final int MSG_UPDATE_METADATA = 3;
+ private static final int MSG_UPDATE_VOLUME = 4;
+ private static final int MSG_UPDATE_QUEUE = 5;
+ private static final int MSG_UPDATE_QUEUE_TITLE = 6;
+ private static final int MSG_UPDATE_EXTRAS = 7;
+ private static final int MSG_DESTROYED = 8;
+
+ private final ISessionController mSessionBinder;
+
+ private final MediaSession.Token mToken;
+ private final Context mContext;
+ private final CallbackStub mCbStub = new CallbackStub(this);
+ private final ArrayList<MessageHandler> mCallbacks = new ArrayList<MessageHandler>();
+ private final Object mLock = new Object();
+
+ private boolean mCbRegistered = false;
+ private String mPackageName;
+ private String mTag;
+
+ private final TransportControls mTransportControls;
+
+    /**
+     * Builds a MediaController directly on top of a session binder. Framework
+     * code only.
+     *
+     * @param context caller's context, must be non-null
+     * @param sessionBinder binder for the session to control, must be non-null
+     * @throws IllegalArgumentException if either argument is null
+     * @hide
+     */
+    public MediaController(Context context, ISessionController sessionBinder) {
+        if (sessionBinder == null) {
+            throw new IllegalArgumentException("Session token cannot be null");
+        }
+        if (context == null) {
+            throw new IllegalArgumentException("Context cannot be null");
+        }
+        mContext = context;
+        mSessionBinder = sessionBinder;
+        mToken = new MediaSession.Token(sessionBinder);
+        mTransportControls = new TransportControls();
+    }
+
+    /**
+     * Create a new MediaController from a session's token.
+     *
+     * @param context The caller's context.
+     * @param token The token for the session.
+     */
+    public MediaController(@NonNull Context context, @NonNull MediaSession.Token token) {
+        // Delegates to the binder-based constructor via the token's underlying binder;
+        // that constructor performs the null checks.
+        this(context, token.getBinder());
+    }
+
+    /**
+     * Get a {@link TransportControls} instance to send transport actions to
+     * the associated session.
+     *
+     * @return A transport controls instance.
+     */
+    public @NonNull TransportControls getTransportControls() {
+        // Single instance created in the constructor; never null.
+        return mTransportControls;
+    }
+
+ /**
+ * Send the specified media button event to the session. Only media keys can
+ * be sent by this method, other keys will be ignored.
+ *
+ * @param keyEvent The media button event to dispatch.
+ * @return true if the event was sent to the session, false otherwise.
+ */
+ public boolean dispatchMediaButtonEvent(@NonNull KeyEvent keyEvent) {
+ if (keyEvent == null) {
+ throw new IllegalArgumentException("KeyEvent may not be null");
+ }
+ if (!KeyEvent.isMediaKey(keyEvent.getKeyCode())) {
+ return false;
+ }
+ try {
+ return mSessionBinder.sendMediaButton(keyEvent);
+ } catch (RemoteException e) {
+ // System is dead. =(
+ }
+ return false;
+ }
+
+ /**
+ * Get the current playback state for this session.
+ *
+ * @return The current PlaybackState or null
+ */
+ public @Nullable PlaybackState getPlaybackState() {
+ try {
+ return mSessionBinder.getPlaybackState();
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Error calling getPlaybackState.", e);
+ return null;
+ }
+ }
+
+ /**
+ * Get the current metadata for this session.
+ *
+ * @return The current MediaMetadata or null.
+ */
+ public @Nullable MediaMetadata getMetadata() {
+ try {
+ return mSessionBinder.getMetadata();
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Error calling getMetadata.", e);
+ return null;
+ }
+ }
+
+    /**
+     * Fetches the session's current play queue, if one is set. If you only
+     * care about the current item, {@link #getMetadata()} should be used.
+     *
+     * @return The current play queue or null.
+     */
+    public @Nullable List<MediaSession.QueueItem> getQueue() {
+        List<MediaSession.QueueItem> items = null;
+        try {
+            final ParceledListSlice slice = mSessionBinder.getQueue();
+            if (slice != null) {
+                items = slice.getList();
+            }
+        } catch (RemoteException e) {
+            Log.wtf(TAG, "Error calling getQueue.", e);
+        }
+        return items;
+    }
+
+ /**
+ * Get the queue title for this session.
+ */
+ public @Nullable CharSequence getQueueTitle() {
+ try {
+ return mSessionBinder.getQueueTitle();
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Error calling getQueueTitle", e);
+ }
+ return null;
+ }
+
+ /**
+ * Get the extras for this session.
+ */
+ public @Nullable Bundle getExtras() {
+ try {
+ return mSessionBinder.getExtras();
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Error calling getExtras", e);
+ }
+ return null;
+ }
+
+ /**
+ * Get the rating type supported by the session. One of:
+ * <ul>
+ * <li>{@link Rating#RATING_NONE}</li>
+ * <li>{@link Rating#RATING_HEART}</li>
+ * <li>{@link Rating#RATING_THUMB_UP_DOWN}</li>
+ * <li>{@link Rating#RATING_3_STARS}</li>
+ * <li>{@link Rating#RATING_4_STARS}</li>
+ * <li>{@link Rating#RATING_5_STARS}</li>
+ * <li>{@link Rating#RATING_PERCENTAGE}</li>
+ * </ul>
+ *
+ * @return The supported rating type
+ */
+ public int getRatingType() {
+ try {
+ return mSessionBinder.getRatingType();
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Error calling getRatingType.", e);
+ return Rating.RATING_NONE;
+ }
+ }
+
+ /**
+ * Get the flags for this session. Flags are defined in {@link MediaSession}.
+ *
+ * @return The current set of flags for the session.
+ */
+ public @MediaSession.SessionFlags long getFlags() {
+ try {
+ return mSessionBinder.getFlags();
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Error calling getFlags.", e);
+ }
+ return 0;
+ }
+
+    /**
+     * Fetches the current playback info for this session.
+     *
+     * @return The current playback info or null.
+     */
+    public @Nullable PlaybackInfo getPlaybackInfo() {
+        try {
+            final ParcelableVolumeInfo info = mSessionBinder.getVolumeAttributes();
+            return new PlaybackInfo(info.volumeType, info.audioAttrs, info.controlType,
+                    info.maxVolume, info.currentVolume);
+        } catch (RemoteException e) {
+            Log.wtf(TAG, "Error calling getAudioInfo.", e);
+            return null;
+        }
+    }
+
+ /**
+ * Get an intent for launching UI associated with this session if one
+ * exists.
+ *
+ * @return A {@link PendingIntent} to launch UI or null.
+ */
+ public @Nullable PendingIntent getSessionActivity() {
+ try {
+ return mSessionBinder.getLaunchPendingIntent();
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Error calling getPendingIntent.", e);
+ }
+ return null;
+ }
+
+    /**
+     * Get the token for the session this is connected to.
+     *
+     * @return The token for the connected session.
+     */
+    public @NonNull MediaSession.Token getSessionToken() {
+        // Created once in the constructor; never null.
+        return mToken;
+    }
+
+    /**
+     * Sets the volume of the output this session is playing on. Ignored by the
+     * session unless it supports {@link VolumeProvider#VOLUME_CONTROL_ABSOLUTE}.
+     * The flags in {@link AudioManager} may be used to affect the handling.
+     *
+     * @see #getPlaybackInfo()
+     * @param value The value to set it to, between 0 and the reported max.
+     * @param flags Flags from {@link AudioManager} to include with the volume
+     *            request.
+     */
+    public void setVolumeTo(int value, int flags) {
+        final String packageName = mContext.getPackageName();
+        try {
+            mSessionBinder.setVolumeTo(value, flags, packageName);
+        } catch (RemoteException e) {
+            Log.wtf(TAG, "Error calling setVolumeTo.", e);
+        }
+    }
+
+    /**
+     * Adjusts the volume of the output this session is playing on. The direction
+     * must be one of {@link AudioManager#ADJUST_LOWER},
+     * {@link AudioManager#ADJUST_RAISE}, or {@link AudioManager#ADJUST_SAME}.
+     * Ignored by the session unless it supports
+     * {@link VolumeProvider#VOLUME_CONTROL_RELATIVE} or
+     * {@link VolumeProvider#VOLUME_CONTROL_ABSOLUTE}. The flags in
+     * {@link AudioManager} may be used to affect the handling.
+     *
+     * @see #getPlaybackInfo()
+     * @param direction The direction to adjust the volume in.
+     * @param flags Any flags to pass with the command.
+     */
+    public void adjustVolume(int direction, int flags) {
+        final String packageName = mContext.getPackageName();
+        try {
+            mSessionBinder.adjustVolume(direction, flags, packageName);
+        } catch (RemoteException e) {
+            Log.wtf(TAG, "Error calling adjustVolumeBy.", e);
+        }
+    }
+
+    /**
+     * Registers a callback to receive updates from the Session. Updates will be
+     * posted on the caller's thread.
+     *
+     * @param callback The callback object, must not be null.
+     */
+    public void registerCallback(@NonNull Callback callback) {
+        // null handler means "use a Handler on the calling thread" in the overload.
+        registerCallback(callback, null);
+    }
+
+    /**
+     * Registers a callback to receive updates from the session. Updates will be
+     * posted on the specified handler's thread.
+     *
+     * @param callback The callback object, must not be null.
+     * @param handler The handler to post updates on. If null the callers thread
+     *            will be used.
+     * @throws IllegalArgumentException if {@code callback} is null
+     */
+    public void registerCallback(@NonNull Callback callback, @Nullable Handler handler) {
+        if (callback == null) {
+            throw new IllegalArgumentException("callback must not be null");
+        }
+        // new Handler() binds to the calling thread's Looper.
+        final Handler target = (handler != null) ? handler : new Handler();
+        synchronized (mLock) {
+            addCallbackLocked(callback, target);
+        }
+    }
+
+    /**
+     * Unregisters the specified callback. If an update has already been posted
+     * you may still receive it after calling this method.
+     *
+     * @param callback The callback to remove.
+     */
+    public void unregisterCallback(@NonNull Callback callback) {
+        if (callback == null) {
+            throw new IllegalArgumentException("callback must not be null");
+        }
+        synchronized (mLock) {
+            // Also drops the binder-side listener when no callbacks remain.
+            removeCallbackLocked(callback);
+        }
+    }
+
+    /**
+     * Sends a generic command to the session. It is up to the session creator
+     * to decide what commands and parameters they will support. As such,
+     * commands should only be sent to sessions that the controller owns.
+     *
+     * @param command The command to send
+     * @param args Any parameters to include with the command
+     * @param cb The callback to receive the result on
+     */
+    public void sendCommand(@NonNull String command, @Nullable Bundle args,
+            @Nullable ResultReceiver cb) {
+        if (TextUtils.isEmpty(command)) {
+            throw new IllegalArgumentException("command cannot be null or empty");
+        }
+        try {
+            mSessionBinder.sendCommand(command, args, cb);
+        } catch (RemoteException e) {
+            // Session process died; command is silently dropped.
+            Log.d(TAG, "Dead object in sendCommand.", e);
+        }
+    }
+
+    /**
+     * Get the session owner's package name.
+     *
+     * @return The package name of the session owner.
+     */
+    public String getPackageName() {
+        // Lazily cached; retried on each call while still null (e.g. after a
+        // RemoteException).
+        if (mPackageName == null) {
+            try {
+                mPackageName = mSessionBinder.getPackageName();
+            } catch (RemoteException e) {
+                Log.d(TAG, "Dead object in getPackageName.", e);
+            }
+        }
+        return mPackageName;
+    }
+
+    /**
+     * Get the session's tag for debugging purposes.
+     *
+     * @return The session's tag.
+     * @hide
+     */
+    public String getTag() {
+        // Lazily cached, mirroring getPackageName().
+        if (mTag == null) {
+            try {
+                mTag = mSessionBinder.getTag();
+            } catch (RemoteException e) {
+                Log.d(TAG, "Dead object in getTag.", e);
+            }
+        }
+        return mTag;
+    }
+
+    /*
+     * Returns the raw session binder. Package-private; used by framework
+     * callers such as controlsSameSession().
+     * @hide
+     */
+    ISessionController getSessionBinder() {
+        return mSessionBinder;
+    }
+
+    /**
+     * Returns true if {@code other} is a controller for the same session,
+     * compared by binder identity. A null argument yields false.
+     * @hide
+     */
+    public boolean controlsSameSession(MediaController other) {
+        return other != null
+                && mSessionBinder.asBinder() == other.getSessionBinder().asBinder();
+    }
+
+    // Adds a callback holder; must be called with mLock held. The binder-side
+    // listener is registered lazily, only once the first callback is added, and
+    // the local holder is kept even if that remote registration fails.
+    private void addCallbackLocked(Callback cb, Handler handler) {
+        if (getHandlerForCallbackLocked(cb) != null) {
+            // Duplicate registration keeps the original handler.
+            Log.w(TAG, "Callback is already added, ignoring");
+            return;
+        }
+        MessageHandler holder = new MessageHandler(handler.getLooper(), cb);
+        mCallbacks.add(holder);
+        holder.mRegistered = true;
+
+        if (!mCbRegistered) {
+            try {
+                mSessionBinder.registerCallbackListener(mCbStub);
+                mCbRegistered = true;
+            } catch (RemoteException e) {
+                Log.e(TAG, "Dead object in registerCallback", e);
+            }
+        }
+    }
+
+    // Removes every holder for the given callback; must be called with mLock held.
+    // Unregisters the binder-side listener once the last callback is gone.
+    // Returns true if at least one holder was removed.
+    private boolean removeCallbackLocked(Callback cb) {
+        boolean success = false;
+        // Iterate in reverse so removal does not disturb unvisited indices.
+        for (int i = mCallbacks.size() - 1; i >= 0; i--) {
+            MessageHandler handler = mCallbacks.get(i);
+            if (cb == handler.mCallback) {
+                mCallbacks.remove(i);
+                success = true;
+                handler.mRegistered = false;
+            }
+        }
+        if (mCbRegistered && mCallbacks.size() == 0) {
+            try {
+                mSessionBinder.unregisterCallbackListener(mCbStub);
+            } catch (RemoteException e) {
+                // Fixed: include the exception, matching addCallbackLocked's logging.
+                Log.e(TAG, "Dead object in removeCallbackLocked", e);
+            }
+            mCbRegistered = false;
+        }
+        return success;
+    }
+
+ private MessageHandler getHandlerForCallbackLocked(Callback cb) {
+ if (cb == null) {
+ throw new IllegalArgumentException("Callback cannot be null");
+ }
+ for (int i = mCallbacks.size() - 1; i >= 0; i--) {
+ MessageHandler handler = mCallbacks.get(i);
+ if (cb == handler.mCallback) {
+ return handler;
+ }
+ }
+ return null;
+ }
+
+ private final void postMessage(int what, Object obj, Bundle data) {
+ synchronized (mLock) {
+ for (int i = mCallbacks.size() - 1; i >= 0; i--) {
+ mCallbacks.get(i).post(what, obj, data);
+ }
+ }
+ }
+
+ /**
+ * Callback for receiving updates from the session. A Callback can be
+ * registered using {@link #registerCallback}. All methods have empty
+ * default implementations, so subclasses override only what they need.
+ */
+ public static abstract class Callback {
+ /**
+ * Override to handle the session being destroyed. The session is no
+ * longer valid after this call and calls to it will be ignored.
+ */
+ public void onSessionDestroyed() {
+ }
+
+ /**
+ * Override to handle custom events sent by the session owner without a
+ * specified interface. Controllers should only handle these for
+ * sessions they own.
+ *
+ * @param event The event from the session.
+ * @param extras Optional parameters for the event, may be null.
+ */
+ public void onSessionEvent(@NonNull String event, @Nullable Bundle extras) {
+ }
+
+ /**
+ * Override to handle changes in playback state.
+ *
+ * @param state The new playback state of the session
+ */
+ public void onPlaybackStateChanged(@NonNull PlaybackState state) {
+ }
+
+ /**
+ * Override to handle changes to the current metadata.
+ *
+ * @param metadata The current metadata for the session or null if none.
+ * @see MediaMetadata
+ */
+ public void onMetadataChanged(@Nullable MediaMetadata metadata) {
+ }
+
+ /**
+ * Override to handle changes to items in the queue.
+ *
+ * @param queue A list of items in the current play queue. It should
+ * include the currently playing item as well as previous and
+ * upcoming items if applicable.
+ * @see MediaSession.QueueItem
+ */
+ public void onQueueChanged(@Nullable List<MediaSession.QueueItem> queue) {
+ }
+
+ /**
+ * Override to handle changes to the queue title.
+ *
+ * @param title The title that should be displayed along with the play queue such as
+ * "Now Playing". May be null if there is no such title.
+ */
+ public void onQueueTitleChanged(@Nullable CharSequence title) {
+ }
+
+ /**
+ * Override to handle changes to the {@link MediaSession} extras.
+ *
+ * @param extras The extras that can include other information associated with the
+ * {@link MediaSession}.
+ */
+ public void onExtrasChanged(@Nullable Bundle extras) {
+ }
+
+ /**
+ * Override to handle changes to the audio info.
+ *
+ * @param info The current audio info for this session.
+ */
+ public void onAudioInfoChanged(PlaybackInfo info) {
+ }
+ }
+
+ /**
+ * Interface for controlling media playback on a session. This allows an app
+ * to send media transport commands to the session.
+ */
+ public final class TransportControls {
+ // NOTE(review): the tag string does not match the class name; kept as-is
+ // so existing log output remains stable.
+ private static final String TAG = "TransportController";
+
+ private TransportControls() {
+ }
+
+ /**
+ * Request that the player prepare its playback. In other words, other sessions can continue
+ * to play during the preparation of this session. This method can be used to speed up the
+ * start of the playback. Once the preparation is done, the session will change its playback
+ * state to {@link PlaybackState#STATE_PAUSED}. Afterwards, {@link #play} can be called to
+ * start playback.
+ */
+ public void prepare() {
+ try {
+ mSessionBinder.prepare();
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Error calling prepare.", e);
+ }
+ }
+
+ /**
+ * Request that the player prepare playback for a specific media id. In other words, other
+ * sessions can continue to play during the preparation of this session. This method can be
+ * used to speed up the start of the playback. Once the preparation is done, the session
+ * will change its playback state to {@link PlaybackState#STATE_PAUSED}. Afterwards,
+ * {@link #play} can be called to start playback. If the preparation is not needed,
+ * {@link #playFromMediaId} can be directly called without this method.
+ *
+ * @param mediaId The id of the requested media.
+ * @param extras Optional extras that can include extra information about the media item
+ * to be prepared.
+ */
+ public void prepareFromMediaId(String mediaId, Bundle extras) {
+ if (TextUtils.isEmpty(mediaId)) {
+ throw new IllegalArgumentException(
+ "You must specify a non-empty String for prepareFromMediaId.");
+ }
+ try {
+ mSessionBinder.prepareFromMediaId(mediaId, extras);
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Error calling prepare(" + mediaId + ").", e);
+ }
+ }
+
+ /**
+ * Request that the player prepare playback for a specific search query. An empty or null
+ * query should be treated as a request to prepare any music. In other words, other sessions
+ * can continue to play during the preparation of this session. This method can be used to
+ * speed up the start of the playback. Once the preparation is done, the session will
+ * change its playback state to {@link PlaybackState#STATE_PAUSED}. Afterwards,
+ * {@link #play} can be called to start playback. If the preparation is not needed,
+ * {@link #playFromSearch} can be directly called without this method.
+ *
+ * @param query The search query.
+ * @param extras Optional extras that can include extra information
+ * about the query.
+ */
+ public void prepareFromSearch(String query, Bundle extras) {
+ if (query == null) {
+ // This is to remain compatible with
+ // INTENT_ACTION_MEDIA_PLAY_FROM_SEARCH
+ query = "";
+ }
+ try {
+ mSessionBinder.prepareFromSearch(query, extras);
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Error calling prepare(" + query + ").", e);
+ }
+ }
+
+ /**
+ * Request that the player prepare playback for a specific {@link Uri}. In other words,
+ * other sessions can continue to play during the preparation of this session. This method
+ * can be used to speed up the start of the playback. Once the preparation is done, the
+ * session will change its playback state to {@link PlaybackState#STATE_PAUSED}. Afterwards,
+ * {@link #play} can be called to start playback. If the preparation is not needed,
+ * {@link #playFromUri} can be directly called without this method.
+ *
+ * @param uri The URI of the requested media.
+ * @param extras Optional extras that can include extra information about the media item
+ * to be prepared.
+ */
+ public void prepareFromUri(Uri uri, Bundle extras) {
+ if (uri == null || Uri.EMPTY.equals(uri)) {
+ throw new IllegalArgumentException(
+ "You must specify a non-empty Uri for prepareFromUri.");
+ }
+ try {
+ mSessionBinder.prepareFromUri(uri, extras);
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Error calling prepare(" + uri + ").", e);
+ }
+ }
+
+ /**
+ * Request that the player start its playback at its current position.
+ */
+ public void play() {
+ try {
+ mSessionBinder.play();
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Error calling play.", e);
+ }
+ }
+
+ /**
+ * Request that the player start playback for a specific media id.
+ *
+ * @param mediaId The id of the requested media.
+ * @param extras Optional extras that can include extra information about the media item
+ * to be played.
+ */
+ public void playFromMediaId(String mediaId, Bundle extras) {
+ if (TextUtils.isEmpty(mediaId)) {
+ throw new IllegalArgumentException(
+ "You must specify a non-empty String for playFromMediaId.");
+ }
+ try {
+ mSessionBinder.playFromMediaId(mediaId, extras);
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Error calling play(" + mediaId + ").", e);
+ }
+ }
+
+ /**
+ * Request that the player start playback for a specific search query.
+ * An empty or null query should be treated as a request to play any
+ * music.
+ *
+ * @param query The search query.
+ * @param extras Optional extras that can include extra information
+ * about the query.
+ */
+ public void playFromSearch(String query, Bundle extras) {
+ if (query == null) {
+ // This is to remain compatible with
+ // INTENT_ACTION_MEDIA_PLAY_FROM_SEARCH
+ query = "";
+ }
+ try {
+ mSessionBinder.playFromSearch(query, extras);
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Error calling play(" + query + ").", e);
+ }
+ }
+
+ /**
+ * Request that the player start playback for a specific {@link Uri}.
+ *
+ * @param uri The URI of the requested media.
+ * @param extras Optional extras that can include extra information about the media item
+ * to be played.
+ */
+ public void playFromUri(Uri uri, Bundle extras) {
+ if (uri == null || Uri.EMPTY.equals(uri)) {
+ throw new IllegalArgumentException(
+ "You must specify a non-empty Uri for playFromUri.");
+ }
+ try {
+ mSessionBinder.playFromUri(uri, extras);
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Error calling play(" + uri + ").", e);
+ }
+ }
+
+ /**
+ * Play an item with a specific id in the play queue. If you specify an
+ * id that is not in the play queue, the behavior is undefined.
+ */
+ public void skipToQueueItem(long id) {
+ try {
+ mSessionBinder.skipToQueueItem(id);
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Error calling skipToItem(" + id + ").", e);
+ }
+ }
+
+ /**
+ * Request that the player pause its playback and stay at its current
+ * position.
+ */
+ public void pause() {
+ try {
+ mSessionBinder.pause();
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Error calling pause.", e);
+ }
+ }
+
+ /**
+ * Request that the player stop its playback; it may clear its state in
+ * whatever way is appropriate.
+ */
+ public void stop() {
+ try {
+ mSessionBinder.stop();
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Error calling stop.", e);
+ }
+ }
+
+ /**
+ * Move to a new location in the media stream.
+ *
+ * @param pos Position to move to, in milliseconds.
+ */
+ public void seekTo(long pos) {
+ try {
+ mSessionBinder.seekTo(pos);
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Error calling seekTo.", e);
+ }
+ }
+
+ /**
+ * Start fast forwarding. If playback is already fast forwarding this
+ * may increase the rate.
+ */
+ public void fastForward() {
+ try {
+ mSessionBinder.fastForward();
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Error calling fastForward.", e);
+ }
+ }
+
+ /**
+ * Skip to the next item.
+ */
+ public void skipToNext() {
+ try {
+ mSessionBinder.next();
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Error calling next.", e);
+ }
+ }
+
+ /**
+ * Start rewinding. If playback is already rewinding this may increase
+ * the rate.
+ */
+ public void rewind() {
+ try {
+ mSessionBinder.rewind();
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Error calling rewind.", e);
+ }
+ }
+
+ /**
+ * Skip to the previous item.
+ */
+ public void skipToPrevious() {
+ try {
+ mSessionBinder.previous();
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Error calling previous.", e);
+ }
+ }
+
+ /**
+ * Rate the current content. This will cause the rating to be set for
+ * the current user. The Rating type must match the type returned by
+ * {@link #getRatingType()}.
+ *
+ * @param rating The rating to set for the current content
+ */
+ public void setRating(Rating rating) {
+ try {
+ mSessionBinder.rate(rating);
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Error calling rate.", e);
+ }
+ }
+
+ /**
+ * Send a custom action back for the {@link MediaSession} to perform.
+ *
+ * @param customAction The action to perform.
+ * @param args Optional arguments to supply to the {@link MediaSession} for this
+ * custom action.
+ */
+ public void sendCustomAction(@NonNull PlaybackState.CustomAction customAction,
+ @Nullable Bundle args) {
+ if (customAction == null) {
+ throw new IllegalArgumentException("CustomAction cannot be null.");
+ }
+ sendCustomAction(customAction.getAction(), args);
+ }
+
+ /**
+ * Send the id and args from a custom action back for the {@link MediaSession} to perform.
+ *
+ * @see #sendCustomAction(PlaybackState.CustomAction action, Bundle args)
+ * @param action The action identifier of the {@link PlaybackState.CustomAction} as
+ * specified by the {@link MediaSession}.
+ * @param args Optional arguments to supply to the {@link MediaSession} for this
+ * custom action.
+ * @throws IllegalArgumentException if action is null or empty.
+ */
+ public void sendCustomAction(@NonNull String action, @Nullable Bundle args) {
+ if (TextUtils.isEmpty(action)) {
+ // Fixed copy-paste error: this previously reported
+ // "CustomAction cannot be null." even though the parameter here
+ // is the action String, not a CustomAction object.
+ throw new IllegalArgumentException(
+ "You must specify a non-empty String for sendCustomAction.");
+ }
+ try {
+ mSessionBinder.sendCustomAction(action, args);
+ } catch (RemoteException e) {
+ Log.d(TAG, "Dead object in sendCustomAction.", e);
+ }
+ }
+ }
+
+ /**
+ * Holds information about the current playback and how audio is handled for
+ * this session. Instances are immutable.
+ */
+ public static final class PlaybackInfo {
+ /**
+ * The session uses remote playback.
+ */
+ public static final int PLAYBACK_TYPE_REMOTE = 2;
+ /**
+ * The session uses local playback.
+ */
+ public static final int PLAYBACK_TYPE_LOCAL = 1;
+
+ private final int mVolumeType;
+ private final int mVolumeControl;
+ private final int mMaxVolume;
+ private final int mCurrentVolume;
+ private final AudioAttributes mAudioAttrs;
+
+ /**
+ * @param type One of the PLAYBACK_TYPE_* constants.
+ * @param attrs The audio attributes for the session.
+ * @param control The VolumeProvider.VOLUME_CONTROL_* mode.
+ * @param max The maximum volume.
+ * @param current The current volume.
+ * @hide
+ */
+ public PlaybackInfo(int type, AudioAttributes attrs, int control, int max, int current) {
+ mVolumeType = type;
+ mAudioAttrs = attrs;
+ mVolumeControl = control;
+ mMaxVolume = max;
+ mCurrentVolume = current;
+ }
+
+ /**
+ * Get the type of playback which affects volume handling. One of:
+ * <ul>
+ * <li>{@link #PLAYBACK_TYPE_LOCAL}</li>
+ * <li>{@link #PLAYBACK_TYPE_REMOTE}</li>
+ * </ul>
+ *
+ * @return The type of playback this session is using.
+ */
+ public int getPlaybackType() {
+ return mVolumeType;
+ }
+
+ /**
+ * Get the audio attributes for this session. The attributes will affect
+ * volume handling for the session. When the volume type is
+ * {@link PlaybackInfo#PLAYBACK_TYPE_REMOTE} these may be ignored by the
+ * remote volume handler.
+ *
+ * @return The attributes for this session.
+ */
+ public AudioAttributes getAudioAttributes() {
+ return mAudioAttrs;
+ }
+
+ /**
+ * Get the type of volume control that can be used. One of:
+ * <ul>
+ * <li>{@link VolumeProvider#VOLUME_CONTROL_ABSOLUTE}</li>
+ * <li>{@link VolumeProvider#VOLUME_CONTROL_RELATIVE}</li>
+ * <li>{@link VolumeProvider#VOLUME_CONTROL_FIXED}</li>
+ * </ul>
+ *
+ * @return The type of volume control that may be used with this
+ * session.
+ */
+ public int getVolumeControl() {
+ return mVolumeControl;
+ }
+
+ /**
+ * Get the maximum volume that may be set for this session.
+ *
+ * @return The maximum allowed volume where this session is playing.
+ */
+ public int getMaxVolume() {
+ return mMaxVolume;
+ }
+
+ /**
+ * Get the current volume for this session.
+ *
+ * @return The current volume where this session is playing.
+ */
+ public int getCurrentVolume() {
+ return mCurrentVolume;
+ }
+ }
+
+ // Binder stub registered with the session service. It forwards session
+ // events to the owning MediaController through a weak reference, so a
+ // registration the session process still holds cannot keep a collected
+ // controller alive.
+ private final static class CallbackStub extends ISessionControllerCallback.Stub {
+ private final WeakReference<MediaController> mController;
+
+ public CallbackStub(MediaController controller) {
+ mController = new WeakReference<MediaController>(controller);
+ }
+
+ @Override
+ public void onSessionDestroyed() {
+ MediaController controller = mController.get();
+ if (controller != null) {
+ controller.postMessage(MSG_DESTROYED, null, null);
+ }
+ }
+
+ @Override
+ public void onEvent(String event, Bundle extras) {
+ MediaController controller = mController.get();
+ if (controller != null) {
+ controller.postMessage(MSG_EVENT, event, extras);
+ }
+ }
+
+ @Override
+ public void onPlaybackStateChanged(PlaybackState state) {
+ MediaController controller = mController.get();
+ if (controller != null) {
+ controller.postMessage(MSG_UPDATE_PLAYBACK_STATE, state, null);
+ }
+ }
+
+ @Override
+ public void onMetadataChanged(MediaMetadata metadata) {
+ MediaController controller = mController.get();
+ if (controller != null) {
+ controller.postMessage(MSG_UPDATE_METADATA, metadata, null);
+ }
+ }
+
+ @Override
+ public void onQueueChanged(ParceledListSlice parceledQueue) {
+ // Unwrap the parceled queue before posting so callbacks receive a
+ // plain List (or null when there is no queue).
+ List<MediaSession.QueueItem> queue = parceledQueue == null ? null : parceledQueue
+ .getList();
+ MediaController controller = mController.get();
+ if (controller != null) {
+ controller.postMessage(MSG_UPDATE_QUEUE, queue, null);
+ }
+ }
+
+ @Override
+ public void onQueueTitleChanged(CharSequence title) {
+ MediaController controller = mController.get();
+ if (controller != null) {
+ controller.postMessage(MSG_UPDATE_QUEUE_TITLE, title, null);
+ }
+ }
+
+ @Override
+ public void onExtrasChanged(Bundle extras) {
+ MediaController controller = mController.get();
+ if (controller != null) {
+ controller.postMessage(MSG_UPDATE_EXTRAS, extras, null);
+ }
+ }
+
+ @Override
+ public void onVolumeInfoChanged(ParcelableVolumeInfo pvi) {
+ MediaController controller = mController.get();
+ if (controller != null) {
+ PlaybackInfo info = new PlaybackInfo(pvi.volumeType, pvi.audioAttrs, pvi.controlType,
+ pvi.maxVolume, pvi.currentVolume);
+ controller.postMessage(MSG_UPDATE_VOLUME, info, null);
+ }
+ }
+
+ }
+
+ // Delivers session events to a single Callback on the looper it was
+ // registered with. Messages that arrive after the callback has been
+ // unregistered are dropped via the mRegistered flag.
+ private final static class MessageHandler extends Handler {
+ private final MediaController.Callback mCallback;
+ private boolean mRegistered = false;
+
+ public MessageHandler(Looper looper, MediaController.Callback cb) {
+ // NOTE(review): the three-arg Handler constructor is a hidden API;
+ // the third argument presumably makes the handler asynchronous —
+ // confirm against the platform Handler implementation.
+ super(looper, null, true);
+ mCallback = cb;
+ }
+
+ @Override
+ public void handleMessage(Message msg) {
+ if (!mRegistered) {
+ return;
+ }
+ switch (msg.what) {
+ case MSG_EVENT:
+ mCallback.onSessionEvent((String) msg.obj, msg.getData());
+ break;
+ case MSG_UPDATE_PLAYBACK_STATE:
+ mCallback.onPlaybackStateChanged((PlaybackState) msg.obj);
+ break;
+ case MSG_UPDATE_METADATA:
+ mCallback.onMetadataChanged((MediaMetadata) msg.obj);
+ break;
+ case MSG_UPDATE_QUEUE:
+ // Unchecked cast: the poster (CallbackStub.onQueueChanged)
+ // always supplies a List<MediaSession.QueueItem> or null.
+ mCallback.onQueueChanged((List<MediaSession.QueueItem>) msg.obj);
+ break;
+ case MSG_UPDATE_QUEUE_TITLE:
+ mCallback.onQueueTitleChanged((CharSequence) msg.obj);
+ break;
+ case MSG_UPDATE_EXTRAS:
+ mCallback.onExtrasChanged((Bundle) msg.obj);
+ break;
+ case MSG_UPDATE_VOLUME:
+ mCallback.onAudioInfoChanged((PlaybackInfo) msg.obj);
+ break;
+ case MSG_DESTROYED:
+ mCallback.onSessionDestroyed();
+ break;
+ }
+ }
+
+ public void post(int what, Object obj, Bundle data) {
+ Message msg = obtainMessage(what, obj);
+ msg.setData(data);
+ msg.sendToTarget();
+ }
+ }
+
+}
diff --git a/android/media/session/MediaSession.java b/android/media/session/MediaSession.java
new file mode 100644
index 00000000..b8184a07
--- /dev/null
+++ b/android/media/session/MediaSession.java
@@ -0,0 +1,1468 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.session;
+
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.app.Activity;
+import android.app.PendingIntent;
+import android.content.Context;
+import android.content.Intent;
+import android.content.pm.ParceledListSlice;
+import android.media.AudioAttributes;
+import android.media.MediaDescription;
+import android.media.MediaMetadata;
+import android.media.Rating;
+import android.media.VolumeProvider;
+import android.net.Uri;
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Message;
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.os.RemoteException;
+import android.os.ResultReceiver;
+import android.os.UserHandle;
+import android.service.media.MediaBrowserService;
+import android.text.TextUtils;
+import android.util.Log;
+import android.view.KeyEvent;
+import android.view.ViewConfiguration;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.ref.WeakReference;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * Allows interaction with media controllers, volume keys, media buttons, and
+ * transport controls.
+ * <p>
+ * A MediaSession should be created when an app wants to publish media playback
+ * information or handle media keys. In general an app only needs one session
+ * for all playback, though multiple sessions can be created to provide finer
+ * grain controls of media.
+ * <p>
+ * Once a session is created the owner of the session may pass its
+ * {@link #getSessionToken() session token} to other processes to allow them to
+ * create a {@link MediaController} to interact with the session.
+ * <p>
+ * To receive commands, media keys, and other events a {@link Callback} must be
+ * set with {@link #setCallback(Callback)} and {@link #setActive(boolean)
+ * setActive(true)} must be called.
+ * <p>
+ * When an app is finished performing playback it must call {@link #release()}
+ * to clean up the session and notify any controllers.
+ * <p>
+ * MediaSession objects are thread safe.
+ */
+public final class MediaSession {
+ private static final String TAG = "MediaSession";
+
+ /**
+ * Set this flag on the session to indicate that it can handle media button
+ * events.
+ * @deprecated This flag is no longer used. All media sessions are expected to handle media
+ * button events now.
+ */
+ @Deprecated
+ public static final int FLAG_HANDLES_MEDIA_BUTTONS = 1 << 0;
+
+ /**
+ * Set this flag on the session to indicate that it handles transport
+ * control commands through its {@link Callback}.
+ * @deprecated This flag is no longer used. All media sessions are expected to handle transport
+ * controls now.
+ */
+ @Deprecated
+ public static final int FLAG_HANDLES_TRANSPORT_CONTROLS = 1 << 1;
+
+ /**
+ * System only flag for a session that needs to have priority over all other
+ * sessions. This flag ensures this session will receive media button events
+ * regardless of the current ordering in the system.
+ *
+ * @hide
+ */
+ public static final int FLAG_EXCLUSIVE_GLOBAL_PRIORITY = 1 << 16;
+
+ /** @hide */
+ @Retention(RetentionPolicy.SOURCE)
+ @IntDef(flag = true, value = {
+ FLAG_HANDLES_MEDIA_BUTTONS,
+ FLAG_HANDLES_TRANSPORT_CONTROLS,
+ FLAG_EXCLUSIVE_GLOBAL_PRIORITY })
+ public @interface SessionFlags { }
+
+ // Guards mCallbackHandler and mVolumeProvider updates.
+ private final Object mLock = new Object();
+ // Largest artwork dimension accepted by setMetadata before downscaling.
+ private final int mMaxBitmapSize;
+
+ private final MediaSession.Token mSessionToken;
+ private final MediaController mController;
+ private final ISession mBinder;
+ private final CallbackStub mCbStub;
+
+ private CallbackMessageHandler mCallbackHandler;
+ private VolumeProvider mVolumeProvider;
+ private PlaybackState mPlaybackState;
+
+ // Last active state successfully pushed to the session service.
+ private boolean mActive = false;
+
+ /**
+ * Creates a new session. The session will automatically be registered with
+ * the system but will not be published until {@link #setActive(boolean)
+ * setActive(true)} is called. You must call {@link #release()} when
+ * finished with the session.
+ *
+ * @param context The context to use to create the session.
+ * @param tag A short name for debugging purposes.
+ */
+ public MediaSession(@NonNull Context context, @NonNull String tag) {
+ this(context, tag, UserHandle.myUserId());
+ }
+
+ /**
+ * Creates a new session as the specified user. To create a session as a
+ * user other than your own you must hold the
+ * {@link android.Manifest.permission#INTERACT_ACROSS_USERS_FULL}
+ * permission.
+ *
+ * @param context The context to use to create the session.
+ * @param tag A short name for debugging purposes.
+ * @param userId The user id to create the session as.
+ * @throws RuntimeException if the session service cannot be reached.
+ * @hide
+ */
+ public MediaSession(@NonNull Context context, @NonNull String tag, int userId) {
+ if (context == null) {
+ throw new IllegalArgumentException("context cannot be null.");
+ }
+ if (TextUtils.isEmpty(tag)) {
+ throw new IllegalArgumentException("tag cannot be null or empty");
+ }
+ mMaxBitmapSize = context.getResources().getDimensionPixelSize(
+ com.android.internal.R.dimen.config_mediaMetadataBitmapMaxSize);
+ mCbStub = new CallbackStub(this);
+ MediaSessionManager manager = (MediaSessionManager) context
+ .getSystemService(Context.MEDIA_SESSION_SERVICE);
+ try {
+ mBinder = manager.createSession(mCbStub, tag, userId);
+ mSessionToken = new Token(mBinder.getController());
+ mController = new MediaController(context, mSessionToken);
+ } catch (RemoteException e) {
+ // Creation failure is unrecoverable; surface it as unchecked.
+ throw new RuntimeException("Remote error creating session.", e);
+ }
+ }
+
+ /**
+ * Set the callback to receive updates for the MediaSession. This includes
+ * media button events and transport controls. The caller's thread will be
+ * used to post updates.
+ * <p>
+ * Set the callback to null to stop receiving updates.
+ *
+ * @param callback The callback object
+ */
+ public void setCallback(@Nullable Callback callback) {
+ setCallback(callback, null);
+ }
+
+ /**
+ * Set the callback to receive updates for the MediaSession. This includes
+ * media button events and transport controls.
+ * <p>
+ * Set the callback to null to stop receiving updates.
+ *
+ * @param callback The callback to receive updates on.
+ * @param handler The handler that events should be posted on.
+ */
+ public void setCallback(@Nullable Callback callback, @Nullable Handler handler) {
+ synchronized (mLock) {
+ if (mCallbackHandler != null) {
+ // We're updating the callback, clear the session from the old one.
+ mCallbackHandler.mCallback.mSession = null;
+ mCallbackHandler.removeCallbacksAndMessages(null);
+ }
+ if (callback == null) {
+ mCallbackHandler = null;
+ return;
+ }
+ if (handler == null) {
+ // No handler supplied: deliver on the calling thread's looper.
+ handler = new Handler();
+ }
+ callback.mSession = this;
+ CallbackMessageHandler msgHandler = new CallbackMessageHandler(handler.getLooper(),
+ callback);
+ mCallbackHandler = msgHandler;
+ }
+ }
+
+ /**
+ * Set an intent for launching UI for this Session. This can be used as a
+ * quick link to an ongoing media screen. The intent should be for an
+ * activity that may be started using {@link Activity#startActivity(Intent)}.
+ *
+ * @param pi The intent to launch to show UI for this Session.
+ */
+ public void setSessionActivity(@Nullable PendingIntent pi) {
+ try {
+ mBinder.setLaunchPendingIntent(pi);
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Failure in setLaunchPendingIntent.", e);
+ }
+ }
+
+ /**
+ * Set a pending intent for your media button receiver to allow restarting
+ * playback after the session has been stopped. If your app is started in
+ * this way an {@link Intent#ACTION_MEDIA_BUTTON} intent will be sent via
+ * the pending intent.
+ *
+ * @param mbr The {@link PendingIntent} to send the media button event to.
+ */
+ public void setMediaButtonReceiver(@Nullable PendingIntent mbr) {
+ try {
+ mBinder.setMediaButtonReceiver(mbr);
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Failure in setMediaButtonReceiver.", e);
+ }
+ }
+
+ /**
+ * Set any flags for the session.
+ *
+ * @param flags The flags to set for this session.
+ */
+ public void setFlags(@SessionFlags int flags) {
+ try {
+ mBinder.setFlags(flags);
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Failure in setFlags.", e);
+ }
+ }
+
+ /**
+ * Set the attributes for this session's audio. This will affect the
+ * system's volume handling for this session. If
+ * {@link #setPlaybackToRemote} was previously called it will stop receiving
+ * volume commands and the system will begin sending volume changes to the
+ * appropriate stream.
+ * <p>
+ * By default sessions use attributes for media.
+ *
+ * @param attributes The {@link AudioAttributes} for this session's audio.
+ * @throws IllegalArgumentException if attributes is null.
+ */
+ public void setPlaybackToLocal(AudioAttributes attributes) {
+ if (attributes == null) {
+ throw new IllegalArgumentException("Attributes cannot be null for local playback.");
+ }
+ try {
+ mBinder.setPlaybackToLocal(attributes);
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Failure in setPlaybackToLocal.", e);
+ }
+ }
+
+ /**
+ * Configure this session to use remote volume handling. This must be called
+ * to receive volume button events, otherwise the system will adjust the
+ * appropriate stream volume for this session. If
+ * {@link #setPlaybackToLocal} was previously called the system will stop
+ * handling volume changes for this session and pass them to the volume
+ * provider instead.
+ *
+ * @param volumeProvider The provider that will handle volume changes. May
+ * not be null.
+ */
+ public void setPlaybackToRemote(@NonNull VolumeProvider volumeProvider) {
+ if (volumeProvider == null) {
+ throw new IllegalArgumentException("volumeProvider may not be null!");
+ }
+ synchronized (mLock) {
+ mVolumeProvider = volumeProvider;
+ }
+ // Forward provider-originated volume changes back to the session
+ // service via notifyRemoteVolumeChanged.
+ volumeProvider.setCallback(new VolumeProvider.Callback() {
+ @Override
+ public void onVolumeChanged(VolumeProvider volumeProvider) {
+ notifyRemoteVolumeChanged(volumeProvider);
+ }
+ });
+
+ try {
+ mBinder.setPlaybackToRemote(volumeProvider.getVolumeControl(),
+ volumeProvider.getMaxVolume());
+ mBinder.setCurrentVolume(volumeProvider.getCurrentVolume());
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Failure in setPlaybackToRemote.", e);
+ }
+ }
+
+ /**
+ * Set if this session is currently active and ready to receive commands. If
+ * set to false your session's controller may not be discoverable. You must
+ * set the session to active before it can start receiving media button
+ * events or transport commands.
+ *
+ * @param active Whether this session is active or not.
+ */
+ public void setActive(boolean active) {
+ // No-op when the state is unchanged; mActive is only updated after the
+ // binder call succeeds so a failed call can be retried.
+ if (mActive == active) {
+ return;
+ }
+ try {
+ mBinder.setActive(active);
+ mActive = active;
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Failure in setActive.", e);
+ }
+ }
+
+ /**
+ * Get the current active state of this session.
+ *
+ * @return True if the session is active, false otherwise.
+ */
+ public boolean isActive() {
+ return mActive;
+ }
+
+ /**
+ * Send a proprietary event to all MediaControllers listening to this
+ * Session. It's up to the Controller/Session owner to determine the meaning
+ * of any events.
+ *
+ * @param event The name of the event to send
+ * @param extras Any extras included with the event
+ * @throws IllegalArgumentException if event is null or empty.
+ */
+ public void sendSessionEvent(@NonNull String event, @Nullable Bundle extras) {
+ if (TextUtils.isEmpty(event)) {
+ throw new IllegalArgumentException("event cannot be null or empty");
+ }
+ try {
+ mBinder.sendEvent(event, extras);
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Error sending event", e);
+ }
+ }
+
+ /**
+ * This must be called when an app has finished performing playback. If
+ * playback is expected to start again shortly the session can be left open,
+ * but it must be released if your activity or service is being destroyed.
+ */
+ public void release() {
+ try {
+ mBinder.destroy();
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Error releasing session: ", e);
+ }
+ }
+
+ /**
+ * Retrieve a token object that can be used by apps to create a
+ * {@link MediaController} for interacting with this session. The owner of
+ * the session is responsible for deciding how to distribute these tokens.
+ *
+ * @return A token that can be used to create a MediaController for this
+ * session
+ */
+ public @NonNull Token getSessionToken() {
+ return mSessionToken;
+ }
+
+ /**
+ * Get a controller for this session. This is a convenience method to avoid
+ * having to cache your own controller in process.
+ *
+ * @return A controller for this session.
+ */
+ public @NonNull MediaController getController() {
+ return mController;
+ }
+
+ /**
+ * Update the current playback state.
+ *
+ * @param state The current state of playback
+ */
+ public void setPlaybackState(@Nullable PlaybackState state) {
+ // Cache locally first; the cached value is kept even if the binder
+ // call below fails.
+ mPlaybackState = state;
+ try {
+ mBinder.setPlaybackState(state);
+ } catch (RemoteException e) {
+ Log.wtf(TAG, "Dead object in setPlaybackState.", e);
+ }
+ }
+
+ /**
+ * Update the current metadata. New metadata can be created using
+ * {@link android.media.MediaMetadata.Builder}. This operation may take time proportional to
+ * the size of the bitmap to replace large bitmaps with a scaled down copy.
+ *
+ * @param metadata The new metadata
+ * @see android.media.MediaMetadata.Builder#putBitmap
+ */
+    public void setMetadata(@Nullable MediaMetadata metadata) {
+        if (metadata != null) {
+            // Rebuild the metadata so any oversized bitmaps are replaced with
+            // copies scaled down to mMaxBitmapSize before crossing the binder.
+            metadata = (new MediaMetadata.Builder(metadata, mMaxBitmapSize)).build();
+        }
+        try {
+            mBinder.setMetadata(metadata);
+        } catch (RemoteException e) {
+            // Fixed: the message previously said "setPlaybackState" — a
+            // copy/paste error from the method above.
+            Log.wtf(TAG, "Dead object in setMetadata.", e);
+        }
+    }
+
+ /**
+ * Update the list of items in the play queue. It is an ordered list and
+ * should contain the current item, and previous or upcoming items if they
+ * exist. Specify null if there is no current play queue.
+ * <p>
+ * The queue should be of reasonable size. If the play queue is unbounded
+ * within your app, it is better to send a reasonable amount in a sliding
+ * window instead.
+ *
+ * @param queue A list of items in the play queue.
+ */
+    public void setQueue(@Nullable List<QueueItem> queue) {
+        try {
+            // A null queue clears the play queue; otherwise wrap it in a
+            // ParceledListSlice so large lists can cross the binder safely.
+            mBinder.setQueue(queue == null ? null : new ParceledListSlice<QueueItem>(queue));
+        } catch (RemoteException e) {
+            // Fixed: Log.wtf(String, Throwable) was being called, which used
+            // the message as the log tag and dropped TAG entirely. Pass TAG
+            // explicitly, matching every other catch block in this class.
+            Log.wtf(TAG, "Dead object in setQueue.", e);
+        }
+    }
+
+ /**
+ * Set the title of the play queue. The UI should display this title along
+ * with the play queue itself.
+ * e.g. "Play Queue", "Now Playing", or an album name.
+ *
+ * @param title The title of the play queue.
+ */
+    public void setQueueTitle(@Nullable CharSequence title) {
+        try {
+            mBinder.setQueueTitle(title);
+        } catch (RemoteException e) {
+            // Fixed: Log.wtf(String, Throwable) was being called, which used
+            // the message as the log tag and dropped TAG entirely. Pass TAG
+            // explicitly, matching every other catch block in this class.
+            Log.wtf(TAG, "Dead object in setQueueTitle.", e);
+        }
+    }
+
+ /**
+ * Set the style of rating used by this session. Apps trying to set the
+ * rating should use this style. Must be one of the following:
+ * <ul>
+ * <li>{@link Rating#RATING_NONE}</li>
+ * <li>{@link Rating#RATING_3_STARS}</li>
+ * <li>{@link Rating#RATING_4_STARS}</li>
+ * <li>{@link Rating#RATING_5_STARS}</li>
+ * <li>{@link Rating#RATING_HEART}</li>
+ * <li>{@link Rating#RATING_PERCENTAGE}</li>
+ * <li>{@link Rating#RATING_THUMB_UP_DOWN}</li>
+ * </ul>
+ */
+ public void setRatingType(@Rating.Style int type) {
+ try {
+ mBinder.setRatingType(type);
+ } catch (RemoteException e) {
+ Log.e(TAG, "Error in setRatingType.", e);
+ }
+ }
+
+ /**
+ * Set some extras that can be associated with the {@link MediaSession}. No assumptions should
+ * be made as to how a {@link MediaController} will handle these extras.
+ * Keys should be fully qualified (e.g. com.example.MY_EXTRA) to avoid conflicts.
+ *
+ * @param extras The extras associated with the {@link MediaSession}.
+ */
+    public void setExtras(@Nullable Bundle extras) {
+        try {
+            mBinder.setExtras(extras);
+        } catch (RemoteException e) {
+            // Fixed: Log.wtf(String, Throwable) was being called, which used
+            // the message as the log tag and dropped TAG entirely. Pass TAG
+            // explicitly, matching every other catch block in this class.
+            Log.wtf(TAG, "Dead object in setExtras.", e);
+        }
+    }
+
+ /**
+ * Notify the system that the remote volume changed.
+ *
+ * @param provider The provider that is handling volume changes.
+ * @hide
+ */
+    public void notifyRemoteVolumeChanged(VolumeProvider provider) {
+        // Only the currently-registered provider may push volume updates;
+        // a provider replaced by a later setPlaybackToRemote call is stale.
+        synchronized (mLock) {
+            if (provider == null || provider != mVolumeProvider) {
+                Log.w(TAG, "Received update from stale volume provider");
+                return;
+            }
+        }
+        // Binder call deliberately happens outside mLock to avoid holding the
+        // lock across IPC.
+        try {
+            mBinder.setCurrentVolume(provider.getCurrentVolume());
+        } catch (RemoteException e) {
+            Log.e(TAG, "Error in notifyVolumeChanged", e);
+        }
+    }
+
+ /**
+ * Returns the name of the package that sent the last media button, transport control, or
+ * command from controllers and the system. This is only valid while in a request callback, such
+ * as {@link Callback#onPlay}.
+ *
+ * @hide
+ */
+    public String getCallingPackage() {
+        try {
+            return mBinder.getCallingPackage();
+        } catch (RemoteException e) {
+            Log.wtf(TAG, "Dead object in getCallingPackage.", e);
+        }
+        // Binder call failed — callers must tolerate a null package name.
+        return null;
+    }
+
+ private void dispatchPrepare() {
+ postToCallback(CallbackMessageHandler.MSG_PREPARE);
+ }
+
+ private void dispatchPrepareFromMediaId(String mediaId, Bundle extras) {
+ postToCallback(CallbackMessageHandler.MSG_PREPARE_MEDIA_ID, mediaId, extras);
+ }
+
+ private void dispatchPrepareFromSearch(String query, Bundle extras) {
+ postToCallback(CallbackMessageHandler.MSG_PREPARE_SEARCH, query, extras);
+ }
+
+ private void dispatchPrepareFromUri(Uri uri, Bundle extras) {
+ postToCallback(CallbackMessageHandler.MSG_PREPARE_URI, uri, extras);
+ }
+
+ private void dispatchPlay() {
+ postToCallback(CallbackMessageHandler.MSG_PLAY);
+ }
+
+ private void dispatchPlayFromMediaId(String mediaId, Bundle extras) {
+ postToCallback(CallbackMessageHandler.MSG_PLAY_MEDIA_ID, mediaId, extras);
+ }
+
+ private void dispatchPlayFromSearch(String query, Bundle extras) {
+ postToCallback(CallbackMessageHandler.MSG_PLAY_SEARCH, query, extras);
+ }
+
+ private void dispatchPlayFromUri(Uri uri, Bundle extras) {
+ postToCallback(CallbackMessageHandler.MSG_PLAY_URI, uri, extras);
+ }
+
+ private void dispatchSkipToItem(long id) {
+ postToCallback(CallbackMessageHandler.MSG_SKIP_TO_ITEM, id);
+ }
+
+ private void dispatchPause() {
+ postToCallback(CallbackMessageHandler.MSG_PAUSE);
+ }
+
+ private void dispatchStop() {
+ postToCallback(CallbackMessageHandler.MSG_STOP);
+ }
+
+ private void dispatchNext() {
+ postToCallback(CallbackMessageHandler.MSG_NEXT);
+ }
+
+ private void dispatchPrevious() {
+ postToCallback(CallbackMessageHandler.MSG_PREVIOUS);
+ }
+
+ private void dispatchFastForward() {
+ postToCallback(CallbackMessageHandler.MSG_FAST_FORWARD);
+ }
+
+ private void dispatchRewind() {
+ postToCallback(CallbackMessageHandler.MSG_REWIND);
+ }
+
+ private void dispatchSeekTo(long pos) {
+ postToCallback(CallbackMessageHandler.MSG_SEEK_TO, pos);
+ }
+
+ private void dispatchRate(Rating rating) {
+ postToCallback(CallbackMessageHandler.MSG_RATE, rating);
+ }
+
+ private void dispatchCustomAction(String action, Bundle args) {
+ postToCallback(CallbackMessageHandler.MSG_CUSTOM_ACTION, action, args);
+ }
+
+ private void dispatchMediaButton(Intent mediaButtonIntent) {
+ postToCallback(CallbackMessageHandler.MSG_MEDIA_BUTTON, mediaButtonIntent);
+ }
+
+ private void dispatchAdjustVolume(int direction) {
+ postToCallback(CallbackMessageHandler.MSG_ADJUST_VOLUME, direction);
+ }
+
+ private void dispatchSetVolumeTo(int volume) {
+ postToCallback(CallbackMessageHandler.MSG_SET_VOLUME, volume);
+ }
+
+ private void postToCallback(int what) {
+ postToCallback(what, null);
+ }
+
+ private void postCommand(String command, Bundle args, ResultReceiver resultCb) {
+ Command cmd = new Command(command, args, resultCb);
+ postToCallback(CallbackMessageHandler.MSG_COMMAND, cmd);
+ }
+
+ private void postToCallback(int what, Object obj) {
+ postToCallback(what, obj, null);
+ }
+
+ private void postToCallback(int what, Object obj, Bundle extras) {
+ synchronized (mLock) {
+ if (mCallbackHandler != null) {
+ mCallbackHandler.post(what, obj, extras);
+ }
+ }
+ }
+
+ /**
+ * Return true if this is considered an active playback state.
+ *
+ * @hide
+ */
+    public static boolean isActiveState(int state) {
+        // "Active" covers any state where playback is in progress or imminent;
+        // all other states (none/stopped/paused/error/...) fall through to false.
+        switch (state) {
+            case PlaybackState.STATE_FAST_FORWARDING:
+            case PlaybackState.STATE_REWINDING:
+            case PlaybackState.STATE_SKIPPING_TO_PREVIOUS:
+            case PlaybackState.STATE_SKIPPING_TO_NEXT:
+            case PlaybackState.STATE_BUFFERING:
+            case PlaybackState.STATE_CONNECTING:
+            case PlaybackState.STATE_PLAYING:
+                return true;
+        }
+        return false;
+    }
+
+ /**
+ * Represents an ongoing session. This may be passed to apps by the session
+ * owner to allow them to create a {@link MediaController} to communicate with
+ * the session.
+ */
+    public static final class Token implements Parcelable {
+
+        // Controller-side binder for the session; identity (asBinder()) is the
+        // basis for equals()/hashCode() below.
+        private ISessionController mBinder;
+
+        /**
+         * @hide
+         */
+        public Token(ISessionController binder) {
+            mBinder = binder;
+        }
+
+        @Override
+        public int describeContents() {
+            return 0;
+        }
+
+        @Override
+        public void writeToParcel(Parcel dest, int flags) {
+            // NOTE(review): assumes mBinder is non-null here and would NPE
+            // otherwise, while equals()/hashCode() do guard null — confirm
+            // tokens are never constructed with a null binder.
+            dest.writeStrongBinder(mBinder.asBinder());
+        }
+
+        @Override
+        public int hashCode() {
+            final int prime = 31;
+            int result = 1;
+            // Hash the underlying IBinder, not the proxy, so two proxies for
+            // the same session hash equally.
+            result = prime * result + ((mBinder == null) ? 0 : mBinder.asBinder().hashCode());
+            return result;
+        }
+
+        @Override
+        public boolean equals(Object obj) {
+            if (this == obj)
+                return true;
+            if (obj == null)
+                return false;
+            if (getClass() != obj.getClass())
+                return false;
+            Token other = (Token) obj;
+            if (mBinder == null) {
+                if (other.mBinder != null)
+                    return false;
+            } else if (!mBinder.asBinder().equals(other.mBinder.asBinder()))
+                return false;
+            return true;
+        }
+
+        // Package-private accessor used by MediaController to talk to the session.
+        ISessionController getBinder() {
+            return mBinder;
+        }
+
+        public static final Parcelable.Creator<Token> CREATOR
+                = new Parcelable.Creator<Token>() {
+            @Override
+            public Token createFromParcel(Parcel in) {
+                return new Token(ISessionController.Stub.asInterface(in.readStrongBinder()));
+            }
+
+            @Override
+            public Token[] newArray(int size) {
+                return new Token[size];
+            }
+        };
+    }
+
+ /**
+ * Receives media buttons, transport controls, and commands from controllers
+ * and the system. A callback may be set using {@link #setCallback}.
+ */
+ public abstract static class Callback {
+ private MediaSession mSession;
+ private CallbackMessageHandler mHandler;
+ private boolean mMediaPlayPauseKeyPending;
+
+ public Callback() {
+ }
+
+ /**
+ * Called when a controller has sent a command to this session.
+ * The owner of the session may handle custom commands but is not
+ * required to.
+ *
+ * @param command The command name.
+ * @param args Optional parameters for the command, may be null.
+ * @param cb A result receiver to which a result may be sent by the command, may be null.
+ */
+ public void onCommand(@NonNull String command, @Nullable Bundle args,
+ @Nullable ResultReceiver cb) {
+ }
+
+ /**
+ * Called when a media button is pressed and this session has the
+ * highest priority or a controller sends a media button event to the
+ * session. The default behavior will call the relevant method if the
+ * action for it was set.
+ * <p>
+ * The intent will be of type {@link Intent#ACTION_MEDIA_BUTTON} with a
+ * KeyEvent in {@link Intent#EXTRA_KEY_EVENT}
+ *
+ * @param mediaButtonIntent an intent containing the KeyEvent as an
+ * extra
+ * @return True if the event was handled, false otherwise.
+ */
+ public boolean onMediaButtonEvent(@NonNull Intent mediaButtonIntent) {
+ if (mSession != null && mHandler != null
+ && Intent.ACTION_MEDIA_BUTTON.equals(mediaButtonIntent.getAction())) {
+ KeyEvent ke = mediaButtonIntent.getParcelableExtra(Intent.EXTRA_KEY_EVENT);
+ if (ke != null && ke.getAction() == KeyEvent.ACTION_DOWN) {
+ PlaybackState state = mSession.mPlaybackState;
+ long validActions = state == null ? 0 : state.getActions();
+ switch (ke.getKeyCode()) {
+ case KeyEvent.KEYCODE_MEDIA_PLAY_PAUSE:
+ case KeyEvent.KEYCODE_HEADSETHOOK:
+ if (ke.getRepeatCount() > 0) {
+ // Consider long-press as a single tap.
+ handleMediaPlayPauseKeySingleTapIfPending();
+ } else if (mMediaPlayPauseKeyPending) {
+ // Consider double tap as the next.
+ mHandler.removeMessages(CallbackMessageHandler
+ .MSG_PLAY_PAUSE_KEY_DOUBLE_TAP_TIMEOUT);
+ mMediaPlayPauseKeyPending = false;
+ if ((validActions & PlaybackState.ACTION_SKIP_TO_NEXT) != 0) {
+ onSkipToNext();
+ }
+ } else {
+ mMediaPlayPauseKeyPending = true;
+ mHandler.sendEmptyMessageDelayed(CallbackMessageHandler
+ .MSG_PLAY_PAUSE_KEY_DOUBLE_TAP_TIMEOUT,
+ ViewConfiguration.getDoubleTapTimeout());
+ }
+ return true;
+ default:
+ // If another key is pressed within double tap timeout, consider the
+ // pending play/pause as a single tap to handle media keys in order.
+ handleMediaPlayPauseKeySingleTapIfPending();
+ break;
+ }
+
+ switch (ke.getKeyCode()) {
+ case KeyEvent.KEYCODE_MEDIA_PLAY:
+ if ((validActions & PlaybackState.ACTION_PLAY) != 0) {
+ onPlay();
+ return true;
+ }
+ break;
+ case KeyEvent.KEYCODE_MEDIA_PAUSE:
+ if ((validActions & PlaybackState.ACTION_PAUSE) != 0) {
+ onPause();
+ return true;
+ }
+ break;
+ case KeyEvent.KEYCODE_MEDIA_NEXT:
+ if ((validActions & PlaybackState.ACTION_SKIP_TO_NEXT) != 0) {
+ onSkipToNext();
+ return true;
+ }
+ break;
+ case KeyEvent.KEYCODE_MEDIA_PREVIOUS:
+ if ((validActions & PlaybackState.ACTION_SKIP_TO_PREVIOUS) != 0) {
+ onSkipToPrevious();
+ return true;
+ }
+ break;
+ case KeyEvent.KEYCODE_MEDIA_STOP:
+ if ((validActions & PlaybackState.ACTION_STOP) != 0) {
+ onStop();
+ return true;
+ }
+ break;
+ case KeyEvent.KEYCODE_MEDIA_FAST_FORWARD:
+ if ((validActions & PlaybackState.ACTION_FAST_FORWARD) != 0) {
+ onFastForward();
+ return true;
+ }
+ break;
+ case KeyEvent.KEYCODE_MEDIA_REWIND:
+ if ((validActions & PlaybackState.ACTION_REWIND) != 0) {
+ onRewind();
+ return true;
+ }
+ break;
+ }
+ }
+ }
+ return false;
+ }
+
+ private void handleMediaPlayPauseKeySingleTapIfPending() {
+ if (!mMediaPlayPauseKeyPending) {
+ return;
+ }
+ mMediaPlayPauseKeyPending = false;
+ mHandler.removeMessages(CallbackMessageHandler.MSG_PLAY_PAUSE_KEY_DOUBLE_TAP_TIMEOUT);
+ PlaybackState state = mSession.mPlaybackState;
+ long validActions = state == null ? 0 : state.getActions();
+ boolean isPlaying = state != null
+ && state.getState() == PlaybackState.STATE_PLAYING;
+ boolean canPlay = (validActions & (PlaybackState.ACTION_PLAY_PAUSE
+ | PlaybackState.ACTION_PLAY)) != 0;
+ boolean canPause = (validActions & (PlaybackState.ACTION_PLAY_PAUSE
+ | PlaybackState.ACTION_PAUSE)) != 0;
+ if (isPlaying && canPause) {
+ onPause();
+ } else if (!isPlaying && canPlay) {
+ onPlay();
+ }
+ }
+
+ /**
+ * Override to handle requests to prepare playback. During the preparation, a session should
+ * not hold audio focus in order to allow other sessions play seamlessly. The state of
+ * playback should be updated to {@link PlaybackState#STATE_PAUSED} after the preparation is
+ * done.
+ */
+ public void onPrepare() {
+ }
+
+ /**
+ * Override to handle requests to prepare for playing a specific mediaId that was provided
+ * by your app's {@link MediaBrowserService}. During the preparation, a session should not
+ * hold audio focus in order to allow other sessions play seamlessly. The state of playback
+ * should be updated to {@link PlaybackState#STATE_PAUSED} after the preparation is done.
+ * The playback of the prepared content should start in the implementation of
+ * {@link #onPlay}. Override {@link #onPlayFromMediaId} to handle requests for starting
+ * playback without preparation.
+ */
+ public void onPrepareFromMediaId(String mediaId, Bundle extras) {
+ }
+
+ /**
+ * Override to handle requests to prepare playback from a search query. An empty query
+ * indicates that the app may prepare any music. The implementation should attempt to make a
+ * smart choice about what to play. During the preparation, a session should not hold audio
+ * focus in order to allow other sessions play seamlessly. The state of playback should be
+ * updated to {@link PlaybackState#STATE_PAUSED} after the preparation is done. The playback
+ * of the prepared content should start in the implementation of {@link #onPlay}. Override
+ * {@link #onPlayFromSearch} to handle requests for starting playback without preparation.
+ */
+ public void onPrepareFromSearch(String query, Bundle extras) {
+ }
+
+ /**
+ * Override to handle requests to prepare a specific media item represented by a URI.
+ * During the preparation, a session should not hold audio focus in order to allow
+ * other sessions play seamlessly. The state of playback should be updated to
+ * {@link PlaybackState#STATE_PAUSED} after the preparation is done.
+ * The playback of the prepared content should start in the implementation of
+ * {@link #onPlay}. Override {@link #onPlayFromUri} to handle requests
+ * for starting playback without preparation.
+ */
+ public void onPrepareFromUri(Uri uri, Bundle extras) {
+ }
+
+ /**
+ * Override to handle requests to begin playback.
+ */
+ public void onPlay() {
+ }
+
+ /**
+ * Override to handle requests to begin playback from a search query. An
+ * empty query indicates that the app may play any music. The
+ * implementation should attempt to make a smart choice about what to
+ * play.
+ */
+ public void onPlayFromSearch(String query, Bundle extras) {
+ }
+
+ /**
+ * Override to handle requests to play a specific mediaId that was
+ * provided by your app's {@link MediaBrowserService}.
+ */
+ public void onPlayFromMediaId(String mediaId, Bundle extras) {
+ }
+
+ /**
+ * Override to handle requests to play a specific media item represented by a URI.
+ */
+ public void onPlayFromUri(Uri uri, Bundle extras) {
+ }
+
+ /**
+ * Override to handle requests to play an item with a given id from the
+ * play queue.
+ */
+ public void onSkipToQueueItem(long id) {
+ }
+
+ /**
+ * Override to handle requests to pause playback.
+ */
+ public void onPause() {
+ }
+
+ /**
+ * Override to handle requests to skip to the next media item.
+ */
+ public void onSkipToNext() {
+ }
+
+ /**
+ * Override to handle requests to skip to the previous media item.
+ */
+ public void onSkipToPrevious() {
+ }
+
+ /**
+ * Override to handle requests to fast forward.
+ */
+ public void onFastForward() {
+ }
+
+ /**
+ * Override to handle requests to rewind.
+ */
+ public void onRewind() {
+ }
+
+ /**
+ * Override to handle requests to stop playback.
+ */
+ public void onStop() {
+ }
+
+ /**
+ * Override to handle requests to seek to a specific position in ms.
+ *
+ * @param pos New position to move to, in milliseconds.
+ */
+ public void onSeekTo(long pos) {
+ }
+
+ /**
+ * Override to handle the item being rated.
+ *
+ * @param rating
+ */
+ public void onSetRating(@NonNull Rating rating) {
+ }
+
+ /**
+ * Called when a {@link MediaController} wants a {@link PlaybackState.CustomAction} to be
+ * performed.
+ *
+ * @param action The action that was originally sent in the
+ * {@link PlaybackState.CustomAction}.
+ * @param extras Optional extras specified by the {@link MediaController}.
+ */
+ public void onCustomAction(@NonNull String action, @Nullable Bundle extras) {
+ }
+ }
+
+ /**
+ * @hide
+ */
+ public static class CallbackStub extends ISessionCallback.Stub {
+ private WeakReference<MediaSession> mMediaSession;
+
+ public CallbackStub(MediaSession session) {
+ mMediaSession = new WeakReference<MediaSession>(session);
+ }
+
+ @Override
+ public void onCommand(String command, Bundle args, ResultReceiver cb) {
+ MediaSession session = mMediaSession.get();
+ if (session != null) {
+ session.postCommand(command, args, cb);
+ }
+ }
+
+ @Override
+ public void onMediaButton(Intent mediaButtonIntent, int sequenceNumber,
+ ResultReceiver cb) {
+ MediaSession session = mMediaSession.get();
+ try {
+ if (session != null) {
+ session.dispatchMediaButton(mediaButtonIntent);
+ }
+ } finally {
+ if (cb != null) {
+ cb.send(sequenceNumber, null);
+ }
+ }
+ }
+
+ @Override
+ public void onPrepare() {
+ MediaSession session = mMediaSession.get();
+ if (session != null) {
+ session.dispatchPrepare();
+ }
+ }
+
+ @Override
+ public void onPrepareFromMediaId(String mediaId, Bundle extras) {
+ MediaSession session = mMediaSession.get();
+ if (session != null) {
+ session.dispatchPrepareFromMediaId(mediaId, extras);
+ }
+ }
+
+ @Override
+ public void onPrepareFromSearch(String query, Bundle extras) {
+ MediaSession session = mMediaSession.get();
+ if (session != null) {
+ session.dispatchPrepareFromSearch(query, extras);
+ }
+ }
+
+ @Override
+ public void onPrepareFromUri(Uri uri, Bundle extras) {
+ MediaSession session = mMediaSession.get();
+ if (session != null) {
+ session.dispatchPrepareFromUri(uri, extras);
+ }
+ }
+
+ @Override
+ public void onPlay() {
+ MediaSession session = mMediaSession.get();
+ if (session != null) {
+ session.dispatchPlay();
+ }
+ }
+
+ @Override
+ public void onPlayFromMediaId(String mediaId, Bundle extras) {
+ MediaSession session = mMediaSession.get();
+ if (session != null) {
+ session.dispatchPlayFromMediaId(mediaId, extras);
+ }
+ }
+
+ @Override
+ public void onPlayFromSearch(String query, Bundle extras) {
+ MediaSession session = mMediaSession.get();
+ if (session != null) {
+ session.dispatchPlayFromSearch(query, extras);
+ }
+ }
+
+ @Override
+ public void onPlayFromUri(Uri uri, Bundle extras) {
+ MediaSession session = mMediaSession.get();
+ if (session != null) {
+ session.dispatchPlayFromUri(uri, extras);
+ }
+ }
+
+ @Override
+ public void onSkipToTrack(long id) {
+ MediaSession session = mMediaSession.get();
+ if (session != null) {
+ session.dispatchSkipToItem(id);
+ }
+ }
+
+ @Override
+ public void onPause() {
+ MediaSession session = mMediaSession.get();
+ if (session != null) {
+ session.dispatchPause();
+ }
+ }
+
+ @Override
+ public void onStop() {
+ MediaSession session = mMediaSession.get();
+ if (session != null) {
+ session.dispatchStop();
+ }
+ }
+
+ @Override
+ public void onNext() {
+ MediaSession session = mMediaSession.get();
+ if (session != null) {
+ session.dispatchNext();
+ }
+ }
+
+ @Override
+ public void onPrevious() {
+ MediaSession session = mMediaSession.get();
+ if (session != null) {
+ session.dispatchPrevious();
+ }
+ }
+
+ @Override
+ public void onFastForward() {
+ MediaSession session = mMediaSession.get();
+ if (session != null) {
+ session.dispatchFastForward();
+ }
+ }
+
+ @Override
+ public void onRewind() {
+ MediaSession session = mMediaSession.get();
+ if (session != null) {
+ session.dispatchRewind();
+ }
+ }
+
+ @Override
+ public void onSeekTo(long pos) {
+ MediaSession session = mMediaSession.get();
+ if (session != null) {
+ session.dispatchSeekTo(pos);
+ }
+ }
+
+ @Override
+ public void onRate(Rating rating) {
+ MediaSession session = mMediaSession.get();
+ if (session != null) {
+ session.dispatchRate(rating);
+ }
+ }
+
+ @Override
+ public void onCustomAction(String action, Bundle args) {
+ MediaSession session = mMediaSession.get();
+ if (session != null) {
+ session.dispatchCustomAction(action, args);
+ }
+ }
+
+ @Override
+ public void onAdjustVolume(int direction) {
+ MediaSession session = mMediaSession.get();
+ if (session != null) {
+ session.dispatchAdjustVolume(direction);
+ }
+ }
+
+ @Override
+ public void onSetVolumeTo(int value) {
+ MediaSession session = mMediaSession.get();
+ if (session != null) {
+ session.dispatchSetVolumeTo(value);
+ }
+ }
+
+ }
+
+ /**
+ * A single item that is part of the play queue. It contains a description
+ * of the item and its id in the queue.
+ */
+ public static final class QueueItem implements Parcelable {
+ /**
+ * This id is reserved. No items can be explicitly assigned this id.
+ */
+ public static final int UNKNOWN_ID = -1;
+
+ private final MediaDescription mDescription;
+ private final long mId;
+
+ /**
+ * Create a new {@link MediaSession.QueueItem}.
+ *
+ * @param description The {@link MediaDescription} for this item.
+ * @param id An identifier for this item. It must be unique within the
+ * play queue and cannot be {@link #UNKNOWN_ID}.
+ */
+ public QueueItem(MediaDescription description, long id) {
+ if (description == null) {
+ throw new IllegalArgumentException("Description cannot be null.");
+ }
+ if (id == UNKNOWN_ID) {
+ throw new IllegalArgumentException("Id cannot be QueueItem.UNKNOWN_ID");
+ }
+ mDescription = description;
+ mId = id;
+ }
+
+ private QueueItem(Parcel in) {
+ mDescription = MediaDescription.CREATOR.createFromParcel(in);
+ mId = in.readLong();
+ }
+
+ /**
+ * Get the description for this item.
+ */
+ public MediaDescription getDescription() {
+ return mDescription;
+ }
+
+ /**
+ * Get the queue id for this item.
+ */
+ public long getQueueId() {
+ return mId;
+ }
+
+ @Override
+ public void writeToParcel(Parcel dest, int flags) {
+ mDescription.writeToParcel(dest, flags);
+ dest.writeLong(mId);
+ }
+
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ public static final Creator<MediaSession.QueueItem> CREATOR = new Creator<MediaSession.QueueItem>() {
+
+ @Override
+ public MediaSession.QueueItem createFromParcel(Parcel p) {
+ return new MediaSession.QueueItem(p);
+ }
+
+ @Override
+ public MediaSession.QueueItem[] newArray(int size) {
+ return new MediaSession.QueueItem[size];
+ }
+ };
+
+ @Override
+ public String toString() {
+ return "MediaSession.QueueItem {" +
+ "Description=" + mDescription +
+ ", Id=" + mId + " }";
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o == null) {
+ return false;
+ }
+
+ if (!(o instanceof QueueItem)) {
+ return false;
+ }
+
+ final QueueItem item = (QueueItem) o;
+ if (mId != item.mId) {
+ return false;
+ }
+
+ if (!Objects.equals(mDescription, item.mDescription)) {
+ return false;
+ }
+
+ return true;
+ }
+ }
+
+    // Immutable holder pairing a controller command with its arguments and the
+    // ResultReceiver the command's reply should be sent to. Posted to the
+    // callback handler as the payload of MSG_COMMAND.
+    private static final class Command {
+        public final String command;
+        public final Bundle extras;
+        public final ResultReceiver stub;
+
+        public Command(String command, Bundle extras, ResultReceiver stub) {
+            this.command = command;
+            this.extras = extras;
+            this.stub = stub;
+        }
+    }
+
+ private class CallbackMessageHandler extends Handler {
+
+ private static final int MSG_COMMAND = 1;
+ private static final int MSG_MEDIA_BUTTON = 2;
+ private static final int MSG_PREPARE = 3;
+ private static final int MSG_PREPARE_MEDIA_ID = 4;
+ private static final int MSG_PREPARE_SEARCH = 5;
+ private static final int MSG_PREPARE_URI = 6;
+ private static final int MSG_PLAY = 7;
+ private static final int MSG_PLAY_MEDIA_ID = 8;
+ private static final int MSG_PLAY_SEARCH = 9;
+ private static final int MSG_PLAY_URI = 10;
+ private static final int MSG_SKIP_TO_ITEM = 11;
+ private static final int MSG_PAUSE = 12;
+ private static final int MSG_STOP = 13;
+ private static final int MSG_NEXT = 14;
+ private static final int MSG_PREVIOUS = 15;
+ private static final int MSG_FAST_FORWARD = 16;
+ private static final int MSG_REWIND = 17;
+ private static final int MSG_SEEK_TO = 18;
+ private static final int MSG_RATE = 19;
+ private static final int MSG_CUSTOM_ACTION = 20;
+ private static final int MSG_ADJUST_VOLUME = 21;
+ private static final int MSG_SET_VOLUME = 22;
+ private static final int MSG_PLAY_PAUSE_KEY_DOUBLE_TAP_TIMEOUT = 23;
+
+ private MediaSession.Callback mCallback;
+
+ public CallbackMessageHandler(Looper looper, MediaSession.Callback callback) {
+ super(looper, null, true);
+ mCallback = callback;
+ mCallback.mHandler = this;
+ }
+
+ public void post(int what, Object obj, Bundle bundle) {
+ Message msg = obtainMessage(what, obj);
+ msg.setData(bundle);
+ msg.sendToTarget();
+ }
+
+ public void post(int what, Object obj) {
+ obtainMessage(what, obj).sendToTarget();
+ }
+
+ public void post(int what) {
+ post(what, null);
+ }
+
+ public void post(int what, Object obj, int arg1) {
+ obtainMessage(what, arg1, 0, obj).sendToTarget();
+ }
+
+ @Override
+ public void handleMessage(Message msg) {
+ VolumeProvider vp;
+ switch (msg.what) {
+ case MSG_COMMAND:
+ Command cmd = (Command) msg.obj;
+ mCallback.onCommand(cmd.command, cmd.extras, cmd.stub);
+ break;
+ case MSG_MEDIA_BUTTON:
+ mCallback.onMediaButtonEvent((Intent) msg.obj);
+ break;
+ case MSG_PREPARE:
+ mCallback.onPrepare();
+ break;
+ case MSG_PREPARE_MEDIA_ID:
+ mCallback.onPrepareFromMediaId((String) msg.obj, msg.getData());
+ break;
+ case MSG_PREPARE_SEARCH:
+ mCallback.onPrepareFromSearch((String) msg.obj, msg.getData());
+ break;
+ case MSG_PREPARE_URI:
+ mCallback.onPrepareFromUri((Uri) msg.obj, msg.getData());
+ break;
+ case MSG_PLAY:
+ mCallback.onPlay();
+ break;
+ case MSG_PLAY_MEDIA_ID:
+ mCallback.onPlayFromMediaId((String) msg.obj, msg.getData());
+ break;
+ case MSG_PLAY_SEARCH:
+ mCallback.onPlayFromSearch((String) msg.obj, msg.getData());
+ break;
+ case MSG_PLAY_URI:
+ mCallback.onPlayFromUri((Uri) msg.obj, msg.getData());
+ break;
+ case MSG_SKIP_TO_ITEM:
+ mCallback.onSkipToQueueItem((Long) msg.obj);
+ break;
+ case MSG_PAUSE:
+ mCallback.onPause();
+ break;
+ case MSG_STOP:
+ mCallback.onStop();
+ break;
+ case MSG_NEXT:
+ mCallback.onSkipToNext();
+ break;
+ case MSG_PREVIOUS:
+ mCallback.onSkipToPrevious();
+ break;
+ case MSG_FAST_FORWARD:
+ mCallback.onFastForward();
+ break;
+ case MSG_REWIND:
+ mCallback.onRewind();
+ break;
+ case MSG_SEEK_TO:
+ mCallback.onSeekTo((Long) msg.obj);
+ break;
+ case MSG_RATE:
+ mCallback.onSetRating((Rating) msg.obj);
+ break;
+ case MSG_CUSTOM_ACTION:
+ mCallback.onCustomAction((String) msg.obj, msg.getData());
+ break;
+ case MSG_ADJUST_VOLUME:
+ synchronized (mLock) {
+ vp = mVolumeProvider;
+ }
+ if (vp != null) {
+ vp.onAdjustVolume((int) msg.obj);
+ }
+ break;
+ case MSG_SET_VOLUME:
+ synchronized (mLock) {
+ vp = mVolumeProvider;
+ }
+ if (vp != null) {
+ vp.onSetVolumeTo((int) msg.obj);
+ }
+ break;
+ case MSG_PLAY_PAUSE_KEY_DOUBLE_TAP_TIMEOUT:
+ mCallback.handleMediaPlayPauseKeySingleTapIfPending();
+ break;
+ }
+ }
+ }
+}
diff --git a/android/media/session/MediaSessionLegacyHelper.java b/android/media/session/MediaSessionLegacyHelper.java
new file mode 100644
index 00000000..7c3af31a
--- /dev/null
+++ b/android/media/session/MediaSessionLegacyHelper.java
@@ -0,0 +1,512 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.session;
+
+import android.app.PendingIntent;
+import android.app.PendingIntent.CanceledException;
+import android.content.ComponentName;
+import android.content.Context;
+import android.content.Intent;
+import android.graphics.Bitmap;
+import android.graphics.Canvas;
+import android.graphics.Paint;
+import android.graphics.RectF;
+import android.media.AudioManager;
+import android.media.MediaMetadata;
+import android.media.MediaMetadataEditor;
+import android.media.MediaMetadataRetriever;
+import android.media.Rating;
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.Looper;
+import android.util.ArrayMap;
+import android.util.Log;
+import android.view.KeyEvent;
+
+/**
+ * Helper for connecting existing APIs up to the new session APIs. This can be
+ * used by RCC, AudioFocus, etc. to create a single session that translates to
+ * all those components.
+ *
+ * @hide
+ */
+public class MediaSessionLegacyHelper {
+    private static final String TAG = "MediaSessionHelper";
+    private static final boolean DEBUG = Log.isLoggable(TAG, Log.DEBUG);
+
+    // Guards lazy creation of the process-wide singleton below.
+    private static final Object sLock = new Object();
+    private static MediaSessionLegacyHelper sInstance;
+
+    private Context mContext;
+    private MediaSessionManager mSessionManager;
+    // NOTE(review): mHandler appears unused anywhere in this class -- candidate
+    // for removal upstream.
+    private Handler mHandler = new Handler(Looper.getMainLooper());
+    // The legacy APIs use PendingIntents to register/unregister media button
+    // receivers and these are associated with RCC.
+    // NOTE(review): mSessions is read and written without synchronization;
+    // presumably all callers run on a single (main) thread -- confirm.
+    private ArrayMap<PendingIntent, SessionHolder> mSessions
+            = new ArrayMap<PendingIntent, SessionHolder>();
+
+    // Private: instances are only obtained through getHelper().
+    private MediaSessionLegacyHelper(Context context) {
+        mContext = context;
+        mSessionManager = (MediaSessionManager) context
+                .getSystemService(Context.MEDIA_SESSION_SERVICE);
+    }
+
+    /**
+     * Get the singleton helper, creating it on first use. The helper is bound
+     * to the application context, never the caller's context.
+     */
+    public static MediaSessionLegacyHelper getHelper(Context context) {
+        synchronized (sLock) {
+            if (sInstance == null) {
+                sInstance = new MediaSessionLegacyHelper(context.getApplicationContext());
+            }
+        }
+        return sInstance;
+    }
+
+    /**
+     * Translate a new-style {@link MediaMetadata} into the legacy Bundle keyed
+     * by the stringified {@link MediaMetadataRetriever} /
+     * {@link MediaMetadataEditor} integer constants. Artwork is copied (scaled
+     * down via {@link #scaleBitmapIfTooBig} if needed) only when both requested
+     * dimensions are != -1; track art is preferred over album art.
+     */
+    public static Bundle getOldMetadata(MediaMetadata metadata, int artworkWidth,
+            int artworkHeight) {
+        boolean includeArtwork = artworkWidth != -1 && artworkHeight != -1;
+        Bundle oldMetadata = new Bundle();
+        if (metadata.containsKey(MediaMetadata.METADATA_KEY_ALBUM)) {
+            oldMetadata.putString(String.valueOf(MediaMetadataRetriever.METADATA_KEY_ALBUM),
+                    metadata.getString(MediaMetadata.METADATA_KEY_ALBUM));
+        }
+        if (includeArtwork && metadata.containsKey(MediaMetadata.METADATA_KEY_ART)) {
+            Bitmap art = metadata.getBitmap(MediaMetadata.METADATA_KEY_ART);
+            oldMetadata.putParcelable(String.valueOf(MediaMetadataEditor.BITMAP_KEY_ARTWORK),
+                    scaleBitmapIfTooBig(art, artworkWidth, artworkHeight));
+        } else if (includeArtwork && metadata.containsKey(MediaMetadata.METADATA_KEY_ALBUM_ART)) {
+            // Fall back to album art if the track art wasn't available
+            Bitmap art = metadata.getBitmap(MediaMetadata.METADATA_KEY_ALBUM_ART);
+            oldMetadata.putParcelable(String.valueOf(MediaMetadataEditor.BITMAP_KEY_ARTWORK),
+                    scaleBitmapIfTooBig(art, artworkWidth, artworkHeight));
+        }
+        if (metadata.containsKey(MediaMetadata.METADATA_KEY_ALBUM_ARTIST)) {
+            oldMetadata.putString(String.valueOf(MediaMetadataRetriever.METADATA_KEY_ALBUMARTIST),
+                    metadata.getString(MediaMetadata.METADATA_KEY_ALBUM_ARTIST));
+        }
+        if (metadata.containsKey(MediaMetadata.METADATA_KEY_ARTIST)) {
+            oldMetadata.putString(String.valueOf(MediaMetadataRetriever.METADATA_KEY_ARTIST),
+                    metadata.getString(MediaMetadata.METADATA_KEY_ARTIST));
+        }
+        if (metadata.containsKey(MediaMetadata.METADATA_KEY_AUTHOR)) {
+            oldMetadata.putString(String.valueOf(MediaMetadataRetriever.METADATA_KEY_AUTHOR),
+                    metadata.getString(MediaMetadata.METADATA_KEY_AUTHOR));
+        }
+        if (metadata.containsKey(MediaMetadata.METADATA_KEY_COMPILATION)) {
+            oldMetadata.putString(String.valueOf(MediaMetadataRetriever.METADATA_KEY_COMPILATION),
+                    metadata.getString(MediaMetadata.METADATA_KEY_COMPILATION));
+        }
+        if (metadata.containsKey(MediaMetadata.METADATA_KEY_COMPOSER)) {
+            oldMetadata.putString(String.valueOf(MediaMetadataRetriever.METADATA_KEY_COMPOSER),
+                    metadata.getString(MediaMetadata.METADATA_KEY_COMPOSER));
+        }
+        if (metadata.containsKey(MediaMetadata.METADATA_KEY_DATE)) {
+            oldMetadata.putString(String.valueOf(MediaMetadataRetriever.METADATA_KEY_DATE),
+                    metadata.getString(MediaMetadata.METADATA_KEY_DATE));
+        }
+        if (metadata.containsKey(MediaMetadata.METADATA_KEY_DISC_NUMBER)) {
+            oldMetadata.putLong(String.valueOf(MediaMetadataRetriever.METADATA_KEY_DISC_NUMBER),
+                    metadata.getLong(MediaMetadata.METADATA_KEY_DISC_NUMBER));
+        }
+        if (metadata.containsKey(MediaMetadata.METADATA_KEY_DURATION)) {
+            oldMetadata.putLong(String.valueOf(MediaMetadataRetriever.METADATA_KEY_DURATION),
+                    metadata.getLong(MediaMetadata.METADATA_KEY_DURATION));
+        }
+        if (metadata.containsKey(MediaMetadata.METADATA_KEY_GENRE)) {
+            oldMetadata.putString(String.valueOf(MediaMetadataRetriever.METADATA_KEY_GENRE),
+                    metadata.getString(MediaMetadata.METADATA_KEY_GENRE));
+        }
+        if (metadata.containsKey(MediaMetadata.METADATA_KEY_NUM_TRACKS)) {
+            oldMetadata.putLong(String.valueOf(MediaMetadataRetriever.METADATA_KEY_NUM_TRACKS),
+                    metadata.getLong(MediaMetadata.METADATA_KEY_NUM_TRACKS));
+        }
+        if (metadata.containsKey(MediaMetadata.METADATA_KEY_RATING)) {
+            oldMetadata.putParcelable(String.valueOf(MediaMetadataEditor.RATING_KEY_BY_OTHERS),
+                    metadata.getRating(MediaMetadata.METADATA_KEY_RATING));
+        }
+        if (metadata.containsKey(MediaMetadata.METADATA_KEY_USER_RATING)) {
+            oldMetadata.putParcelable(String.valueOf(MediaMetadataEditor.RATING_KEY_BY_USER),
+                    metadata.getRating(MediaMetadata.METADATA_KEY_USER_RATING));
+        }
+        if (metadata.containsKey(MediaMetadata.METADATA_KEY_TITLE)) {
+            oldMetadata.putString(String.valueOf(MediaMetadataRetriever.METADATA_KEY_TITLE),
+                    metadata.getString(MediaMetadata.METADATA_KEY_TITLE));
+        }
+        if (metadata.containsKey(MediaMetadata.METADATA_KEY_TRACK_NUMBER)) {
+            oldMetadata.putLong(
+                    String.valueOf(MediaMetadataRetriever.METADATA_KEY_CD_TRACK_NUMBER),
+                    metadata.getLong(MediaMetadata.METADATA_KEY_TRACK_NUMBER));
+        }
+        if (metadata.containsKey(MediaMetadata.METADATA_KEY_WRITER)) {
+            oldMetadata.putString(String.valueOf(MediaMetadataRetriever.METADATA_KEY_WRITER),
+                    metadata.getString(MediaMetadata.METADATA_KEY_WRITER));
+        }
+        if (metadata.containsKey(MediaMetadata.METADATA_KEY_YEAR)) {
+            oldMetadata.putLong(String.valueOf(MediaMetadataRetriever.METADATA_KEY_YEAR),
+                    metadata.getLong(MediaMetadata.METADATA_KEY_YEAR));
+        }
+        return oldMetadata;
+    }
+
+    /**
+     * Return the session registered for the given PendingIntent, or null if
+     * none is registered.
+     */
+    public MediaSession getSession(PendingIntent pi) {
+        SessionHolder holder = mSessions.get(pi);
+        return holder == null ? null : holder.mSession;
+    }
+
+    /**
+     * Dispatch a media button KeyEvent through the session service. A null
+     * event is logged and ignored.
+     */
+    public void sendMediaButtonEvent(KeyEvent keyEvent, boolean needWakeLock) {
+        if (keyEvent == null) {
+            Log.w(TAG, "Tried to send a null key event. Ignoring.");
+            return;
+        }
+        mSessionManager.dispatchMediaKeyEvent(keyEvent, needWakeLock);
+        if (DEBUG) {
+            Log.d(TAG, "dispatched media key " + keyEvent);
+        }
+    }
+
+    /**
+     * Dispatch a volume KeyEvent through the session service. A null event is
+     * logged and ignored.
+     */
+    public void sendVolumeKeyEvent(KeyEvent keyEvent, int stream, boolean musicOnly) {
+        if (keyEvent == null) {
+            Log.w(TAG, "Tried to send a null key event. Ignoring.");
+            return;
+        }
+        mSessionManager.dispatchVolumeKeyEvent(keyEvent, stream, musicOnly);
+    }
+
+    /** Dispatch a relative volume adjustment through the session service. */
+    public void sendAdjustVolumeBy(int suggestedStream, int delta, int flags) {
+        mSessionManager.dispatchAdjustVolume(suggestedStream, delta, flags);
+        if (DEBUG) {
+            Log.d(TAG, "dispatched volume adjustment");
+        }
+    }
+
+    /** True if a global-priority session currently owns media keys. */
+    public boolean isGlobalPriorityActive() {
+        return mSessionManager.isGlobalPriorityActive();
+    }
+
+    /**
+     * Register a legacy RemoteControlClient-style callback for the given
+     * PendingIntent, enabling transport-control handling on its session.
+     * Re-registering the same listener is a no-op.
+     */
+    public void addRccListener(PendingIntent pi, MediaSession.Callback listener) {
+        if (pi == null) {
+            Log.w(TAG, "Pending intent was null, can't add rcc listener.");
+            return;
+        }
+        SessionHolder holder = getHolder(pi, true);
+        if (holder == null) {
+            return;
+        }
+        if (holder.mRccListener != null) {
+            if (holder.mRccListener == listener) {
+                if (DEBUG) {
+                    Log.d(TAG, "addRccListener listener already added.");
+                }
+                // This is already the registered listener, ignore
+                return;
+            }
+        }
+        holder.mRccListener = listener;
+        holder.mFlags |= MediaSession.FLAG_HANDLES_TRANSPORT_CONTROLS;
+        holder.mSession.setFlags(holder.mFlags);
+        holder.update();
+        if (DEBUG) {
+            Log.d(TAG, "Added rcc listener for " + pi + ".");
+        }
+    }
+
+    /**
+     * Remove the RCC listener for the given PendingIntent, clearing the
+     * transport-controls flag; may release the session via update().
+     */
+    public void removeRccListener(PendingIntent pi) {
+        if (pi == null) {
+            return;
+        }
+        SessionHolder holder = getHolder(pi, false);
+        if (holder != null && holder.mRccListener != null) {
+            holder.mRccListener = null;
+            holder.mFlags &= ~MediaSession.FLAG_HANDLES_TRANSPORT_CONTROLS;
+            holder.mSession.setFlags(holder.mFlags);
+            holder.update();
+            if (DEBUG) {
+                Log.d(TAG, "Removed rcc listener for " + pi + ".");
+            }
+        }
+    }
+
+    /**
+     * Register a media button receiver PendingIntent; key events delivered to
+     * the backing session are forwarded to it via MediaButtonListener.
+     */
+    public void addMediaButtonListener(PendingIntent pi, ComponentName mbrComponent,
+            Context context) {
+        if (pi == null) {
+            Log.w(TAG, "Pending intent was null, can't addMediaButtonListener.");
+            return;
+        }
+        SessionHolder holder = getHolder(pi, true);
+        if (holder == null) {
+            return;
+        }
+        if (holder.mMediaButtonListener != null) {
+            // Already have this listener registered
+            if (DEBUG) {
+                Log.d(TAG, "addMediaButtonListener already added " + pi);
+            }
+        }
+        // NOTE(review): unlike addRccListener there is no early return above,
+        // so an existing listener is silently replaced -- confirm intended.
+        holder.mMediaButtonListener = new MediaButtonListener(pi, context);
+        // TODO determine if handling transport performer commands should also
+        // set this flag
+        holder.mFlags |= MediaSession.FLAG_HANDLES_MEDIA_BUTTONS;
+        holder.mSession.setFlags(holder.mFlags);
+        holder.mSession.setMediaButtonReceiver(pi);
+        holder.update();
+        if (DEBUG) {
+            Log.d(TAG, "addMediaButtonListener added " + pi);
+        }
+    }
+
+    /**
+     * Remove the media button listener for the given PendingIntent; may
+     * release the session via update().
+     */
+    public void removeMediaButtonListener(PendingIntent pi) {
+        if (pi == null) {
+            return;
+        }
+        SessionHolder holder = getHolder(pi, false);
+        if (holder != null && holder.mMediaButtonListener != null) {
+            holder.mFlags &= ~MediaSession.FLAG_HANDLES_MEDIA_BUTTONS;
+            holder.mSession.setFlags(holder.mFlags);
+            holder.mMediaButtonListener = null;
+
+            holder.update();
+            if (DEBUG) {
+                Log.d(TAG, "removeMediaButtonListener removed " + pi);
+            }
+        }
+    }
+
+    /**
+     * Scale a bitmap to fit the smallest dimension by uniformly scaling the
+     * incoming bitmap. If the bitmap fits, then do nothing and return the
+     * original.
+     *
+     * @param bitmap The bitmap to scale; may be null, in which case null is
+     *            returned.
+     * @param maxWidth Maximum width of the result in pixels.
+     * @param maxHeight Maximum height of the result in pixels.
+     * @return The original bitmap if it already fits, otherwise a new
+     *         uniformly-scaled copy.
+     */
+    private static Bitmap scaleBitmapIfTooBig(Bitmap bitmap, int maxWidth, int maxHeight) {
+        if (bitmap != null) {
+            final int width = bitmap.getWidth();
+            final int height = bitmap.getHeight();
+            if (width > maxWidth || height > maxHeight) {
+                // Uniform scale chosen so both dimensions fit.
+                float scale = Math.min((float) maxWidth / width, (float) maxHeight / height);
+                int newWidth = Math.round(scale * width);
+                int newHeight = Math.round(scale * height);
+                Bitmap.Config newConfig = bitmap.getConfig();
+                if (newConfig == null) {
+                    newConfig = Bitmap.Config.ARGB_8888;
+                }
+                Bitmap outBitmap = Bitmap.createBitmap(newWidth, newHeight, newConfig);
+                Canvas canvas = new Canvas(outBitmap);
+                Paint paint = new Paint();
+                paint.setAntiAlias(true);
+                paint.setFilterBitmap(true);
+                canvas.drawBitmap(bitmap, null,
+                        new RectF(0, 0, outBitmap.getWidth(), outBitmap.getHeight()), paint);
+                bitmap = outBitmap;
+            }
+        }
+        return bitmap;
+    }
+
+    /**
+     * Look up the holder for a PendingIntent, optionally creating (and
+     * activating) a new session for it when absent.
+     */
+    private SessionHolder getHolder(PendingIntent pi, boolean createIfMissing) {
+        SessionHolder holder = mSessions.get(pi);
+        if (holder == null && createIfMissing) {
+            MediaSession session;
+            session = new MediaSession(mContext, TAG + "-" + pi.getCreatorPackage());
+            session.setActive(true);
+            holder = new SessionHolder(session, pi);
+            mSessions.put(pi, holder);
+        }
+        return holder;
+    }
+
+    // Fire the given intent at the PendingIntent, swallowing cancellation.
+    private static void sendKeyEvent(PendingIntent pi, Context context, Intent intent) {
+        try {
+            pi.send(context, 0, intent);
+        } catch (CanceledException e) {
+            Log.e(TAG, "Error sending media key down event:", e);
+            // Don't bother sending up if down failed
+            return;
+        }
+    }
+
+    /**
+     * Session callback that converts transport callbacks back into legacy
+     * ACTION_MEDIA_BUTTON broadcasts fired at a registered PendingIntent.
+     */
+    private static final class MediaButtonListener extends MediaSession.Callback {
+        private final PendingIntent mPendingIntent;
+        private final Context mContext;
+
+        public MediaButtonListener(PendingIntent pi, Context context) {
+            mPendingIntent = pi;
+            mContext = context;
+        }
+
+        @Override
+        public boolean onMediaButtonEvent(Intent mediaButtonIntent) {
+            // Forward the original intent untouched.
+            MediaSessionLegacyHelper.sendKeyEvent(mPendingIntent, mContext, mediaButtonIntent);
+            return true;
+        }
+
+        @Override
+        public void onPlay() {
+            sendKeyEvent(KeyEvent.KEYCODE_MEDIA_PLAY);
+        }
+
+        @Override
+        public void onPause() {
+            sendKeyEvent(KeyEvent.KEYCODE_MEDIA_PAUSE);
+        }
+
+        @Override
+        public void onSkipToNext() {
+            sendKeyEvent(KeyEvent.KEYCODE_MEDIA_NEXT);
+        }
+
+        @Override
+        public void onSkipToPrevious() {
+            sendKeyEvent(KeyEvent.KEYCODE_MEDIA_PREVIOUS);
+        }
+
+        @Override
+        public void onFastForward() {
+            sendKeyEvent(KeyEvent.KEYCODE_MEDIA_FAST_FORWARD);
+        }
+
+        @Override
+        public void onRewind() {
+            sendKeyEvent(KeyEvent.KEYCODE_MEDIA_REWIND);
+        }
+
+        @Override
+        public void onStop() {
+            sendKeyEvent(KeyEvent.KEYCODE_MEDIA_STOP);
+        }
+
+        // Synthesize a full down+up key press for the given keycode and fire
+        // both at the PendingIntent as ACTION_MEDIA_BUTTON intents.
+        private void sendKeyEvent(int keyCode) {
+            KeyEvent ke = new KeyEvent(KeyEvent.ACTION_DOWN, keyCode);
+            Intent intent = new Intent(Intent.ACTION_MEDIA_BUTTON);
+            intent.addFlags(Intent.FLAG_RECEIVER_FOREGROUND);
+
+            intent.putExtra(Intent.EXTRA_KEY_EVENT, ke);
+            MediaSessionLegacyHelper.sendKeyEvent(mPendingIntent, mContext, intent);
+
+            ke = new KeyEvent(KeyEvent.ACTION_UP, keyCode);
+            intent.putExtra(Intent.EXTRA_KEY_EVENT, ke);
+            MediaSessionLegacyHelper.sendKeyEvent(mPendingIntent, mContext, intent);
+
+            if (DEBUG) {
+                Log.d(TAG, "Sent " + keyCode + " to pending intent " + mPendingIntent);
+            }
+        }
+    }
+
+    /**
+     * Per-PendingIntent bookkeeping: the backing session, its flags and the
+     * currently registered legacy listeners.
+     */
+    private class SessionHolder {
+        public final MediaSession mSession;
+        public final PendingIntent mPi;
+        public MediaButtonListener mMediaButtonListener;
+        public MediaSession.Callback mRccListener;
+        public int mFlags;
+
+        // Lazily created combined callback; null while no listener is set.
+        public SessionCallback mCb;
+
+        public SessionHolder(MediaSession session, PendingIntent pi) {
+            mSession = session;
+            mPi = pi;
+        }
+
+        // Reconcile session state with the registered listeners: release the
+        // session when the last listener goes away, otherwise install the
+        // combined callback (on the main looper) the first time one appears.
+        public void update() {
+            if (mMediaButtonListener == null && mRccListener == null) {
+                mSession.setCallback(null);
+                mSession.release();
+                mCb = null;
+                mSessions.remove(mPi);
+            } else if (mCb == null) {
+                mCb = new SessionCallback();
+                Handler handler = new Handler(Looper.getMainLooper());
+                mSession.setCallback(mCb, handler);
+            }
+        }
+
+        // Fans session callbacks out to whichever listeners are registered:
+        // media-button/transport events to mMediaButtonListener, seek and
+        // rating to mRccListener.
+        private class SessionCallback extends MediaSession.Callback {
+
+            @Override
+            public boolean onMediaButtonEvent(Intent mediaButtonIntent) {
+                if (mMediaButtonListener != null) {
+                    mMediaButtonListener.onMediaButtonEvent(mediaButtonIntent);
+                }
+                return true;
+            }
+
+            @Override
+            public void onPlay() {
+                if (mMediaButtonListener != null) {
+                    mMediaButtonListener.onPlay();
+                }
+            }
+
+            @Override
+            public void onPause() {
+                if (mMediaButtonListener != null) {
+                    mMediaButtonListener.onPause();
+                }
+            }
+
+            @Override
+            public void onSkipToNext() {
+                if (mMediaButtonListener != null) {
+                    mMediaButtonListener.onSkipToNext();
+                }
+            }
+
+            @Override
+            public void onSkipToPrevious() {
+                if (mMediaButtonListener != null) {
+                    mMediaButtonListener.onSkipToPrevious();
+                }
+            }
+
+            @Override
+            public void onFastForward() {
+                if (mMediaButtonListener != null) {
+                    mMediaButtonListener.onFastForward();
+                }
+            }
+
+            @Override
+            public void onRewind() {
+                if (mMediaButtonListener != null) {
+                    mMediaButtonListener.onRewind();
+                }
+            }
+
+            @Override
+            public void onStop() {
+                if (mMediaButtonListener != null) {
+                    mMediaButtonListener.onStop();
+                }
+            }
+
+            @Override
+            public void onSeekTo(long pos) {
+                if (mRccListener != null) {
+                    mRccListener.onSeekTo(pos);
+                }
+            }
+
+            @Override
+            public void onSetRating(Rating rating) {
+                if (mRccListener != null) {
+                    mRccListener.onSetRating(rating);
+                }
+            }
+        }
+    }
+}
diff --git a/android/media/session/MediaSessionManager.java b/android/media/session/MediaSessionManager.java
new file mode 100644
index 00000000..b215825c
--- /dev/null
+++ b/android/media/session/MediaSessionManager.java
@@ -0,0 +1,690 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.session;
+
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.annotation.RequiresPermission;
+import android.annotation.SystemApi;
+import android.annotation.SystemService;
+import android.content.ComponentName;
+import android.content.Context;
+import android.media.AudioManager;
+import android.media.IRemoteVolumeController;
+import android.media.session.ISessionManager;
+import android.os.Handler;
+import android.os.IBinder;
+import android.os.RemoteException;
+import android.os.ResultReceiver;
+import android.os.ServiceManager;
+import android.os.UserHandle;
+import android.service.notification.NotificationListenerService;
+import android.util.ArrayMap;
+import android.util.Log;
+import android.view.KeyEvent;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Provides support for interacting with {@link MediaSession media sessions}
+ * that applications have published to express their ongoing media playback
+ * state.
+ *
+ * @see MediaSession
+ * @see MediaController
+ */
+@SystemService(Context.MEDIA_SESSION_SERVICE)
+public final class MediaSessionManager {
+ private static final String TAG = "SessionManager";
+
+ /**
+ * Used by IOnMediaKeyListener to indicate that the media key event isn't handled.
+ * @hide
+ */
+ public static final int RESULT_MEDIA_KEY_NOT_HANDLED = 0;
+
+ /**
+ * Used by IOnMediaKeyListener to indicate that the media key event is handled.
+ * @hide
+ */
+ public static final int RESULT_MEDIA_KEY_HANDLED = 1;
+
+ private final ArrayMap<OnActiveSessionsChangedListener, SessionsChangedWrapper> mListeners
+ = new ArrayMap<OnActiveSessionsChangedListener, SessionsChangedWrapper>();
+ private final Object mLock = new Object();
+ private final ISessionManager mService;
+
+ private Context mContext;
+
+ private CallbackImpl mCallback;
+ private OnVolumeKeyLongPressListenerImpl mOnVolumeKeyLongPressListener;
+ private OnMediaKeyListenerImpl mOnMediaKeyListener;
+
+    /**
+     * Construct a manager backed by the system media session service.
+     * @hide
+     */
+    public MediaSessionManager(Context context) {
+        // Consider rewriting like DisplayManagerGlobal
+        // Decide if we need context
+        mContext = context;
+        // All operations below proxy through this binder interface.
+        IBinder b = ServiceManager.getService(Context.MEDIA_SESSION_SERVICE);
+        mService = ISessionManager.Stub.asInterface(b);
+    }
+
+    /**
+     * Create a new session in the system and get the binder for it.
+     *
+     * @param tag A short name for debugging purposes.
+     * @return The binder object from the system
+     * @throws RemoteException if the session service cannot be reached.
+     * @hide
+     */
+    public @NonNull ISession createSession(@NonNull MediaSession.CallbackStub cbStub,
+            @NonNull String tag, int userId) throws RemoteException {
+        return mService.createSession(mContext.getPackageName(), cbStub, tag, userId);
+    }
+
+    /**
+     * Get a list of controllers for all ongoing sessions. The controllers will
+     * be provided in priority order with the most important controller at index
+     * 0.
+     * <p>
+     * This requires the android.Manifest.permission.MEDIA_CONTENT_CONTROL
+     * permission be held by the calling app. You may also retrieve this list if
+     * your app is an enabled notification listener using the
+     * {@link NotificationListenerService} APIs, in which case you must pass the
+     * {@link ComponentName} of your enabled listener.
+     *
+     * @param notificationListener The enabled notification listener component.
+     *            May be null.
+     * @return A list of controllers for ongoing sessions.
+     */
+    public @NonNull List<MediaController> getActiveSessions(
+            @Nullable ComponentName notificationListener) {
+        // Convenience overload: sessions for the calling user only.
+        return getActiveSessionsForUser(notificationListener, UserHandle.myUserId());
+    }
+
+    /**
+     * Get active sessions for a specific user. To retrieve actions for a user
+     * other than your own you must hold the
+     * {@link android.Manifest.permission#INTERACT_ACROSS_USERS_FULL} permission
+     * in addition to any other requirements. If you are an enabled notification
+     * listener you may only get sessions for the users you are enabled for.
+     *
+     * @param notificationListener The enabled notification listener component.
+     *            May be null.
+     * @param userId The user id to fetch sessions for.
+     * @return A list of controllers for ongoing sessions; empty if the service
+     *         call fails.
+     * @hide
+     */
+    public @NonNull List<MediaController> getActiveSessionsForUser(
+            @Nullable ComponentName notificationListener, int userId) {
+        ArrayList<MediaController> controllers = new ArrayList<MediaController>();
+        try {
+            // The service hands back raw session binders; wrap each one in a
+            // MediaController for the caller.
+            List<IBinder> binders = mService.getSessions(notificationListener, userId);
+            int size = binders.size();
+            for (int i = 0; i < size; i++) {
+                MediaController controller = new MediaController(mContext, ISessionController.Stub
+                        .asInterface(binders.get(i)));
+                controllers.add(controller);
+            }
+        } catch (RemoteException e) {
+            // Best effort: log and return whatever was collected (usually empty).
+            Log.e(TAG, "Failed to get active sessions: ", e);
+        }
+        return controllers;
+    }
+
+    /**
+     * Add a listener to be notified when the list of active sessions
+     * changes.This requires the
+     * android.Manifest.permission.MEDIA_CONTENT_CONTROL permission be held by
+     * the calling app. You may also retrieve this list if your app is an
+     * enabled notification listener using the
+     * {@link NotificationListenerService} APIs, in which case you must pass the
+     * {@link ComponentName} of your enabled listener. Updates will be posted to
+     * the thread that registered the listener.
+     *
+     * @param sessionListener The listener to add.
+     * @param notificationListener The enabled notification listener component.
+     *            May be null.
+     */
+    public void addOnActiveSessionsChangedListener(
+            @NonNull OnActiveSessionsChangedListener sessionListener,
+            @Nullable ComponentName notificationListener) {
+        // Null handler => the full overload creates one on the caller's looper.
+        addOnActiveSessionsChangedListener(sessionListener, notificationListener, null);
+    }
+
+    /**
+     * Add a listener to be notified when the list of active sessions
+     * changes.This requires the
+     * android.Manifest.permission.MEDIA_CONTENT_CONTROL permission be held by
+     * the calling app. You may also retrieve this list if your app is an
+     * enabled notification listener using the
+     * {@link NotificationListenerService} APIs, in which case you must pass the
+     * {@link ComponentName} of your enabled listener. Updates will be posted to
+     * the handler specified or to the caller's thread if the handler is null.
+     *
+     * @param sessionListener The listener to add.
+     * @param notificationListener The enabled notification listener component.
+     *            May be null.
+     * @param handler The handler to post events to.
+     */
+    public void addOnActiveSessionsChangedListener(
+            @NonNull OnActiveSessionsChangedListener sessionListener,
+            @Nullable ComponentName notificationListener, @Nullable Handler handler) {
+        // Convenience overload scoped to the calling user.
+        addOnActiveSessionsChangedListener(sessionListener, notificationListener,
+                UserHandle.myUserId(), handler);
+    }
+
+    /**
+     * Add a listener to be notified when the list of active sessions
+     * changes.This requires the
+     * android.Manifest.permission.MEDIA_CONTENT_CONTROL permission be held by
+     * the calling app. You may also retrieve this list if your app is an
+     * enabled notification listener using the
+     * {@link NotificationListenerService} APIs, in which case you must pass the
+     * {@link ComponentName} of your enabled listener.
+     *
+     * @param sessionListener The listener to add.
+     * @param notificationListener The enabled notification listener component.
+     *            May be null.
+     * @param userId The userId to listen for changes on.
+     * @param handler The handler to post updates on. If null a handler on the
+     *            calling thread's looper is used.
+     * @throws IllegalArgumentException if sessionListener is null.
+     * @hide
+     */
+    public void addOnActiveSessionsChangedListener(
+            @NonNull OnActiveSessionsChangedListener sessionListener,
+            @Nullable ComponentName notificationListener, int userId, @Nullable Handler handler) {
+        if (sessionListener == null) {
+            throw new IllegalArgumentException("listener may not be null");
+        }
+        if (handler == null) {
+            handler = new Handler();
+        }
+        synchronized (mLock) {
+            // Each listener may be registered at most once.
+            if (mListeners.get(sessionListener) != null) {
+                Log.w(TAG, "Attempted to add session listener twice, ignoring.");
+                return;
+            }
+            SessionsChangedWrapper wrapper = new SessionsChangedWrapper(mContext, sessionListener,
+                    handler);
+            try {
+                // Only remember the listener once the service registration succeeds.
+                mService.addSessionsListener(wrapper.mStub, notificationListener, userId);
+                mListeners.put(sessionListener, wrapper);
+            } catch (RemoteException e) {
+                Log.e(TAG, "Error in addOnActiveSessionsChangedListener.", e);
+            }
+        }
+    }
+
+    /**
+     * Stop receiving active sessions updates on the specified listener.
+     *
+     * @param listener The listener to remove.
+     * @throws IllegalArgumentException if listener is null.
+     */
+    public void removeOnActiveSessionsChangedListener(
+            @NonNull OnActiveSessionsChangedListener listener) {
+        if (listener == null) {
+            throw new IllegalArgumentException("listener may not be null");
+        }
+        synchronized (mLock) {
+            SessionsChangedWrapper wrapper = mListeners.remove(listener);
+            if (wrapper != null) {
+                try {
+                    mService.removeSessionsListener(wrapper.mStub);
+                } catch (RemoteException e) {
+                    Log.e(TAG, "Error in removeOnActiveSessionsChangedListener.", e);
+                } finally {
+                    // Release wrapper resources even if the service call failed.
+                    wrapper.release();
+                }
+            }
+        }
+    }
+
+    /**
+     * Set the remote volume controller to receive volume updates on. Only for
+     * use by system UI.
+     *
+     * @param rvc The volume controller to receive updates on.
+     * @hide
+     */
+    public void setRemoteVolumeController(IRemoteVolumeController rvc) {
+        try {
+            mService.setRemoteVolumeController(rvc);
+        } catch (RemoteException e) {
+            // Best effort: failure is logged, not surfaced to the caller.
+            Log.e(TAG, "Error in setRemoteVolumeController.", e);
+        }
+    }
+
+    /**
+     * Send a media key event. The receiver will be selected automatically.
+     *
+     * @param keyEvent The KeyEvent to send.
+     * @hide
+     */
+    public void dispatchMediaKeyEvent(@NonNull KeyEvent keyEvent) {
+        // No wake lock is held for this convenience overload.
+        dispatchMediaKeyEvent(keyEvent, false);
+    }
+
+    /**
+     * Send a media key event. The receiver will be selected automatically.
+     *
+     * @param keyEvent The KeyEvent to send.
+     * @param needWakeLock True if a wake lock should be held while sending the key.
+     * @hide
+     */
+    public void dispatchMediaKeyEvent(@NonNull KeyEvent keyEvent, boolean needWakeLock) {
+        try {
+            mService.dispatchMediaKeyEvent(keyEvent, needWakeLock);
+        } catch (RemoteException e) {
+            // Best effort: failure is logged, not surfaced to the caller.
+            Log.e(TAG, "Failed to send key event.", e);
+        }
+    }
+
+    /**
+     * Send a volume key event. The receiver will be selected automatically.
+     *
+     * @param keyEvent The volume KeyEvent to send.
+     * @param stream The audio stream the event should affect.
+     * @param musicOnly True to restrict handling to music playback -- exact
+     *            semantics live in the session service; confirm there.
+     * @hide
+     */
+    public void dispatchVolumeKeyEvent(@NonNull KeyEvent keyEvent, int stream, boolean musicOnly) {
+        try {
+            mService.dispatchVolumeKeyEvent(keyEvent, stream, musicOnly);
+        } catch (RemoteException e) {
+            // Best effort: failure is logged, not surfaced to the caller.
+            Log.e(TAG, "Failed to send volume key event.", e);
+        }
+    }
+
+    /**
+     * Dispatch an adjust volume request to the system. It will be sent to the
+     * most relevant audio stream or media session. The direction must be one of
+     * {@link AudioManager#ADJUST_LOWER}, {@link AudioManager#ADJUST_RAISE},
+     * {@link AudioManager#ADJUST_SAME}.
+     *
+     * @param suggestedStream The stream to fall back to if there isn't a
+     *            relevant stream
+     * @param direction The direction to adjust volume in.
+     * @param flags Any flags to include with the volume change.
+     * @hide
+     */
+    public void dispatchAdjustVolume(int suggestedStream, int direction, int flags) {
+        try {
+            mService.dispatchAdjustVolume(suggestedStream, direction, flags);
+        } catch (RemoteException e) {
+            // Best effort: failure is logged, not surfaced to the caller.
+            Log.e(TAG, "Failed to send adjust volume.", e);
+        }
+    }
+
+    /**
+     * Check if the global priority session is currently active. This can be
+     * used to decide if media keys should be sent to the session or to the app.
+     *
+     * @return True if a global priority session is active; false otherwise or
+     *         if the service cannot be reached.
+     * @hide
+     */
+    public boolean isGlobalPriorityActive() {
+        try {
+            return mService.isGlobalPriorityActive();
+        } catch (RemoteException e) {
+            Log.e(TAG, "Failed to check if the global priority is active.", e);
+        }
+        return false;
+    }
+
+    /**
+     * Set the volume key long-press listener. While the listener is set, the listener
+     * gets the volume key long-presses instead of changing volume.
+     *
+     * <p>System can only have a single volume key long-press listener.
+     *
+     * @param listener The volume key long-press listener. {@code null} to reset.
+     * @param handler The handler on which the listener should be invoked, or {@code null}
+     *            if the listener should be invoked on the calling thread's looper.
+     * @hide
+     */
+    @SystemApi
+    @RequiresPermission(android.Manifest.permission.SET_VOLUME_KEY_LONG_PRESS_LISTENER)
+    public void setOnVolumeKeyLongPressListener(
+            OnVolumeKeyLongPressListener listener, @Nullable Handler handler) {
+        synchronized (mLock) {
+            try {
+                if (listener == null) {
+                    // Clear both the local reference and the service-side hook.
+                    mOnVolumeKeyLongPressListener = null;
+                    mService.setOnVolumeKeyLongPressListener(null);
+                } else {
+                    if (handler == null) {
+                        handler = new Handler();
+                    }
+                    mOnVolumeKeyLongPressListener =
+                            new OnVolumeKeyLongPressListenerImpl(listener, handler);
+                    mService.setOnVolumeKeyLongPressListener(mOnVolumeKeyLongPressListener);
+                }
+            } catch (RemoteException e) {
+                Log.e(TAG, "Failed to set volume key long press listener", e);
+            }
+        }
+    }
+
+    /**
+     * Set the media key listener. While the listener is set, the listener
+     * gets the media key before any other media sessions but after the global priority session.
+     * If the listener handles the key (i.e. returns {@code true}),
+     * other sessions will not get the event.
+     *
+     * <p>System can only have a single media key listener.
+     *
+     * @param listener The media key listener. {@code null} to reset.
+     * @param handler The handler on which the listener should be invoked, or {@code null}
+     *            if the listener should be invoked on the calling thread's looper.
+     * @hide
+     */
+    @SystemApi
+    @RequiresPermission(android.Manifest.permission.SET_MEDIA_KEY_LISTENER)
+    public void setOnMediaKeyListener(OnMediaKeyListener listener, @Nullable Handler handler) {
+        synchronized (mLock) {
+            try {
+                if (listener == null) {
+                    // Clear both the local reference and the service-side hook.
+                    mOnMediaKeyListener = null;
+                    mService.setOnMediaKeyListener(null);
+                } else {
+                    if (handler == null) {
+                        handler = new Handler();
+                    }
+                    mOnMediaKeyListener = new OnMediaKeyListenerImpl(listener, handler);
+                    mService.setOnMediaKeyListener(mOnMediaKeyListener);
+                }
+            } catch (RemoteException e) {
+                Log.e(TAG, "Failed to set media key listener", e);
+            }
+        }
+    }
+
+    /**
+     * Set a {@link Callback}.
+     *
+     * <p>System can only have a single callback, and the callback can only be set by
+     * Bluetooth service process.
+     *
+     * @param callback A {@link Callback}. {@code null} to reset.
+     * @param handler The handler on which the callback should be invoked, or {@code null}
+     *            if the callback should be invoked on the calling thread's looper.
+     * @hide
+     */
+    public void setCallback(@Nullable Callback callback, @Nullable Handler handler) {
+        synchronized (mLock) {
+            try {
+                if (callback == null) {
+                    // Clear both the local reference and the service-side hook.
+                    mCallback = null;
+                    mService.setCallback(null);
+                } else {
+                    if (handler == null) {
+                        handler = new Handler();
+                    }
+                    mCallback = new CallbackImpl(callback, handler);
+                    mService.setCallback(mCallback);
+                }
+            } catch (RemoteException e) {
+                Log.e(TAG, "Failed to set media key callback", e);
+            }
+        }
+    }
+
+ /**
+ * Listens for changes to the list of active sessions. This can be added
+ * using {@link #addOnActiveSessionsChangedListener}.
+ */
+ public interface OnActiveSessionsChangedListener {
+ public void onActiveSessionsChanged(@Nullable List<MediaController> controllers);
+ }
+
+ /**
+ * Listens for volume key long-presses.
+ * @hide
+ */
+ @SystemApi
+ public interface OnVolumeKeyLongPressListener {
+ /**
+ * Called when the volume key is long-pressed.
+ * <p>This will be called for both down and up events.
+ */
+ void onVolumeKeyLongPress(KeyEvent event);
+ }
+
+ /**
+ * Listens for media key events.
+ * @hide
+ */
+ @SystemApi
+ public interface OnMediaKeyListener {
+ /**
+ * Called when the media key is pressed.
+ * <p>If the listener consumes the initial down event (i.e. ACTION_DOWN with
+ * repeat count zero), it must also consume all following key events.
+ * (i.e. ACTION_DOWN with repeat count more than zero, and ACTION_UP).
+ * <p>If it takes more than 1s to return, the key event will be sent to
+ * other media sessions.
+ */
+ boolean onMediaKey(KeyEvent event);
+ }
+
+ /**
+ * Callbacks for the media session service.
+ *
+ * <p>Called when a media key event is dispatched or the addressed player is changed.
+ * The addressed player is either the media session or the media button receiver that will
+ * receive media key events.
+ * @hide
+ */
+ public static abstract class Callback {
+ /**
+ * Called when a media key event is dispatched to the media session
+ * through the media session service.
+ *
+ * @param event Dispatched media key event.
+ * @param sessionToken The media session's token.
+ */
+ public abstract void onMediaKeyEventDispatched(KeyEvent event,
+ MediaSession.Token sessionToken);
+
+ /**
+ * Called when a media key event is dispatched to the media button receiver
+ * through the media session service.
+ * <p>MediaSessionService may broadcast key events to the media button receiver
+ * when reviving playback after the media session is released.
+ *
+ * @param event Dispatched media key event.
+ * @param mediaButtonReceiver The media button receiver.
+ */
+ public abstract void onMediaKeyEventDispatched(KeyEvent event,
+ ComponentName mediaButtonReceiver);
+
+ /**
+ * Called when the addressed player is changed to a media session.
+ * <p>One of the {@link #onAddressedPlayerChanged} callbacks will also be called immediately after
+ * {@link #setCallback} if the addressed player exists.
+ *
+ * @param sessionToken The media session's token.
+ */
+ public abstract void onAddressedPlayerChanged(MediaSession.Token sessionToken);
+
+ /**
+ * Called when the addressed player is changed to the media button receiver.
+ * <p>One of the {@link #onAddressedPlayerChanged} callbacks will also be called immediately after
+ * {@link #setCallback} if the addressed player exists.
+ *
+ * @param mediaButtonReceiver The media button receiver.
+ */
+ public abstract void onAddressedPlayerChanged(ComponentName mediaButtonReceiver);
+ }
+
+ private static final class SessionsChangedWrapper {
+ private Context mContext;
+ private OnActiveSessionsChangedListener mListener;
+ private Handler mHandler;
+
+ public SessionsChangedWrapper(Context context, OnActiveSessionsChangedListener listener,
+ Handler handler) {
+ mContext = context;
+ mListener = listener;
+ mHandler = handler;
+ }
+
+ private final IActiveSessionsListener.Stub mStub = new IActiveSessionsListener.Stub() {
+ @Override
+ public void onActiveSessionsChanged(final List<MediaSession.Token> tokens) {
+ final Handler handler = mHandler;
+ if (handler != null) {
+ handler.post(new Runnable() {
+ @Override
+ public void run() {
+ final Context context = mContext;
+ if (context != null) {
+ ArrayList<MediaController> controllers
+ = new ArrayList<MediaController>();
+ int size = tokens.size();
+ for (int i = 0; i < size; i++) {
+ controllers.add(new MediaController(context, tokens.get(i)));
+ }
+ final OnActiveSessionsChangedListener listener = mListener;
+ if (listener != null) {
+ listener.onActiveSessionsChanged(controllers);
+ }
+ }
+ }
+ });
+ }
+ }
+ };
+
+ private void release() {
+ mListener = null;
+ mContext = null;
+ mHandler = null;
+ }
+ }
+
+ private static final class OnVolumeKeyLongPressListenerImpl
+ extends IOnVolumeKeyLongPressListener.Stub {
+ private OnVolumeKeyLongPressListener mListener;
+ private Handler mHandler;
+
+ public OnVolumeKeyLongPressListenerImpl(
+ OnVolumeKeyLongPressListener listener, Handler handler) {
+ mListener = listener;
+ mHandler = handler;
+ }
+
+ @Override
+ public void onVolumeKeyLongPress(KeyEvent event) {
+ if (mListener == null || mHandler == null) {
+ Log.w(TAG, "Failed to call volume key long-press listener." +
+ " Either mListener or mHandler is null");
+ return;
+ }
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mListener.onVolumeKeyLongPress(event);
+ }
+ });
+ }
+ }
+
+ private static final class OnMediaKeyListenerImpl extends IOnMediaKeyListener.Stub {
+ private OnMediaKeyListener mListener;
+ private Handler mHandler;
+
+ public OnMediaKeyListenerImpl(OnMediaKeyListener listener, Handler handler) {
+ mListener = listener;
+ mHandler = handler;
+ }
+
+ @Override
+ public void onMediaKey(KeyEvent event, ResultReceiver result) {
+ if (mListener == null || mHandler == null) {
+ Log.w(TAG, "Failed to call media key listener." +
+ " Either mListener or mHandler is null");
+ return;
+ }
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ boolean handled = mListener.onMediaKey(event);
+ Log.d(TAG, "The media key listener is returned " + handled);
+ if (result != null) {
+ result.send(
+ handled ? RESULT_MEDIA_KEY_HANDLED : RESULT_MEDIA_KEY_NOT_HANDLED,
+ null);
+ }
+ }
+ });
+ }
+ }
+
+ private static final class CallbackImpl extends ICallback.Stub {
+ private final Callback mCallback;
+ private final Handler mHandler;
+
+ public CallbackImpl(@NonNull Callback callback, @NonNull Handler handler) {
+ mCallback = callback;
+ mHandler = handler;
+ }
+
+ @Override
+ public void onMediaKeyEventDispatchedToMediaSession(KeyEvent event,
+ MediaSession.Token sessionToken) {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mCallback.onMediaKeyEventDispatched(event, sessionToken);
+ }
+ });
+ }
+
+ @Override
+ public void onMediaKeyEventDispatchedToMediaButtonReceiver(KeyEvent event,
+ ComponentName mediaButtonReceiver) {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mCallback.onMediaKeyEventDispatched(event, mediaButtonReceiver);
+ }
+ });
+ }
+
+ @Override
+ public void onAddressedPlayerChangedToMediaSession(MediaSession.Token sessionToken) {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mCallback.onAddressedPlayerChanged(sessionToken);
+ }
+ });
+ }
+
+ @Override
+ public void onAddressedPlayerChangedToMediaButtonReceiver(
+ ComponentName mediaButtonReceiver) {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mCallback.onAddressedPlayerChanged(mediaButtonReceiver);
+ }
+ });
+ }
+ }
+}
diff --git a/android/media/session/ParcelableVolumeInfo.java b/android/media/session/ParcelableVolumeInfo.java
new file mode 100644
index 00000000..f59c9756
--- /dev/null
+++ b/android/media/session/ParcelableVolumeInfo.java
@@ -0,0 +1,80 @@
+/* Copyright 2014, The Android Open Source Project
+ **
+ ** Licensed under the Apache License, Version 2.0 (the "License");
+ ** you may not use this file except in compliance with the License.
+ ** You may obtain a copy of the License at
+ **
+ ** http://www.apache.org/licenses/LICENSE-2.0
+ **
+ ** Unless required by applicable law or agreed to in writing, software
+ ** distributed under the License is distributed on an "AS IS" BASIS,
+ ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ ** See the License for the specific language governing permissions and
+ ** limitations under the License.
+ */
+
+package android.media.session;
+
+import android.media.AudioAttributes;
+import android.os.Parcel;
+import android.os.Parcelable;
+
+/**
+ * Convenience class for passing information about the audio configuration of a
+ * session. The public implementation is {@link MediaController.PlaybackInfo}.
+ *
+ * @hide
+ */
+public class ParcelableVolumeInfo implements Parcelable {
+ public int volumeType;
+ public AudioAttributes audioAttrs;
+ public int controlType;
+ public int maxVolume;
+ public int currentVolume;
+
+ public ParcelableVolumeInfo(int volumeType, AudioAttributes audioAttrs, int controlType,
+ int maxVolume,
+ int currentVolume) {
+ this.volumeType = volumeType;
+ this.audioAttrs = audioAttrs;
+ this.controlType = controlType;
+ this.maxVolume = maxVolume;
+ this.currentVolume = currentVolume;
+ }
+
+ public ParcelableVolumeInfo(Parcel from) {
+ volumeType = from.readInt();
+ controlType = from.readInt();
+ maxVolume = from.readInt();
+ currentVolume = from.readInt();
+ audioAttrs = AudioAttributes.CREATOR.createFromParcel(from);
+ }
+
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ @Override
+ public void writeToParcel(Parcel dest, int flags) {
+ dest.writeInt(volumeType);
+ dest.writeInt(controlType);
+ dest.writeInt(maxVolume);
+ dest.writeInt(currentVolume);
+ audioAttrs.writeToParcel(dest, flags);
+ }
+
+
+ public static final Parcelable.Creator<ParcelableVolumeInfo> CREATOR
+ = new Parcelable.Creator<ParcelableVolumeInfo>() {
+ @Override
+ public ParcelableVolumeInfo createFromParcel(Parcel in) {
+ return new ParcelableVolumeInfo(in);
+ }
+
+ @Override
+ public ParcelableVolumeInfo[] newArray(int size) {
+ return new ParcelableVolumeInfo[size];
+ }
+ };
+}
diff --git a/android/media/session/PlaybackState.java b/android/media/session/PlaybackState.java
new file mode 100644
index 00000000..8283c8b9
--- /dev/null
+++ b/android/media/session/PlaybackState.java
@@ -0,0 +1,1078 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.media.session;
+
+import android.annotation.DrawableRes;
+import android.annotation.IntDef;
+import android.annotation.Nullable;
+import android.media.RemoteControlClient;
+import android.os.Bundle;
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.os.SystemClock;
+import android.text.TextUtils;
+import java.util.ArrayList;
+import java.util.List;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+/**
+ * Playback state for a {@link MediaSession}. This includes a state like
+ * {@link PlaybackState#STATE_PLAYING}, the current playback position,
+ * and the current control capabilities.
+ */
+public final class PlaybackState implements Parcelable {
+ private static final String TAG = "PlaybackState";
+
+ /**
+ * @hide
+ */
+ @IntDef(flag=true, value={ACTION_STOP, ACTION_PAUSE, ACTION_PLAY, ACTION_REWIND,
+ ACTION_SKIP_TO_PREVIOUS, ACTION_SKIP_TO_NEXT, ACTION_FAST_FORWARD, ACTION_SET_RATING,
+ ACTION_SEEK_TO, ACTION_PLAY_PAUSE, ACTION_PLAY_FROM_MEDIA_ID, ACTION_PLAY_FROM_SEARCH,
+ ACTION_SKIP_TO_QUEUE_ITEM, ACTION_PLAY_FROM_URI, ACTION_PREPARE,
+ ACTION_PREPARE_FROM_MEDIA_ID, ACTION_PREPARE_FROM_SEARCH, ACTION_PREPARE_FROM_URI})
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface Actions {}
+
+ /**
+ * Indicates this session supports the stop command.
+ *
+ * @see Builder#setActions(long)
+ */
+ public static final long ACTION_STOP = 1 << 0;
+
+ /**
+ * Indicates this session supports the pause command.
+ *
+ * @see Builder#setActions(long)
+ */
+ public static final long ACTION_PAUSE = 1 << 1;
+
+ /**
+ * Indicates this session supports the play command.
+ *
+ * @see Builder#setActions(long)
+ */
+ public static final long ACTION_PLAY = 1 << 2;
+
+ /**
+ * Indicates this session supports the rewind command.
+ *
+ * @see Builder#setActions(long)
+ */
+ public static final long ACTION_REWIND = 1 << 3;
+
+ /**
+ * Indicates this session supports the previous command.
+ *
+ * @see Builder#setActions(long)
+ */
+ public static final long ACTION_SKIP_TO_PREVIOUS = 1 << 4;
+
+ /**
+ * Indicates this session supports the next command.
+ *
+ * @see Builder#setActions(long)
+ */
+ public static final long ACTION_SKIP_TO_NEXT = 1 << 5;
+
+ /**
+ * Indicates this session supports the fast forward command.
+ *
+ * @see Builder#setActions(long)
+ */
+ public static final long ACTION_FAST_FORWARD = 1 << 6;
+
+ /**
+ * Indicates this session supports the set rating command.
+ *
+ * @see Builder#setActions(long)
+ */
+ public static final long ACTION_SET_RATING = 1 << 7;
+
+ /**
+ * Indicates this session supports the seek to command.
+ *
+ * @see Builder#setActions(long)
+ */
+ public static final long ACTION_SEEK_TO = 1 << 8;
+
+ /**
+ * Indicates this session supports the play/pause toggle command.
+ *
+ * @see Builder#setActions(long)
+ */
+ public static final long ACTION_PLAY_PAUSE = 1 << 9;
+
+ /**
+ * Indicates this session supports the play from media id command.
+ *
+ * @see Builder#setActions(long)
+ */
+ public static final long ACTION_PLAY_FROM_MEDIA_ID = 1 << 10;
+
+ /**
+ * Indicates this session supports the play from search command.
+ *
+ * @see Builder#setActions(long)
+ */
+ public static final long ACTION_PLAY_FROM_SEARCH = 1 << 11;
+
+ /**
+ * Indicates this session supports the skip to queue item command.
+ *
+ * @see Builder#setActions(long)
+ */
+ public static final long ACTION_SKIP_TO_QUEUE_ITEM = 1 << 12;
+
+ /**
+ * Indicates this session supports the play from URI command.
+ *
+ * @see Builder#setActions(long)
+ */
+ public static final long ACTION_PLAY_FROM_URI = 1 << 13;
+
+ /**
+ * Indicates this session supports the prepare command.
+ *
+ * @see Builder#setActions(long)
+ */
+ public static final long ACTION_PREPARE = 1 << 14;
+
+ /**
+ * Indicates this session supports the prepare from media id command.
+ *
+ * @see Builder#setActions(long)
+ */
+ public static final long ACTION_PREPARE_FROM_MEDIA_ID = 1 << 15;
+
+ /**
+ * Indicates this session supports the prepare from search command.
+ *
+ * @see Builder#setActions(long)
+ */
+ public static final long ACTION_PREPARE_FROM_SEARCH = 1 << 16;
+
+ /**
+ * Indicates this session supports the prepare from URI command.
+ *
+ * @see Builder#setActions(long)
+ */
+ public static final long ACTION_PREPARE_FROM_URI = 1 << 17;
+
+ /**
+ * @hide
+ */
+ @IntDef({STATE_NONE, STATE_STOPPED, STATE_PAUSED, STATE_PLAYING, STATE_FAST_FORWARDING,
+ STATE_REWINDING, STATE_BUFFERING, STATE_ERROR, STATE_CONNECTING,
+ STATE_SKIPPING_TO_PREVIOUS, STATE_SKIPPING_TO_NEXT, STATE_SKIPPING_TO_QUEUE_ITEM})
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface State {}
+
+ /**
+ * This is the default playback state and indicates that no media has been
+ * added yet, or the performer has been reset and has no content to play.
+ *
+ * @see Builder#setState(int, long, float)
+ * @see Builder#setState(int, long, float, long)
+ */
+ public final static int STATE_NONE = 0;
+
+ /**
+ * State indicating this item is currently stopped.
+ *
+ * @see Builder#setState
+ */
+ public final static int STATE_STOPPED = 1;
+
+ /**
+ * State indicating this item is currently paused.
+ *
+ * @see Builder#setState
+ */
+ public final static int STATE_PAUSED = 2;
+
+ /**
+ * State indicating this item is currently playing.
+ *
+ * @see Builder#setState
+ */
+ public final static int STATE_PLAYING = 3;
+
+ /**
+ * State indicating this item is currently fast forwarding.
+ *
+ * @see Builder#setState
+ */
+ public final static int STATE_FAST_FORWARDING = 4;
+
+ /**
+ * State indicating this item is currently rewinding.
+ *
+ * @see Builder#setState
+ */
+ public final static int STATE_REWINDING = 5;
+
+ /**
+ * State indicating this item is currently buffering and will begin playing
+ * when enough data has buffered.
+ *
+ * @see Builder#setState
+ */
+ public final static int STATE_BUFFERING = 6;
+
+ /**
+ * State indicating this item is currently in an error state. The error
+ * message should also be set when entering this state.
+ *
+ * @see Builder#setState
+ */
+ public final static int STATE_ERROR = 7;
+
+ /**
+ * State indicating the class doing playback is currently connecting to a
+ * new destination. Depending on the implementation you may return to the previous
+ * state when the connection finishes or enter {@link #STATE_NONE}.
+ * If the connection failed {@link #STATE_ERROR} should be used.
+ *
+ * @see Builder#setState
+ */
+ public final static int STATE_CONNECTING = 8;
+
+ /**
+ * State indicating the player is currently skipping to the previous item.
+ *
+ * @see Builder#setState
+ */
+ public final static int STATE_SKIPPING_TO_PREVIOUS = 9;
+
+ /**
+ * State indicating the player is currently skipping to the next item.
+ *
+ * @see Builder#setState
+ */
+ public final static int STATE_SKIPPING_TO_NEXT = 10;
+
+ /**
+ * State indicating the player is currently skipping to a specific item in
+ * the queue.
+ *
+ * @see Builder#setState
+ */
+ public final static int STATE_SKIPPING_TO_QUEUE_ITEM = 11;
+
+ /**
+ * Use this value for the position to indicate the position is not known.
+ */
+ public final static long PLAYBACK_POSITION_UNKNOWN = -1;
+
+ private final int mState;
+ private final long mPosition;
+ private final long mBufferedPosition;
+ private final float mSpeed;
+ private final long mActions;
+ private List<PlaybackState.CustomAction> mCustomActions;
+ private final CharSequence mErrorMessage;
+ private final long mUpdateTime;
+ private final long mActiveItemId;
+ private final Bundle mExtras;
+
+ private PlaybackState(int state, long position, long updateTime, float speed,
+ long bufferedPosition, long transportControls,
+ List<PlaybackState.CustomAction> customActions, long activeItemId,
+ CharSequence error, Bundle extras) {
+ mState = state;
+ mPosition = position;
+ mSpeed = speed;
+ mUpdateTime = updateTime;
+ mBufferedPosition = bufferedPosition;
+ mActions = transportControls;
+ mCustomActions = new ArrayList<>(customActions);
+ mActiveItemId = activeItemId;
+ mErrorMessage = error;
+ mExtras = extras;
+ }
+
+ private PlaybackState(Parcel in) {
+ mState = in.readInt();
+ mPosition = in.readLong();
+ mSpeed = in.readFloat();
+ mUpdateTime = in.readLong();
+ mBufferedPosition = in.readLong();
+ mActions = in.readLong();
+ mCustomActions = in.createTypedArrayList(CustomAction.CREATOR);
+ mActiveItemId = in.readLong();
+ mErrorMessage = in.readCharSequence();
+ mExtras = in.readBundle();
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder bob = new StringBuilder("PlaybackState {");
+ bob.append("state=").append(mState);
+ bob.append(", position=").append(mPosition);
+ bob.append(", buffered position=").append(mBufferedPosition);
+ bob.append(", speed=").append(mSpeed);
+ bob.append(", updated=").append(mUpdateTime);
+ bob.append(", actions=").append(mActions);
+ bob.append(", custom actions=").append(mCustomActions);
+ bob.append(", active item id=").append(mActiveItemId);
+ bob.append(", error=").append(mErrorMessage);
+ bob.append("}");
+ return bob.toString();
+ }
+
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ @Override
+ public void writeToParcel(Parcel dest, int flags) {
+ dest.writeInt(mState);
+ dest.writeLong(mPosition);
+ dest.writeFloat(mSpeed);
+ dest.writeLong(mUpdateTime);
+ dest.writeLong(mBufferedPosition);
+ dest.writeLong(mActions);
+ dest.writeTypedList(mCustomActions);
+ dest.writeLong(mActiveItemId);
+ dest.writeCharSequence(mErrorMessage);
+ dest.writeBundle(mExtras);
+ }
+
+ /**
+ * Get the current state of playback. One of the following:
+ * <ul>
+ * <li> {@link PlaybackState#STATE_NONE}</li>
+ * <li> {@link PlaybackState#STATE_STOPPED}</li>
+ * <li> {@link PlaybackState#STATE_PLAYING}</li>
+ * <li> {@link PlaybackState#STATE_PAUSED}</li>
+ * <li> {@link PlaybackState#STATE_FAST_FORWARDING}</li>
+ * <li> {@link PlaybackState#STATE_REWINDING}</li>
+ * <li> {@link PlaybackState#STATE_BUFFERING}</li>
+ * <li> {@link PlaybackState#STATE_ERROR}</li>
+ * <li> {@link PlaybackState#STATE_CONNECTING}</li>
+ * <li> {@link PlaybackState#STATE_SKIPPING_TO_PREVIOUS}</li>
+ * <li> {@link PlaybackState#STATE_SKIPPING_TO_NEXT}</li>
+ * <li> {@link PlaybackState#STATE_SKIPPING_TO_QUEUE_ITEM}</li>
+ * </ul>
+ */
+ @State
+ public int getState() {
+ return mState;
+ }
+
+ /**
+ * Get the current playback position in ms.
+ */
+ public long getPosition() {
+ return mPosition;
+ }
+
+ /**
+ * Get the current buffered position in ms. This is the farthest playback
+ * point that can be reached from the current position using only buffered
+ * content.
+ */
+ public long getBufferedPosition() {
+ return mBufferedPosition;
+ }
+
+ /**
+ * Get the current playback speed as a multiple of normal playback. This
+ * should be negative when rewinding. A value of 1 means normal playback and
+ * 0 means paused.
+ *
+ * @return The current speed of playback.
+ */
+ public float getPlaybackSpeed() {
+ return mSpeed;
+ }
+
+ /**
+ * Get the current actions available on this session. This should use a
+ * bitmask of the available actions.
+ * <ul>
+ * <li> {@link PlaybackState#ACTION_SKIP_TO_PREVIOUS}</li>
+ * <li> {@link PlaybackState#ACTION_REWIND}</li>
+ * <li> {@link PlaybackState#ACTION_PLAY}</li>
+ * <li> {@link PlaybackState#ACTION_PAUSE}</li>
+ * <li> {@link PlaybackState#ACTION_STOP}</li>
+ * <li> {@link PlaybackState#ACTION_FAST_FORWARD}</li>
+ * <li> {@link PlaybackState#ACTION_SKIP_TO_NEXT}</li>
+ * <li> {@link PlaybackState#ACTION_SEEK_TO}</li>
+ * <li> {@link PlaybackState#ACTION_SET_RATING}</li>
+ * <li> {@link PlaybackState#ACTION_PLAY_PAUSE}</li>
+ * <li> {@link PlaybackState#ACTION_PLAY_FROM_MEDIA_ID}</li>
+ * <li> {@link PlaybackState#ACTION_PLAY_FROM_SEARCH}</li>
+ * <li> {@link PlaybackState#ACTION_SKIP_TO_QUEUE_ITEM}</li>
+ * <li> {@link PlaybackState#ACTION_PLAY_FROM_URI}</li>
+ * <li> {@link PlaybackState#ACTION_PREPARE}</li>
+ * <li> {@link PlaybackState#ACTION_PREPARE_FROM_MEDIA_ID}</li>
+ * <li> {@link PlaybackState#ACTION_PREPARE_FROM_SEARCH}</li>
+ * <li> {@link PlaybackState#ACTION_PREPARE_FROM_URI}</li>
+ * </ul>
+ */
+ @Actions
+ public long getActions() {
+ return mActions;
+ }
+
+ /**
+ * Get the list of custom actions.
+ */
+ public List<PlaybackState.CustomAction> getCustomActions() {
+ return mCustomActions;
+ }
+
+ /**
+ * Get a user readable error message. This should be set when the state is
+ * {@link PlaybackState#STATE_ERROR}.
+ */
+ public CharSequence getErrorMessage() {
+ return mErrorMessage;
+ }
+
+ /**
+ * Get the elapsed real time at which position was last updated. If the
+ * position has never been set this will return 0.
+ *
+ * @return The last time the position was updated.
+ */
+ public long getLastPositionUpdateTime() {
+ return mUpdateTime;
+ }
+
+ /**
+ * Get the id of the currently active item in the queue. If there is no
+ * queue or a queue is not supported by the session this will be
+ * {@link MediaSession.QueueItem#UNKNOWN_ID}.
+ *
+ * @return The id of the currently active item in the queue or
+ * {@link MediaSession.QueueItem#UNKNOWN_ID}.
+ */
+ public long getActiveQueueItemId() {
+ return mActiveItemId;
+ }
+
+ /**
+ * Get any custom extras that were set on this playback state.
+ *
+ * @return The extras for this state or null.
+ */
+ public @Nullable Bundle getExtras() {
+ return mExtras;
+ }
+
+ /**
+ * Get the {@link PlaybackState} state for the given
+ * {@link RemoteControlClient} state.
+ *
+ * @param rccState The state used by {@link RemoteControlClient}.
+ * @return The equivalent state used by {@link PlaybackState}.
+ * @hide
+ */
+ public static int getStateFromRccState(int rccState) {
+ switch (rccState) {
+ case RemoteControlClient.PLAYSTATE_BUFFERING:
+ return STATE_BUFFERING;
+ case RemoteControlClient.PLAYSTATE_ERROR:
+ return STATE_ERROR;
+ case RemoteControlClient.PLAYSTATE_FAST_FORWARDING:
+ return STATE_FAST_FORWARDING;
+ case RemoteControlClient.PLAYSTATE_NONE:
+ return STATE_NONE;
+ case RemoteControlClient.PLAYSTATE_PAUSED:
+ return STATE_PAUSED;
+ case RemoteControlClient.PLAYSTATE_PLAYING:
+ return STATE_PLAYING;
+ case RemoteControlClient.PLAYSTATE_REWINDING:
+ return STATE_REWINDING;
+ case RemoteControlClient.PLAYSTATE_SKIPPING_BACKWARDS:
+ return STATE_SKIPPING_TO_PREVIOUS;
+ case RemoteControlClient.PLAYSTATE_SKIPPING_FORWARDS:
+ return STATE_SKIPPING_TO_NEXT;
+ case RemoteControlClient.PLAYSTATE_STOPPED:
+ return STATE_STOPPED;
+ default:
+ return -1;
+ }
+ }
+
+ /**
+ * Get the {@link RemoteControlClient} state for the given
+ * {@link PlaybackState} state.
+ *
+ * @param state The state used by {@link PlaybackState}.
+ * @return The equivalent state used by {@link RemoteControlClient}.
+ * @hide
+ */
+ public static int getRccStateFromState(int state) {
+ switch (state) {
+ case STATE_BUFFERING:
+ return RemoteControlClient.PLAYSTATE_BUFFERING;
+ case STATE_ERROR:
+ return RemoteControlClient.PLAYSTATE_ERROR;
+ case STATE_FAST_FORWARDING:
+ return RemoteControlClient.PLAYSTATE_FAST_FORWARDING;
+ case STATE_NONE:
+ return RemoteControlClient.PLAYSTATE_NONE;
+ case STATE_PAUSED:
+ return RemoteControlClient.PLAYSTATE_PAUSED;
+ case STATE_PLAYING:
+ return RemoteControlClient.PLAYSTATE_PLAYING;
+ case STATE_REWINDING:
+ return RemoteControlClient.PLAYSTATE_REWINDING;
+ case STATE_SKIPPING_TO_PREVIOUS:
+ return RemoteControlClient.PLAYSTATE_SKIPPING_BACKWARDS;
+ case STATE_SKIPPING_TO_NEXT:
+ return RemoteControlClient.PLAYSTATE_SKIPPING_FORWARDS;
+ case STATE_STOPPED:
+ return RemoteControlClient.PLAYSTATE_STOPPED;
+ default:
+ return -1;
+ }
+ }
+
+ /**
+ * @hide
+ */
+ public static long getActionsFromRccControlFlags(int rccFlags) {
+ long actions = 0;
+ long flag = 1;
+ while (flag <= rccFlags) {
+ if ((flag & rccFlags) != 0) {
+ actions |= getActionForRccFlag((int) flag);
+ }
+ flag = flag << 1;
+ }
+ return actions;
+ }
+
+ /**
+ * @hide
+ */
+ public static int getRccControlFlagsFromActions(long actions) {
+ int rccFlags = 0;
+ long action = 1;
+ while (action <= actions && action < Integer.MAX_VALUE) {
+ if ((action & actions) != 0) {
+ rccFlags |= getRccFlagForAction(action);
+ }
+ action = action << 1;
+ }
+ return rccFlags;
+ }
+
+ private static long getActionForRccFlag(int flag) {
+ switch (flag) {
+ case RemoteControlClient.FLAG_KEY_MEDIA_PREVIOUS:
+ return ACTION_SKIP_TO_PREVIOUS;
+ case RemoteControlClient.FLAG_KEY_MEDIA_REWIND:
+ return ACTION_REWIND;
+ case RemoteControlClient.FLAG_KEY_MEDIA_PLAY:
+ return ACTION_PLAY;
+ case RemoteControlClient.FLAG_KEY_MEDIA_PLAY_PAUSE:
+ return ACTION_PLAY_PAUSE;
+ case RemoteControlClient.FLAG_KEY_MEDIA_PAUSE:
+ return ACTION_PAUSE;
+ case RemoteControlClient.FLAG_KEY_MEDIA_STOP:
+ return ACTION_STOP;
+ case RemoteControlClient.FLAG_KEY_MEDIA_FAST_FORWARD:
+ return ACTION_FAST_FORWARD;
+ case RemoteControlClient.FLAG_KEY_MEDIA_NEXT:
+ return ACTION_SKIP_TO_NEXT;
+ case RemoteControlClient.FLAG_KEY_MEDIA_POSITION_UPDATE:
+ return ACTION_SEEK_TO;
+ case RemoteControlClient.FLAG_KEY_MEDIA_RATING:
+ return ACTION_SET_RATING;
+ }
+ return 0;
+ }
+
+ private static int getRccFlagForAction(long action) {
+ // We only care about the lower set of actions that can map to rcc
+ // flags.
+ int testAction = action < Integer.MAX_VALUE ? (int) action : 0;
+ switch (testAction) {
+ case (int) ACTION_SKIP_TO_PREVIOUS:
+ return RemoteControlClient.FLAG_KEY_MEDIA_PREVIOUS;
+ case (int) ACTION_REWIND:
+ return RemoteControlClient.FLAG_KEY_MEDIA_REWIND;
+ case (int) ACTION_PLAY:
+ return RemoteControlClient.FLAG_KEY_MEDIA_PLAY;
+ case (int) ACTION_PLAY_PAUSE:
+ return RemoteControlClient.FLAG_KEY_MEDIA_PLAY_PAUSE;
+ case (int) ACTION_PAUSE:
+ return RemoteControlClient.FLAG_KEY_MEDIA_PAUSE;
+ case (int) ACTION_STOP:
+ return RemoteControlClient.FLAG_KEY_MEDIA_STOP;
+ case (int) ACTION_FAST_FORWARD:
+ return RemoteControlClient.FLAG_KEY_MEDIA_FAST_FORWARD;
+ case (int) ACTION_SKIP_TO_NEXT:
+ return RemoteControlClient.FLAG_KEY_MEDIA_NEXT;
+ case (int) ACTION_SEEK_TO:
+ return RemoteControlClient.FLAG_KEY_MEDIA_POSITION_UPDATE;
+ case (int) ACTION_SET_RATING:
+ return RemoteControlClient.FLAG_KEY_MEDIA_RATING;
+ }
+ return 0;
+ }
+
+ public static final Parcelable.Creator<PlaybackState> CREATOR =
+ new Parcelable.Creator<PlaybackState>() {
+ @Override
+ public PlaybackState createFromParcel(Parcel in) {
+ return new PlaybackState(in);
+ }
+
+ @Override
+ public PlaybackState[] newArray(int size) {
+ return new PlaybackState[size];
+ }
+ };
+
+ /**
+ * {@link PlaybackState.CustomAction CustomActions} can be used to extend the capabilities of
+ * the standard transport controls by exposing app specific actions to
+ * {@link MediaController MediaControllers}.
+ */
+ public static final class CustomAction implements Parcelable {
+ private final String mAction;
+ private final CharSequence mName;
+ private final int mIcon;
+ private final Bundle mExtras;
+
+ /**
+ * Use {@link PlaybackState.CustomAction.Builder#build()}.
+ */
+ private CustomAction(String action, CharSequence name, int icon, Bundle extras) {
+ mAction = action;
+ mName = name;
+ mIcon = icon;
+ mExtras = extras;
+ }
+
+ private CustomAction(Parcel in) {
+ mAction = in.readString();
+ mName = TextUtils.CHAR_SEQUENCE_CREATOR.createFromParcel(in);
+ mIcon = in.readInt();
+ mExtras = in.readBundle();
+ }
+
+ @Override
+ public void writeToParcel(Parcel dest, int flags) {
+ dest.writeString(mAction);
+ TextUtils.writeToParcel(mName, dest, flags);
+ dest.writeInt(mIcon);
+ dest.writeBundle(mExtras);
+ }
+
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ public static final Parcelable.Creator<PlaybackState.CustomAction> CREATOR
+ = new Parcelable.Creator<PlaybackState.CustomAction>() {
+
+ @Override
+ public PlaybackState.CustomAction createFromParcel(Parcel p) {
+ return new PlaybackState.CustomAction(p);
+ }
+
+ @Override
+ public PlaybackState.CustomAction[] newArray(int size) {
+ return new PlaybackState.CustomAction[size];
+ }
+ };
+
+ /**
+ * Returns the action of the {@link CustomAction}.
+ *
+ * @return The action of the {@link CustomAction}.
+ */
+ public String getAction() {
+ return mAction;
+ }
+
+ /**
+ * Returns the display name of this action. e.g. "Favorite"
+ *
+ * @return The display name of this {@link CustomAction}.
+ */
+ public CharSequence getName() {
+ return mName;
+ }
+
+ /**
+ * Returns the resource id of the icon in the {@link MediaSession MediaSession's} package.
+ *
+ * @return The resource id of the icon in the {@link MediaSession MediaSession's} package.
+ */
+ public int getIcon() {
+ return mIcon;
+ }
+
+ /**
+ * Returns extras which provide additional application-specific information about the
+ * action, or null if none. These arguments are meant to be consumed by a
+ * {@link MediaController} if it knows how to handle them.
+ *
+ * @return Optional arguments for the {@link CustomAction}.
+ */
+ public Bundle getExtras() {
+ return mExtras;
+ }
+
+ @Override
+ public String toString() {
+ return "Action:" +
+ "mName='" + mName +
+ ", mIcon=" + mIcon +
+ ", mExtras=" + mExtras;
+ }
+
+        /**
+         * Builder for {@link CustomAction} objects.
+         */
+        public static final class Builder {
+            private final String mAction;
+            private final CharSequence mName;
+            private final int mIcon;
+            private Bundle mExtras;
+
+            /**
+             * Creates a {@link CustomAction} builder with the id, name, and icon set.
+             *
+             * @param action The action of the {@link CustomAction}.
+             * @param name The display name of the {@link CustomAction}. This name will be displayed
+             *            along side the action if the UI supports it.
+             * @param icon The icon resource id of the {@link CustomAction}. This resource id
+             *            must be in the same package as the {@link MediaSession}. It will be
+             *            displayed with the custom action if the UI supports it.
+             */
+            public Builder(String action, CharSequence name, @DrawableRes int icon) {
+                // All three attributes are mandatory; only the extras are optional.
+                if (TextUtils.isEmpty(action)) {
+                    throw new IllegalArgumentException(
+                            "You must specify an action to build a CustomAction.");
+                }
+                if (TextUtils.isEmpty(name)) {
+                    throw new IllegalArgumentException(
+                            "You must specify a name to build a CustomAction.");
+                }
+                if (icon == 0) {
+                    throw new IllegalArgumentException(
+                            "You must specify an icon resource id to build a CustomAction.");
+                }
+                mIcon = icon;
+                mName = name;
+                mAction = action;
+            }
+
+            /**
+             * Set optional extras for the {@link CustomAction}. These extras are meant to be
+             * consumed by a {@link MediaController} if it knows how to handle them.
+             * Keys should be fully qualified (e.g. "com.example.MY_ARG") to avoid collisions.
+             *
+             * @param extras Optional extras for the {@link CustomAction}.
+             * @return this.
+             */
+            public Builder setExtras(Bundle extras) {
+                mExtras = extras;
+                return this;
+            }
+
+            /**
+             * Build and return the {@link CustomAction} instance with the specified values.
+             *
+             * @return A new {@link CustomAction} instance.
+             */
+            public CustomAction build() {
+                return new CustomAction(mAction, mName, mIcon, mExtras);
+            }
+        }
+ }
+
+    /**
+     * Builder for {@link PlaybackState} objects.
+     */
+    public static final class Builder {
+        private final List<PlaybackState.CustomAction> mCustomActions = new ArrayList<>();
+
+        private int mState;
+        private long mPosition;
+        private long mBufferedPosition;
+        private float mSpeed;
+        private long mActions;
+        private CharSequence mErrorMessage;
+        private long mUpdateTime;
+        // Defaults to "no active queue item" until setActiveQueueItemId() is called.
+        private long mActiveItemId = MediaSession.QueueItem.UNKNOWN_ID;
+        private Bundle mExtras;
+
+        /**
+         * Creates an initially empty state builder.
+         */
+        public Builder() {
+        }
+
+        /**
+         * Creates a builder with the same initial values as those in the given
+         * state.
+         *
+         * @param from The state to use for initializing the builder.
+         */
+        public Builder(PlaybackState from) {
+            if (from == null) {
+                // A null source leaves the builder in its default (empty) state.
+                return;
+            }
+            mState = from.mState;
+            mPosition = from.mPosition;
+            mBufferedPosition = from.mBufferedPosition;
+            mSpeed = from.mSpeed;
+            mActions = from.mActions;
+            if (from.mCustomActions != null) {
+                mCustomActions.addAll(from.mCustomActions);
+            }
+            mErrorMessage = from.mErrorMessage;
+            mUpdateTime = from.mUpdateTime;
+            mActiveItemId = from.mActiveItemId;
+            mExtras = from.mExtras;
+        }
+
+        /**
+         * Set the current state of playback.
+         * <p>
+         * The position must be in ms and indicates the current playback
+         * position within the item. If the position is unknown use
+         * {@link #PLAYBACK_POSITION_UNKNOWN}. When not using an unknown
+         * position the time at which the position was updated must be provided.
+         * It is okay to use {@link SystemClock#elapsedRealtime()} if the
+         * current position was just retrieved.
+         * <p>
+         * The speed is a multiple of normal playback and should be 0 when
+         * paused and negative when rewinding. Normal playback speed is 1.0.
+         * <p>
+         * The state must be one of the following:
+         * <ul>
+         * <li> {@link PlaybackState#STATE_NONE}</li>
+         * <li> {@link PlaybackState#STATE_STOPPED}</li>
+         * <li> {@link PlaybackState#STATE_PLAYING}</li>
+         * <li> {@link PlaybackState#STATE_PAUSED}</li>
+         * <li> {@link PlaybackState#STATE_FAST_FORWARDING}</li>
+         * <li> {@link PlaybackState#STATE_REWINDING}</li>
+         * <li> {@link PlaybackState#STATE_BUFFERING}</li>
+         * <li> {@link PlaybackState#STATE_ERROR}</li>
+         * <li> {@link PlaybackState#STATE_CONNECTING}</li>
+         * <li> {@link PlaybackState#STATE_SKIPPING_TO_PREVIOUS}</li>
+         * <li> {@link PlaybackState#STATE_SKIPPING_TO_NEXT}</li>
+         * <li> {@link PlaybackState#STATE_SKIPPING_TO_QUEUE_ITEM}</li>
+         * </ul>
+         *
+         * @param state The current state of playback.
+         * @param position The position in the current item in ms.
+         * @param playbackSpeed The current speed of playback as a multiple of
+         *            normal playback.
+         * @param updateTime The time in the {@link SystemClock#elapsedRealtime}
+         *            timebase that the position was updated at.
+         * @return this
+         */
+        public Builder setState(@State int state, long position, float playbackSpeed,
+                long updateTime) {
+            mState = state;
+            mPosition = position;
+            mUpdateTime = updateTime;
+            mSpeed = playbackSpeed;
+            return this;
+        }
+
+        /**
+         * Set the current state of playback.
+         * <p>
+         * The position must be in ms and indicates the current playback
+         * position within the item. If the position is unknown use
+         * {@link #PLAYBACK_POSITION_UNKNOWN}. The update time will be set to
+         * the current {@link SystemClock#elapsedRealtime()}.
+         * <p>
+         * The speed is a multiple of normal playback and should be 0 when
+         * paused and negative when rewinding. Normal playback speed is 1.0.
+         * <p>
+         * The state must be one of the following:
+         * <ul>
+         * <li> {@link PlaybackState#STATE_NONE}</li>
+         * <li> {@link PlaybackState#STATE_STOPPED}</li>
+         * <li> {@link PlaybackState#STATE_PLAYING}</li>
+         * <li> {@link PlaybackState#STATE_PAUSED}</li>
+         * <li> {@link PlaybackState#STATE_FAST_FORWARDING}</li>
+         * <li> {@link PlaybackState#STATE_REWINDING}</li>
+         * <li> {@link PlaybackState#STATE_BUFFERING}</li>
+         * <li> {@link PlaybackState#STATE_ERROR}</li>
+         * <li> {@link PlaybackState#STATE_CONNECTING}</li>
+         * <li> {@link PlaybackState#STATE_SKIPPING_TO_PREVIOUS}</li>
+         * <li> {@link PlaybackState#STATE_SKIPPING_TO_NEXT}</li>
+         * <li> {@link PlaybackState#STATE_SKIPPING_TO_QUEUE_ITEM}</li>
+         * </ul>
+         *
+         * @param state The current state of playback.
+         * @param position The position in the current item in ms.
+         * @param playbackSpeed The current speed of playback as a multiple of
+         *            normal playback.
+         * @return this
+         */
+        public Builder setState(@State int state, long position, float playbackSpeed) {
+            // Convenience overload: stamps the update time with "now".
+            return setState(state, position, playbackSpeed, SystemClock.elapsedRealtime());
+        }
+
+        /**
+         * Set the current actions available on this session. This should use a
+         * bitmask of possible actions.
+         * <ul>
+         * <li> {@link PlaybackState#ACTION_SKIP_TO_PREVIOUS}</li>
+         * <li> {@link PlaybackState#ACTION_REWIND}</li>
+         * <li> {@link PlaybackState#ACTION_PLAY}</li>
+         * <li> {@link PlaybackState#ACTION_PAUSE}</li>
+         * <li> {@link PlaybackState#ACTION_STOP}</li>
+         * <li> {@link PlaybackState#ACTION_FAST_FORWARD}</li>
+         * <li> {@link PlaybackState#ACTION_SKIP_TO_NEXT}</li>
+         * <li> {@link PlaybackState#ACTION_SEEK_TO}</li>
+         * <li> {@link PlaybackState#ACTION_SET_RATING}</li>
+         * <li> {@link PlaybackState#ACTION_PLAY_PAUSE}</li>
+         * <li> {@link PlaybackState#ACTION_PLAY_FROM_MEDIA_ID}</li>
+         * <li> {@link PlaybackState#ACTION_PLAY_FROM_SEARCH}</li>
+         * <li> {@link PlaybackState#ACTION_SKIP_TO_QUEUE_ITEM}</li>
+         * <li> {@link PlaybackState#ACTION_PLAY_FROM_URI}</li>
+         * <li> {@link PlaybackState#ACTION_PREPARE}</li>
+         * <li> {@link PlaybackState#ACTION_PREPARE_FROM_MEDIA_ID}</li>
+         * <li> {@link PlaybackState#ACTION_PREPARE_FROM_SEARCH}</li>
+         * <li> {@link PlaybackState#ACTION_PREPARE_FROM_URI}</li>
+         * </ul>
+         *
+         * @param actions The set of actions allowed.
+         * @return this
+         */
+        public Builder setActions(@Actions long actions) {
+            mActions = actions;
+            return this;
+        }
+
+        /**
+         * Add a custom action to the playback state. Actions can be used to
+         * expose additional functionality to {@link MediaController
+         * MediaControllers} beyond what is offered by the standard transport
+         * controls.
+         * <p>
+         * e.g. start a radio station based on the current item or skip ahead by
+         * 30 seconds.
+         *
+         * @param action An identifier for this action. It can be sent back to
+         *            the {@link MediaSession} through
+         *            {@link MediaController.TransportControls#sendCustomAction(String, Bundle)}.
+         * @param name The display name for the action. If text is shown with
+         *            the action or used for accessibility, this is what should
+         *            be used.
+         * @param icon The resource action of the icon that should be displayed
+         *            for the action. The resource should be in the package of
+         *            the {@link MediaSession}.
+         * @return this
+         */
+        public Builder addCustomAction(String action, String name, int icon) {
+            // Convenience overload with no extras.
+            return addCustomAction(new PlaybackState.CustomAction(action, name, icon, null));
+        }
+
+        /**
+         * Add a custom action to the playback state. Actions can be used to expose additional
+         * functionality to {@link MediaController MediaControllers} beyond what is offered by the
+         * standard transport controls.
+         * <p>
+         * An example of an action would be to start a radio station based on the current item
+         * or to skip ahead by 30 seconds.
+         *
+         * @param customAction The custom action to add to the {@link PlaybackState}.
+         * @return this
+         */
+        public Builder addCustomAction(PlaybackState.CustomAction customAction) {
+            if (customAction == null) {
+                throw new IllegalArgumentException(
+                        "You may not add a null CustomAction to PlaybackState.");
+            }
+            mCustomActions.add(customAction);
+            return this;
+        }
+
+        /**
+         * Set the current buffered position in ms. This is the farthest
+         * playback point that can be reached from the current position using
+         * only buffered content.
+         *
+         * @param bufferedPosition The position in ms that playback is buffered
+         *            to.
+         * @return this
+         */
+        public Builder setBufferedPosition(long bufferedPosition) {
+            mBufferedPosition = bufferedPosition;
+            return this;
+        }
+
+        /**
+         * Set the active item in the play queue by specifying its id. The
+         * default value is {@link MediaSession.QueueItem#UNKNOWN_ID}
+         *
+         * @param id The id of the active item.
+         * @return this
+         */
+        public Builder setActiveQueueItemId(long id) {
+            mActiveItemId = id;
+            return this;
+        }
+
+        /**
+         * Set a user readable error message. This should be set when the state
+         * is {@link PlaybackState#STATE_ERROR}.
+         *
+         * @param error The error message for display to the user.
+         * @return this
+         */
+        public Builder setErrorMessage(CharSequence error) {
+            mErrorMessage = error;
+            return this;
+        }
+
+        /**
+         * Set any custom extras to be included with the playback state.
+         *
+         * @param extras The extras to include.
+         * @return this
+         */
+        public Builder setExtras(Bundle extras) {
+            mExtras = extras;
+            return this;
+        }
+
+        /**
+         * Build and return the {@link PlaybackState} instance with these
+         * values.
+         *
+         * @return A new state instance.
+         */
+        public PlaybackState build() {
+            return new PlaybackState(mState, mPosition, mUpdateTime, mSpeed, mBufferedPosition,
+                    mActions, mCustomActions, mActiveItemId, mErrorMessage, mExtras);
+        }
+    }
+}
diff --git a/android/media/soundtrigger/SoundTriggerDetector.java b/android/media/soundtrigger/SoundTriggerDetector.java
new file mode 100644
index 00000000..7969ee75
--- /dev/null
+++ b/android/media/soundtrigger/SoundTriggerDetector.java
@@ -0,0 +1,393 @@
+/**
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.soundtrigger;
+import static android.hardware.soundtrigger.SoundTrigger.STATUS_OK;
+
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.annotation.RequiresPermission;
+import android.annotation.SystemApi;
+import android.hardware.soundtrigger.IRecognitionStatusCallback;
+import android.hardware.soundtrigger.SoundTrigger;
+import android.hardware.soundtrigger.SoundTrigger.RecognitionConfig;
+import android.media.AudioFormat;
+import android.os.Handler;
+import android.os.Looper;
+import android.os.Message;
+import android.os.ParcelUuid;
+import android.os.RemoteException;
+import android.util.Slog;
+
+import com.android.internal.app.ISoundTriggerService;
+
+import java.io.PrintWriter;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.util.UUID;
+
+/**
+ * A class that allows interaction with the actual sound trigger detection on the system.
+ * Sound trigger detection refers to detectors that match generic sound patterns that are
+ * not voice-based. The voice-based recognition models should utilize the {@link
+ * VoiceInteractionService} instead. Access to this class is protected by a permission
+ * granted only to system or privileged apps.
+ *
+ * @hide
+ */
+@SystemApi
+public final class SoundTriggerDetector {
+    private static final boolean DBG = false;
+    private static final String TAG = "SoundTriggerDetector";
+
+    // Message codes dispatched by RecognitionCallback onto mHandler.
+    private static final int MSG_AVAILABILITY_CHANGED = 1;
+    private static final int MSG_SOUND_TRIGGER_DETECTED = 2;
+    private static final int MSG_DETECTION_ERROR = 3;
+    private static final int MSG_DETECTION_PAUSE = 4;
+    private static final int MSG_DETECTION_RESUME = 5;
+
+    private final Object mLock = new Object();
+
+    private final ISoundTriggerService mSoundTriggerService;
+    private final UUID mSoundModelId;
+    private final Callback mCallback;
+    private final Handler mHandler;
+    private final RecognitionCallback mRecognitionCallback;
+
+    /** @hide */
+    @Retention(RetentionPolicy.SOURCE)
+    @IntDef(flag = true,
+            value = {
+                RECOGNITION_FLAG_NONE,
+                RECOGNITION_FLAG_CAPTURE_TRIGGER_AUDIO,
+                RECOGNITION_FLAG_ALLOW_MULTIPLE_TRIGGERS
+            })
+    public @interface RecognitionFlags {}
+
+    /**
+     * Empty flag for {@link #startRecognition(int)}.
+     *
+     * @hide
+     */
+    public static final int RECOGNITION_FLAG_NONE = 0;
+
+    /**
+     * Recognition flag for {@link #startRecognition(int)} that indicates
+     * whether the trigger audio for hotword needs to be captured.
+     */
+    public static final int RECOGNITION_FLAG_CAPTURE_TRIGGER_AUDIO = 0x1;
+
+    /**
+     * Recognition flag for {@link #startRecognition(int)} that indicates
+     * whether the recognition should keep going on even after the
+     * model triggers.
+     * If this flag is specified, it's possible to get multiple
+     * triggers after a call to {@link #startRecognition(int)}, if the model
+     * triggers multiple times.
+     * When this isn't specified, the default behavior is to stop recognition once the
+     * trigger happens, until the caller starts recognition again.
+     */
+    public static final int RECOGNITION_FLAG_ALLOW_MULTIPLE_TRIGGERS = 0x2;
+
+    /**
+     * Additional payload for {@link Callback#onDetected}.
+     */
+    public static class EventPayload {
+        // True when mData holds the audio that triggered the detection.
+        private final boolean mTriggerAvailable;
+
+        // Indicates if {@code captureSession} can be used to continue capturing more audio
+        // from the DSP hardware.
+        private final boolean mCaptureAvailable;
+        // The session to use when attempting to capture more audio from the DSP hardware.
+        private final int mCaptureSession;
+        private final AudioFormat mAudioFormat;
+        // Raw data associated with the event.
+        // This is the audio that triggered the keyphrase if {@code isTriggerAudio} is true.
+        private final byte[] mData;
+
+        private EventPayload(boolean triggerAvailable, boolean captureAvailable,
+                AudioFormat audioFormat, int captureSession, byte[] data) {
+            mTriggerAvailable = triggerAvailable;
+            mCaptureAvailable = captureAvailable;
+            mCaptureSession = captureSession;
+            mAudioFormat = audioFormat;
+            mData = data;
+        }
+
+        /**
+         * Gets the format of the audio obtained using {@link #getTriggerAudio()}.
+         * May be null if there's no audio present.
+         */
+        @Nullable
+        public AudioFormat getCaptureAudioFormat() {
+            return mAudioFormat;
+        }
+
+        /**
+         * Gets the raw audio that triggered the detector.
+         * This may be null if the trigger audio isn't available.
+         * If non-null, the format of the audio can be obtained by calling
+         * {@link #getCaptureAudioFormat()}.
+         *
+         * @see AlwaysOnHotwordDetector#RECOGNITION_FLAG_CAPTURE_TRIGGER_AUDIO
+         */
+        @Nullable
+        public byte[] getTriggerAudio() {
+            if (mTriggerAvailable) {
+                return mData;
+            } else {
+                return null;
+            }
+        }
+
+        /**
+         * Gets the opaque data passed from the detection engine for the event.
+         * This may be null if it was not populated by the engine, or if the data is known to
+         * contain the trigger audio.
+         *
+         * @see #getTriggerAudio
+         *
+         * @hide
+         */
+        @Nullable
+        public byte[] getData() {
+            // mData is exposed either here or via getTriggerAudio(), never both.
+            if (!mTriggerAvailable) {
+                return mData;
+            } else {
+                return null;
+            }
+        }
+
+        /**
+         * Gets the session ID to start a capture from the DSP.
+         * This may be null if streaming capture isn't possible.
+         * If non-null, the format of the audio that can be captured can be
+         * obtained using {@link #getCaptureAudioFormat()}.
+         *
+         * TODO: Candidate for Public API when the API to start capture with a session ID
+         * is made public.
+         *
+         * TODO: Add this to {@link #getCaptureAudioFormat()}:
+         * "Gets the format of the audio obtained using {@link #getTriggerAudio()}
+         * or {@link #getCaptureSession()}. May be null if no audio can be obtained
+         * for either the trigger or a streaming session."
+         *
+         * TODO: Should this return a known invalid value instead?
+         *
+         * @hide
+         */
+        @Nullable
+        public Integer getCaptureSession() {
+            if (mCaptureAvailable) {
+                return mCaptureSession;
+            } else {
+                return null;
+            }
+        }
+    }
+
+    /** Client callbacks for detector lifecycle and detection events. */
+    public static abstract class Callback {
+        /**
+         * Called when the availability of the sound model changes.
+         */
+        public abstract void onAvailabilityChanged(int status);
+
+        /**
+         * Called when the sound model has triggered (such as when it matched a
+         * given sound pattern).
+         */
+        public abstract void onDetected(@NonNull EventPayload eventPayload);
+
+        /**
+         * Called when the detection fails due to an error.
+         */
+        public abstract void onError();
+
+        /**
+         * Called when the recognition is paused temporarily for some reason.
+         * This is an informational callback, and the clients shouldn't be doing anything here
+         * except showing an indication on their UI if they have to.
+         */
+        public abstract void onRecognitionPaused();
+
+        /**
+         * Called when the recognition is resumed after it was temporarily paused.
+         * This is an informational callback, and the clients shouldn't be doing anything here
+         * except showing an indication on their UI if they have to.
+         */
+        public abstract void onRecognitionResumed();
+    }
+
+    /**
+     * This class should be constructed by the {@link SoundTriggerManager}.
+     * @hide
+     */
+    SoundTriggerDetector(ISoundTriggerService soundTriggerService, UUID soundModelId,
+            @NonNull Callback callback, @Nullable Handler handler) {
+        mSoundTriggerService = soundTriggerService;
+        mSoundModelId = soundModelId;
+        mCallback = callback;
+        // No handler supplied: callbacks are delivered on the current thread's looper.
+        if (handler == null) {
+            mHandler = new MyHandler();
+        } else {
+            mHandler = new MyHandler(handler.getLooper());
+        }
+        mRecognitionCallback = new RecognitionCallback();
+    }
+
+    /**
+     * Starts recognition on the associated sound model. Result is indicated via the
+     * {@link Callback}.
+     * @return Indicates whether the call succeeded or not.
+     */
+    @RequiresPermission(android.Manifest.permission.MANAGE_SOUND_TRIGGER)
+    public boolean startRecognition(@RecognitionFlags int recognitionFlags) {
+        if (DBG) {
+            Slog.d(TAG, "startRecognition()");
+        }
+        // Translate the public flag bits into the RecognitionConfig fields.
+        boolean captureTriggerAudio =
+                (recognitionFlags & RECOGNITION_FLAG_CAPTURE_TRIGGER_AUDIO) != 0;
+
+        boolean allowMultipleTriggers =
+                (recognitionFlags & RECOGNITION_FLAG_ALLOW_MULTIPLE_TRIGGERS) != 0;
+        int status = STATUS_OK;
+        try {
+            status = mSoundTriggerService.startRecognition(new ParcelUuid(mSoundModelId),
+                    mRecognitionCallback, new RecognitionConfig(captureTriggerAudio,
+                        allowMultipleTriggers, null, null));
+        } catch (RemoteException e) {
+            // Binder failure: report the call as unsuccessful rather than throwing.
+            return false;
+        }
+        return status == STATUS_OK;
+    }
+
+    /**
+     * Stops recognition for the associated model.
+     */
+    @RequiresPermission(android.Manifest.permission.MANAGE_SOUND_TRIGGER)
+    public boolean stopRecognition() {
+        int status = STATUS_OK;
+        try {
+            status = mSoundTriggerService.stopRecognition(new ParcelUuid(mSoundModelId),
+                    mRecognitionCallback);
+        } catch (RemoteException e) {
+            return false;
+        }
+        return status == STATUS_OK;
+    }
+
+    /**
+     * @hide
+     */
+    public void dump(String prefix, PrintWriter pw) {
+        synchronized (mLock) {
+            // TODO: Dump useful debug information.
+        }
+    }
+
+    /**
+     * Callback that handles events from the lower sound trigger layer.
+     *
+     * Note that these callbacks will be called synchronously from the SoundTriggerService
+     * layer and thus should do minimal work (such as sending a message on a handler to do
+     * the real work).
+     * @hide
+     */
+    private class RecognitionCallback extends IRecognitionStatusCallback.Stub {
+
+        /**
+         * @hide
+         */
+        @Override
+        public void onGenericSoundTriggerDetected(SoundTrigger.GenericRecognitionEvent event) {
+            Slog.d(TAG, "onGenericSoundTriggerDetected()" + event);
+            // Repackage the service event as an EventPayload and hop to the client's handler.
+            Message.obtain(mHandler,
+                    MSG_SOUND_TRIGGER_DETECTED,
+                    new EventPayload(event.triggerInData, event.captureAvailable,
+                            event.captureFormat, event.captureSession, event.data))
+                    .sendToTarget();
+        }
+
+        @Override
+        public void onKeyphraseDetected(SoundTrigger.KeyphraseRecognitionEvent event) {
+            // Keyphrase events belong to the voice-interaction stack, not this detector.
+            Slog.e(TAG, "Ignoring onKeyphraseDetected() called for " + event);
+        }
+
+        /**
+         * @hide
+         */
+        @Override
+        public void onError(int status) {
+            Slog.d(TAG, "onError()" + status);
+            mHandler.sendEmptyMessage(MSG_DETECTION_ERROR);
+        }
+
+        /**
+         * @hide
+         */
+        @Override
+        public void onRecognitionPaused() {
+            Slog.d(TAG, "onRecognitionPaused()");
+            mHandler.sendEmptyMessage(MSG_DETECTION_PAUSE);
+        }
+
+        /**
+         * @hide
+         */
+        @Override
+        public void onRecognitionResumed() {
+            Slog.d(TAG, "onRecognitionResumed()");
+            mHandler.sendEmptyMessage(MSG_DETECTION_RESUME);
+        }
+    }
+
+    // Dispatches the MSG_* codes posted by RecognitionCallback to the client Callback.
+    private class MyHandler extends Handler {
+
+        MyHandler() {
+            super();
+        }
+
+        MyHandler(Looper looper) {
+            super(looper);
+        }
+
+        @Override
+        public void handleMessage(Message msg) {
+            if (mCallback == null) {
+                Slog.w(TAG, "Received message: " + msg.what + " for NULL callback.");
+                return;
+            }
+            switch (msg.what) {
+                case MSG_SOUND_TRIGGER_DETECTED:
+                    mCallback.onDetected((EventPayload) msg.obj);
+                    break;
+                case MSG_DETECTION_ERROR:
+                    mCallback.onError();
+                    break;
+                case MSG_DETECTION_PAUSE:
+                    mCallback.onRecognitionPaused();
+                    break;
+                case MSG_DETECTION_RESUME:
+                    mCallback.onRecognitionResumed();
+                    break;
+                default:
+                    super.handleMessage(msg);
+
+            }
+        }
+    }
+}
diff --git a/android/media/soundtrigger/SoundTriggerManager.java b/android/media/soundtrigger/SoundTriggerManager.java
new file mode 100644
index 00000000..92ffae0f
--- /dev/null
+++ b/android/media/soundtrigger/SoundTriggerManager.java
@@ -0,0 +1,327 @@
+/**
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.soundtrigger;
+import static android.hardware.soundtrigger.SoundTrigger.STATUS_ERROR;
+
+import android.app.PendingIntent;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.annotation.RequiresPermission;
+import android.annotation.SystemApi;
+import android.annotation.SystemService;
+import android.content.Context;
+import android.hardware.soundtrigger.SoundTrigger;
+import android.hardware.soundtrigger.SoundTrigger.SoundModel;
+import android.hardware.soundtrigger.SoundTrigger.GenericSoundModel;
+import android.hardware.soundtrigger.SoundTrigger.KeyphraseSoundModel;
+import android.hardware.soundtrigger.SoundTrigger.RecognitionConfig;
+import android.os.Handler;
+import android.os.ParcelUuid;
+import android.os.RemoteException;
+import android.util.Slog;
+
+import com.android.internal.app.ISoundTriggerService;
+
+import java.util.HashMap;
+import java.util.UUID;
+
+/**
+ * This class provides management of non-voice (general sound trigger) based sound recognition
+ * models. Usage of this class is restricted to system or signature applications only. This allows
+ * OEMs to write apps that can manage non-voice based sound trigger models.
+ *
+ * @hide
+ */
+@SystemApi
+@SystemService(Context.SOUND_TRIGGER_SERVICE)
+public final class SoundTriggerManager {
+    private static final boolean DBG = false;
+    private static final String TAG = "SoundTriggerManager";
+
+    private final Context mContext;
+    private final ISoundTriggerService mSoundTriggerService;
+
+    // Stores a mapping from the sound model UUID to the SoundTriggerInstance created by
+    // the createSoundTriggerDetector() call.
+    private final HashMap<UUID, SoundTriggerDetector> mReceiverInstanceMap;
+
+    /**
+     * @hide
+     */
+    public SoundTriggerManager(Context context, ISoundTriggerService soundTriggerService) {
+        if (DBG) {
+            Slog.i(TAG, "SoundTriggerManager created.");
+        }
+        mSoundTriggerService = soundTriggerService;
+        mContext = context;
+        mReceiverInstanceMap = new HashMap<UUID, SoundTriggerDetector>();
+    }
+
+    /**
+     * Updates the given sound trigger model.
+     */
+    @RequiresPermission(android.Manifest.permission.MANAGE_SOUND_TRIGGER)
+    public void updateModel(Model model) {
+        try {
+            mSoundTriggerService.updateSoundModel(model.getGenericSoundModel());
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+    /**
+     * Returns the sound trigger model represented by the given UUID. An instance of {@link Model}
+     * is returned.
+     */
+    @RequiresPermission(android.Manifest.permission.MANAGE_SOUND_TRIGGER)
+    public Model getModel(UUID soundModelId) {
+        try {
+            return new Model(mSoundTriggerService.getSoundModel(
+                    new ParcelUuid(soundModelId)));
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+    /**
+     * Deletes the sound model represented by the provided UUID.
+     */
+    @RequiresPermission(android.Manifest.permission.MANAGE_SOUND_TRIGGER)
+    public void deleteModel(UUID soundModelId) {
+        try {
+            mSoundTriggerService.deleteSoundModel(new ParcelUuid(soundModelId));
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+    /**
+     * Creates an instance of {@link SoundTriggerDetector} which can be used to start/stop
+     * recognition on the model and register for triggers from the model. Note that this call
+     * invalidates any previously returned instances for the same sound model Uuid.
+     *
+     * @param soundModelId UUID of the sound model to create the receiver object for.
+     * @param callback Instance of the {@link SoundTriggerDetector.Callback} object for the
+     * callbacks for the given sound model.
+     * @param handler The Handler to use for the callback operations. A null value will use the
+     * current thread's Looper.
+     * @return Instance of {@link SoundTriggerDetector} or null on error.
+     */
+    @Nullable
+    @RequiresPermission(android.Manifest.permission.MANAGE_SOUND_TRIGGER)
+    public SoundTriggerDetector createSoundTriggerDetector(UUID soundModelId,
+            @NonNull SoundTriggerDetector.Callback callback, @Nullable Handler handler) {
+        if (soundModelId == null) {
+            return null;
+        }
+
+        SoundTriggerDetector oldInstance = mReceiverInstanceMap.get(soundModelId);
+        if (oldInstance != null) {
+            // Shutdown old instance.
+        }
+        SoundTriggerDetector newInstance = new SoundTriggerDetector(mSoundTriggerService,
+                soundModelId, callback, handler);
+        mReceiverInstanceMap.put(soundModelId, newInstance);
+        return newInstance;
+    }
+
+    /**
+     * Class captures the data and fields that represent a non-keyphrase sound model. Use the
+     * factory constructor {@link Model#create(UUID, UUID, byte[])} to create an instance.
+     */
+    // We use encapsulation to expose the SoundTrigger.GenericSoundModel as a SystemApi. This
+    // prevents us from exposing SoundTrigger.GenericSoundModel as an Api.
+    public static class Model {
+
+        private SoundTrigger.GenericSoundModel mGenericSoundModel;
+
+        /**
+         * @hide
+         */
+        Model(SoundTrigger.GenericSoundModel soundTriggerModel) {
+            mGenericSoundModel = soundTriggerModel;
+        }
+
+        /**
+         * Factory constructor to create a SoundModel instance for use with methods in this
+         * class.
+         */
+        public static Model create(UUID modelUuid, UUID vendorUuid, byte[] data) {
+            return new Model(new SoundTrigger.GenericSoundModel(modelUuid,
+                        vendorUuid, data));
+        }
+
+        public UUID getModelUuid() {
+            return mGenericSoundModel.uuid;
+        }
+
+        public UUID getVendorUuid() {
+            return mGenericSoundModel.vendorUuid;
+        }
+
+        public byte[] getModelData() {
+            return mGenericSoundModel.data;
+        }
+
+        /**
+         * @hide
+         */
+        SoundTrigger.GenericSoundModel getGenericSoundModel() {
+            return mGenericSoundModel;
+        }
+    }
+
+
+    /**
+     * Default message type.
+     * @hide
+     */
+    public static final int FLAG_MESSAGE_TYPE_UNKNOWN = -1;
+    /**
+     * Contents of EXTRA_MESSAGE_TYPE extra for a RecognitionEvent.
+     * @hide
+     */
+    public static final int FLAG_MESSAGE_TYPE_RECOGNITION_EVENT = 0;
+    /**
+     * Contents of EXTRA_MESSAGE_TYPE extra for recognition error events.
+     * @hide
+     */
+    public static final int FLAG_MESSAGE_TYPE_RECOGNITION_ERROR = 1;
+    /**
+     * Contents of EXTRA_MESSAGE_TYPE extra for a recognition paused events.
+     * @hide
+     */
+    public static final int FLAG_MESSAGE_TYPE_RECOGNITION_PAUSED = 2;
+    /**
+     * Contents of EXTRA_MESSAGE_TYPE extra for recognition resumed events.
+     * @hide
+     */
+    public static final int FLAG_MESSAGE_TYPE_RECOGNITION_RESUMED = 3;
+
+    /**
+     * Extra key in the intent for the type of the message.
+     * @hide
+     */
+    public static final String EXTRA_MESSAGE_TYPE = "android.media.soundtrigger.MESSAGE_TYPE";
+    /**
+     * Extra key in the intent that holds the RecognitionEvent parcelable.
+     * @hide
+     */
+    public static final String EXTRA_RECOGNITION_EVENT = "android.media.soundtrigger.RECOGNITION_EVENT";
+    /**
+     * Extra key in the intent that holds the status in an error message.
+     * @hide
+     */
+    public static final String EXTRA_STATUS = "android.media.soundtrigger.STATUS";
+
+    /**
+     * Loads a given sound model into the sound trigger. Note the model will be unloaded if there is
+     * an error/the system service is restarted.
+     * @hide
+     */
+    @RequiresPermission(android.Manifest.permission.MANAGE_SOUND_TRIGGER)
+    public int loadSoundModel(SoundModel soundModel) {
+        if (soundModel == null) {
+            return STATUS_ERROR;
+        }
+
+        try {
+            switch (soundModel.type) {
+                case SoundModel.TYPE_GENERIC_SOUND:
+                    return mSoundTriggerService.loadGenericSoundModel(
+                            (GenericSoundModel) soundModel);
+                case SoundModel.TYPE_KEYPHRASE:
+                    return mSoundTriggerService.loadKeyphraseSoundModel(
+                            (KeyphraseSoundModel) soundModel);
+                default:
+                    Slog.e(TAG, "Unknown model type");
+                    return STATUS_ERROR;
+            }
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+    /**
+     * Starts recognition on the given model id. All events from the model will be sent to the
+     * PendingIntent.
+     * @hide
+     */
+    @RequiresPermission(android.Manifest.permission.MANAGE_SOUND_TRIGGER)
+    public int startRecognition(UUID soundModelId, PendingIntent callbackIntent,
+            RecognitionConfig config) {
+        if (soundModelId == null || callbackIntent == null || config == null) {
+            return STATUS_ERROR;
+        }
+        try {
+            return mSoundTriggerService.startRecognitionForIntent(new ParcelUuid(soundModelId),
+                    callbackIntent, config);
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+    /**
+     * Stops the given model's recognition.
+     * @hide
+     */
+    @RequiresPermission(android.Manifest.permission.MANAGE_SOUND_TRIGGER)
+    public int stopRecognition(UUID soundModelId) {
+        if (soundModelId == null) {
+            return STATUS_ERROR;
+        }
+        try {
+            return mSoundTriggerService.stopRecognitionForIntent(new ParcelUuid(soundModelId));
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+    /**
+     * Removes the given model from memory. Will also stop any pending recognitions.
+     * @hide
+     */
+    @RequiresPermission(android.Manifest.permission.MANAGE_SOUND_TRIGGER)
+    public int unloadSoundModel(UUID soundModelId) {
+        if (soundModelId == null) {
+            return STATUS_ERROR;
+        }
+        try {
+            return mSoundTriggerService.unloadSoundModel(
+                    new ParcelUuid(soundModelId));
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+
+    /**
+     * Returns true if the given model has had detection started on it.
+     * @hide
+     */
+    @RequiresPermission(android.Manifest.permission.MANAGE_SOUND_TRIGGER)
+    public boolean isRecognitionActive(UUID soundModelId) {
+        if (soundModelId == null) {
+            return false;
+        }
+        try {
+            return mSoundTriggerService.isRecognitionActive(
+                    new ParcelUuid(soundModelId));
+        } catch (RemoteException e) {
+            throw e.rethrowFromSystemServer();
+        }
+    }
+}
diff --git a/android/media/tv/DvbDeviceInfo.java b/android/media/tv/DvbDeviceInfo.java
new file mode 100644
index 00000000..e07f3a64
--- /dev/null
+++ b/android/media/tv/DvbDeviceInfo.java
@@ -0,0 +1,93 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv;
+
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.util.Log;
+
+/**
+ * Simple container for information about DVB device.
+ * Not for third-party developers.
+ *
+ * @hide
+ */
+public final class DvbDeviceInfo implements Parcelable {
+ static final String TAG = "DvbDeviceInfo";
+
+ public static final Parcelable.Creator<DvbDeviceInfo> CREATOR =
+ new Parcelable.Creator<DvbDeviceInfo>() {
+ @Override
+ public DvbDeviceInfo createFromParcel(Parcel source) {
+ try {
+ return new DvbDeviceInfo(source);
+ } catch (Exception e) {
+ Log.e(TAG, "Exception creating DvbDeviceInfo from parcel", e);
+ return null;
+ }
+ }
+
+ @Override
+ public DvbDeviceInfo[] newArray(int size) {
+ return new DvbDeviceInfo[size];
+ }
+ };
+
+ private final int mAdapterId;
+ private final int mDeviceId;
+
+ private DvbDeviceInfo(Parcel source) {
+ mAdapterId = source.readInt();
+ mDeviceId = source.readInt();
+ }
+
+ /**
+ * Constructs a new {@link DvbDeviceInfo} with the given adapter ID and device ID.
+ */
+ public DvbDeviceInfo(int adapterId, int deviceId) {
+ mAdapterId = adapterId;
+ mDeviceId = deviceId;
+ }
+
+ /**
+ * Returns the adapter ID of DVB device, in terms of enumerating the DVB device adapters
+ * installed in the system. The adapter ID counts from zero.
+ */
+ public int getAdapterId() {
+ return mAdapterId;
+ }
+
+ /**
+ * Returns the device ID of DVB device, in terms of enumerating the DVB devices attached to
+ * the same device adapter. The device ID counts from zero.
+ */
+ public int getDeviceId() {
+ return mDeviceId;
+ }
+
+ // Parcelable
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ @Override
+ public void writeToParcel(Parcel dest, int flags) {
+ dest.writeInt(mAdapterId);
+ dest.writeInt(mDeviceId);
+ }
+}
diff --git a/android/media/tv/ITvInputSessionWrapper.java b/android/media/tv/ITvInputSessionWrapper.java
new file mode 100644
index 00000000..df87e0f2
--- /dev/null
+++ b/android/media/tv/ITvInputSessionWrapper.java
@@ -0,0 +1,383 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv;
+
+import android.annotation.Nullable;
+import android.content.Context;
+import android.graphics.Rect;
+import android.media.PlaybackParams;
+import android.net.Uri;
+import android.os.Bundle;
+import android.os.IBinder;
+import android.os.Looper;
+import android.os.Message;
+import android.util.Log;
+import android.view.InputChannel;
+import android.view.InputEvent;
+import android.view.InputEventReceiver;
+import android.view.Surface;
+
+import com.android.internal.os.HandlerCaller;
+import com.android.internal.os.SomeArgs;
+
/**
 * Implements the internal ITvInputSession interface to convert incoming calls on to it back to
 * calls on the public TvInputSession interface, scheduling them on the main thread of the process.
 *
 * @hide
 */
public class ITvInputSessionWrapper extends ITvInputSession.Stub implements HandlerCaller.Callback {
    private static final String TAG = "TvInputSessionWrapper";

    // Watchdog thresholds for executeMessage(): beyond the short limit a warning is logged;
    // beyond the tune/long limits a RuntimeException is thrown to surface the stall.
    private static final int EXECUTE_MESSAGE_TIMEOUT_SHORT_MILLIS = 50;
    private static final int EXECUTE_MESSAGE_TUNE_TIMEOUT_MILLIS = 2000;
    private static final int EXECUTE_MESSAGE_TIMEOUT_LONG_MILLIS = 5 * 1000;

    // Message codes dispatched in executeMessage(); one per incoming ITvInputSession call.
    private static final int DO_RELEASE = 1;
    private static final int DO_SET_MAIN = 2;
    private static final int DO_SET_SURFACE = 3;
    private static final int DO_DISPATCH_SURFACE_CHANGED = 4;
    private static final int DO_SET_STREAM_VOLUME = 5;
    private static final int DO_TUNE = 6;
    private static final int DO_SET_CAPTION_ENABLED = 7;
    private static final int DO_SELECT_TRACK = 8;
    private static final int DO_APP_PRIVATE_COMMAND = 9;
    private static final int DO_CREATE_OVERLAY_VIEW = 10;
    private static final int DO_RELAYOUT_OVERLAY_VIEW = 11;
    private static final int DO_REMOVE_OVERLAY_VIEW = 12;
    private static final int DO_UNBLOCK_CONTENT = 13;
    private static final int DO_TIME_SHIFT_PLAY = 14;
    private static final int DO_TIME_SHIFT_PAUSE = 15;
    private static final int DO_TIME_SHIFT_RESUME = 16;
    private static final int DO_TIME_SHIFT_SEEK_TO = 17;
    private static final int DO_TIME_SHIFT_SET_PLAYBACK_PARAMS = 18;
    private static final int DO_TIME_SHIFT_ENABLE_POSITION_TRACKING = 19;
    private static final int DO_START_RECORDING = 20;
    private static final int DO_STOP_RECORDING = 21;

    // True when this wrapper backs a RecordingSession rather than a playback Session.
    private final boolean mIsRecordingSession;
    // Marshals each binder call onto the process's main thread.
    private final HandlerCaller mCaller;

    // Exactly one of these is non-null, selected by mIsRecordingSession; the active one is
    // cleared when DO_RELEASE is handled, after which further messages are dropped.
    private TvInputService.Session mTvInputSessionImpl;
    private TvInputService.RecordingSession mTvInputRecordingSessionImpl;

    // Input-event plumbing; only set for playback sessions created with an InputChannel.
    private InputChannel mChannel;
    private TvInputEventReceiver mReceiver;

    /** Creates a wrapper for a playback session; {@code channel} may be null (no input events). */
    public ITvInputSessionWrapper(Context context, TvInputService.Session sessionImpl,
            InputChannel channel) {
        mIsRecordingSession = false;
        mCaller = new HandlerCaller(context, null, this, true /* asyncHandler */);
        mTvInputSessionImpl = sessionImpl;
        mChannel = channel;
        if (channel != null) {
            // Input events are delivered on the main looper via TvInputEventReceiver below.
            mReceiver = new TvInputEventReceiver(channel, context.getMainLooper());
        }
    }

    // For the recording session
    public ITvInputSessionWrapper(Context context,
            TvInputService.RecordingSession recordingSessionImpl) {
        mIsRecordingSession = true;
        mCaller = new HandlerCaller(context, null, this, true /* asyncHandler */);
        mTvInputRecordingSessionImpl = recordingSessionImpl;
    }

    /**
     * Runs on the main thread: dispatches a queued message to the session implementation and
     * measures how long the handler took, logging or throwing when it exceeds the thresholds
     * above.
     */
    @Override
    public void executeMessage(Message msg) {
        // Drop messages that arrive after the session has been released.
        if ((mIsRecordingSession && mTvInputRecordingSessionImpl == null)
                || (!mIsRecordingSession && mTvInputSessionImpl == null)) {
            return;
        }

        long startTime = System.nanoTime();
        switch (msg.what) {
            case DO_RELEASE: {
                // Null out the impl so the guard above drops any later messages, and tear
                // down the input-event channel for playback sessions.
                if (mIsRecordingSession) {
                    mTvInputRecordingSessionImpl.release();
                    mTvInputRecordingSessionImpl = null;
                } else {
                    mTvInputSessionImpl.release();
                    mTvInputSessionImpl = null;
                    if (mReceiver != null) {
                        mReceiver.dispose();
                        mReceiver = null;
                    }
                    if (mChannel != null) {
                        mChannel.dispose();
                        mChannel = null;
                    }
                }
                break;
            }
            case DO_SET_MAIN: {
                mTvInputSessionImpl.setMain((Boolean) msg.obj);
                break;
            }
            case DO_SET_SURFACE: {
                mTvInputSessionImpl.setSurface((Surface) msg.obj);
                break;
            }
            case DO_DISPATCH_SURFACE_CHANGED: {
                SomeArgs args = (SomeArgs) msg.obj;
                // argi1/2/3 carry format, width, height as packed in dispatchSurfaceChanged().
                mTvInputSessionImpl.dispatchSurfaceChanged(args.argi1, args.argi2, args.argi3);
                args.recycle();
                break;
            }
            case DO_SET_STREAM_VOLUME: {
                mTvInputSessionImpl.setStreamVolume((Float) msg.obj);
                break;
            }
            case DO_TUNE: {
                // Tune is valid for both session kinds; everything below DO_TUNE that lacks a
                // recording branch is playback-only.
                SomeArgs args = (SomeArgs) msg.obj;
                if (mIsRecordingSession) {
                    mTvInputRecordingSessionImpl.tune((Uri) args.arg1, (Bundle) args.arg2);
                } else {
                    mTvInputSessionImpl.tune((Uri) args.arg1, (Bundle) args.arg2);
                }
                args.recycle();
                break;
            }
            case DO_SET_CAPTION_ENABLED: {
                mTvInputSessionImpl.setCaptionEnabled((Boolean) msg.obj);
                break;
            }
            case DO_SELECT_TRACK: {
                SomeArgs args = (SomeArgs) msg.obj;
                mTvInputSessionImpl.selectTrack((Integer) args.arg1, (String) args.arg2);
                args.recycle();
                break;
            }
            case DO_APP_PRIVATE_COMMAND: {
                SomeArgs args = (SomeArgs) msg.obj;
                if (mIsRecordingSession) {
                    mTvInputRecordingSessionImpl.appPrivateCommand(
                            (String) args.arg1, (Bundle) args.arg2);
                } else {
                    mTvInputSessionImpl.appPrivateCommand((String) args.arg1, (Bundle) args.arg2);
                }
                args.recycle();
                break;
            }
            case DO_CREATE_OVERLAY_VIEW: {
                SomeArgs args = (SomeArgs) msg.obj;
                mTvInputSessionImpl.createOverlayView((IBinder) args.arg1, (Rect) args.arg2);
                args.recycle();
                break;
            }
            case DO_RELAYOUT_OVERLAY_VIEW: {
                mTvInputSessionImpl.relayoutOverlayView((Rect) msg.obj);
                break;
            }
            case DO_REMOVE_OVERLAY_VIEW: {
                mTvInputSessionImpl.removeOverlayView(true);
                break;
            }
            case DO_UNBLOCK_CONTENT: {
                mTvInputSessionImpl.unblockContent((String) msg.obj);
                break;
            }
            case DO_TIME_SHIFT_PLAY: {
                mTvInputSessionImpl.timeShiftPlay((Uri) msg.obj);
                break;
            }
            case DO_TIME_SHIFT_PAUSE: {
                mTvInputSessionImpl.timeShiftPause();
                break;
            }
            case DO_TIME_SHIFT_RESUME: {
                mTvInputSessionImpl.timeShiftResume();
                break;
            }
            case DO_TIME_SHIFT_SEEK_TO: {
                mTvInputSessionImpl.timeShiftSeekTo((Long) msg.obj);
                break;
            }
            case DO_TIME_SHIFT_SET_PLAYBACK_PARAMS: {
                mTvInputSessionImpl.timeShiftSetPlaybackParams((PlaybackParams) msg.obj);
                break;
            }
            case DO_TIME_SHIFT_ENABLE_POSITION_TRACKING: {
                mTvInputSessionImpl.timeShiftEnablePositionTracking((Boolean) msg.obj);
                break;
            }
            case DO_START_RECORDING: {
                mTvInputRecordingSessionImpl.startRecording((Uri) msg.obj);
                break;
            }
            case DO_STOP_RECORDING: {
                mTvInputRecordingSessionImpl.stopRecording();
                break;
            }
            default: {
                Log.w(TAG, "Unhandled message code: " + msg.what);
                break;
            }
        }
        // Timing watchdog: warn on anything over the short limit, crash on a tune that blocked
        // the main thread too long, or on any request over the long limit.
        long durationMs = (System.nanoTime() - startTime) / (1000 * 1000);
        if (durationMs > EXECUTE_MESSAGE_TIMEOUT_SHORT_MILLIS) {
            Log.w(TAG, "Handling message (" + msg.what + ") took too long time (duration="
                    + durationMs + "ms)");
            if (msg.what == DO_TUNE && durationMs > EXECUTE_MESSAGE_TUNE_TIMEOUT_MILLIS) {
                throw new RuntimeException("Too much time to handle tune request. (" + durationMs
                        + "ms > " + EXECUTE_MESSAGE_TUNE_TIMEOUT_MILLIS + "ms) "
                        + "Consider handling the tune request in a separate thread.");
            }
            if (durationMs > EXECUTE_MESSAGE_TIMEOUT_LONG_MILLIS) {
                throw new RuntimeException("Too much time to handle a request. (type=" + msg.what +
                        ", " + durationMs + "ms > " + EXECUTE_MESSAGE_TIMEOUT_LONG_MILLIS + "ms).");
            }
        }
    }

    // The methods below run on the binder thread: each packs its arguments into a Message that
    // executeMessage() unpacks on the main thread.
    @Override
    public void release() {
        if (!mIsRecordingSession) {
            // Schedule overlay-view cleanup before the release message is processed.
            mTvInputSessionImpl.scheduleOverlayViewCleanup();
        }
        mCaller.executeOrSendMessage(mCaller.obtainMessage(DO_RELEASE));
    }

    @Override
    public void setMain(boolean isMain) {
        mCaller.executeOrSendMessage(mCaller.obtainMessageO(DO_SET_MAIN, isMain));
    }

    @Override
    public void setSurface(Surface surface) {
        mCaller.executeOrSendMessage(mCaller.obtainMessageO(DO_SET_SURFACE, surface));
    }

    @Override
    public void dispatchSurfaceChanged(int format, int width, int height) {
        mCaller.executeOrSendMessage(mCaller.obtainMessageIIII(DO_DISPATCH_SURFACE_CHANGED,
                format, width, height, 0));
    }

    @Override
    public final void setVolume(float volume) {
        mCaller.executeOrSendMessage(mCaller.obtainMessageO(DO_SET_STREAM_VOLUME, volume));
    }

    @Override
    public void tune(Uri channelUri, Bundle params) {
        // Clear the pending tune requests.
        mCaller.removeMessages(DO_TUNE);
        mCaller.executeOrSendMessage(mCaller.obtainMessageOO(DO_TUNE, channelUri, params));
    }

    @Override
    public void setCaptionEnabled(boolean enabled) {
        mCaller.executeOrSendMessage(mCaller.obtainMessageO(DO_SET_CAPTION_ENABLED, enabled));
    }

    @Override
    public void selectTrack(int type, String trackId) {
        mCaller.executeOrSendMessage(mCaller.obtainMessageOO(DO_SELECT_TRACK, type, trackId));
    }

    @Override
    public void appPrivateCommand(String action, Bundle data) {
        mCaller.executeOrSendMessage(mCaller.obtainMessageOO(DO_APP_PRIVATE_COMMAND, action,
                data));
    }

    @Override
    public void createOverlayView(IBinder windowToken, Rect frame) {
        mCaller.executeOrSendMessage(mCaller.obtainMessageOO(DO_CREATE_OVERLAY_VIEW, windowToken,
                frame));
    }

    @Override
    public void relayoutOverlayView(Rect frame) {
        mCaller.executeOrSendMessage(mCaller.obtainMessageO(DO_RELAYOUT_OVERLAY_VIEW, frame));
    }

    @Override
    public void removeOverlayView() {
        mCaller.executeOrSendMessage(mCaller.obtainMessage(DO_REMOVE_OVERLAY_VIEW));
    }

    @Override
    public void unblockContent(String unblockedRating) {
        mCaller.executeOrSendMessage(mCaller.obtainMessageO(
                DO_UNBLOCK_CONTENT, unblockedRating));
    }

    @Override
    public void timeShiftPlay(Uri recordedProgramUri) {
        mCaller.executeOrSendMessage(mCaller.obtainMessageO(
                DO_TIME_SHIFT_PLAY, recordedProgramUri));
    }

    @Override
    public void timeShiftPause() {
        mCaller.executeOrSendMessage(mCaller.obtainMessage(DO_TIME_SHIFT_PAUSE));
    }

    @Override
    public void timeShiftResume() {
        mCaller.executeOrSendMessage(mCaller.obtainMessage(DO_TIME_SHIFT_RESUME));
    }

    @Override
    public void timeShiftSeekTo(long timeMs) {
        mCaller.executeOrSendMessage(mCaller.obtainMessageO(DO_TIME_SHIFT_SEEK_TO, timeMs));
    }

    @Override
    public void timeShiftSetPlaybackParams(PlaybackParams params) {
        mCaller.executeOrSendMessage(mCaller.obtainMessageO(DO_TIME_SHIFT_SET_PLAYBACK_PARAMS,
                params));
    }

    @Override
    public void timeShiftEnablePositionTracking(boolean enable) {
        mCaller.executeOrSendMessage(mCaller.obtainMessageO(
                DO_TIME_SHIFT_ENABLE_POSITION_TRACKING, enable));
    }

    @Override
    public void startRecording(@Nullable Uri programUri) {
        mCaller.executeOrSendMessage(mCaller.obtainMessageO(DO_START_RECORDING, programUri));
    }

    @Override
    public void stopRecording() {
        mCaller.executeOrSendMessage(mCaller.obtainMessage(DO_STOP_RECORDING));
    }

    /** Forwards input events from the session's InputChannel into the session implementation. */
    private final class TvInputEventReceiver extends InputEventReceiver {
        public TvInputEventReceiver(InputChannel inputChannel, Looper looper) {
            super(inputChannel, looper);
        }

        @Override
        public void onInputEvent(InputEvent event, int displayId) {
            if (mTvInputSessionImpl == null) {
                // The session has been finished.
                finishInputEvent(event, false);
                return;
            }

            // DISPATCH_IN_PROGRESS means the session will finish the event asynchronously;
            // otherwise report handled/unhandled immediately.
            int handled = mTvInputSessionImpl.dispatchInputEvent(event, this);
            if (handled != TvInputManager.Session.DISPATCH_IN_PROGRESS) {
                finishInputEvent(event, handled == TvInputManager.Session.DISPATCH_HANDLED);
            }
        }
    }
}
diff --git a/android/media/tv/TvContentRating.java b/android/media/tv/TvContentRating.java
new file mode 100644
index 00000000..6197c707
--- /dev/null
+++ b/android/media/tv/TvContentRating.java
@@ -0,0 +1,983 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv;
+
+import android.annotation.NonNull;
+import android.annotation.SystemApi;
+import android.text.TextUtils;
+
+import com.android.internal.util.Preconditions;
+
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * A class representing a TV content rating. When a TV input service inserts the content rating
+ * information on a program into the database, this class can be used to generate the formatted
+ * string for
+ * {@link TvContract.Programs#COLUMN_CONTENT_RATING TvContract.Programs.COLUMN_CONTENT_RATING}.
+ * To create a {@code TvContentRating} object, use the
+ * {@link #createRating TvContentRating.createRating} method with valid rating system string
+ * constants.
+ *
+ * <p>It is possible for an application to define its own content rating system by supplying a
+ * content rating system definition XML resource (see example below) and declaring a broadcast
+ * receiver that filters {@link TvInputManager#ACTION_QUERY_CONTENT_RATING_SYSTEMS} in its manifest.
+ *
+ * <h3> Example: Rating system definition for the TV Parental Guidelines</h3>
+ * The following XML example shows how the TV Parental Guidelines in the United States can be
+ * defined:
+ * <p><pre class="prettyprint">
+ * {@literal
+ * <rating-system-definitions xmlns:android="http://schemas.android.com/apk/res/android"
+ * android:versionCode="1">
+ * <rating-system-definition android:name="US_TV"
+ * android:country="US"
+ * android:description="@string/description_us_tv">
+ * <sub-rating-definition android:name="US_TV_D"
+ * android:title="D"
+ * android:description="@string/description_us_tv_d" />
+ * <sub-rating-definition android:name="US_TV_L"
+ * android:title="L"
+ * android:description="@string/description_us_tv_l" />
+ * <sub-rating-definition android:name="US_TV_S"
+ * android:title="S"
+ * android:description="@string/description_us_tv_s" />
+ * <sub-rating-definition android:name="US_TV_V"
+ * android:title="V"
+ * android:description="@string/description_us_tv_v" />
+ * <sub-rating-definition android:name="US_TV_FV"
+ * android:title="FV"
+ * android:description="@string/description_us_tv_fv" />
+ *
+ * <rating-definition android:name="US_TV_Y"
+ * android:title="TV-Y"
+ * android:description="@string/description_us_tv_y"
+ * android:icon="@drawable/icon_us_tv_y"
+ * android:contentAgeHint="0" />
+ * <rating-definition android:name="US_TV_Y7"
+ * android:title="TV-Y7"
+ * android:description="@string/description_us_tv_y7"
+ * android:icon="@drawable/icon_us_tv_y7"
+ * android:contentAgeHint="7">
+ * <sub-rating android:name="US_TV_FV" />
+ * </rating-definition>
+ * <rating-definition android:name="US_TV_G"
+ * android:title="TV-G"
+ * android:description="@string/description_us_tv_g"
+ * android:icon="@drawable/icon_us_tv_g"
+ * android:contentAgeHint="0" />
+ * <rating-definition android:name="US_TV_PG"
+ * android:title="TV-PG"
+ * android:description="@string/description_us_tv_pg"
+ * android:icon="@drawable/icon_us_tv_pg"
+ * android:contentAgeHint="14">
+ * <sub-rating android:name="US_TV_D" />
+ * <sub-rating android:name="US_TV_L" />
+ * <sub-rating android:name="US_TV_S" />
+ * <sub-rating android:name="US_TV_V" />
+ * </rating-definition>
+ * <rating-definition android:name="US_TV_14"
+ * android:title="TV-14"
+ * android:description="@string/description_us_tv_14"
+ * android:icon="@drawable/icon_us_tv_14"
+ * android:contentAgeHint="14">
+ * <sub-rating android:name="US_TV_D" />
+ * <sub-rating android:name="US_TV_L" />
+ * <sub-rating android:name="US_TV_S" />
+ * <sub-rating android:name="US_TV_V" />
+ * </rating-definition>
+ * <rating-definition android:name="US_TV_MA"
+ * android:title="TV-MA"
+ * android:description="@string/description_us_tv_ma"
+ * android:icon="@drawable/icon_us_tv_ma"
+ * android:contentAgeHint="17">
+ * <sub-rating android:name="US_TV_L" />
+ * <sub-rating android:name="US_TV_S" />
+ * <sub-rating android:name="US_TV_V" />
+ * </rating-definition>
+ * <rating-order>
+ * <rating android:name="US_TV_Y" />
+ * <rating android:name="US_TV_Y7" />
+ * </rating-order>
+ * <rating-order>
+ * <rating android:name="US_TV_G" />
+ * <rating android:name="US_TV_PG" />
+ * <rating android:name="US_TV_14" />
+ * <rating android:name="US_TV_MA" />
+ * </rating-order>
+ * </rating-system-definition>
+ * </rating-system-definitions>}</pre>
+ *
+ * <h3>System defined rating strings</h3>
+ * The following strings are defined by the system to provide a standard way to create
+ * {@code TvContentRating} objects.
+ *
+ * <p>For example, to create an object that represents TV-PG rating with suggestive dialogue and
+ * coarse language from the TV Parental Guidelines in the United States, one can use the following
+ * code snippet:
+ *
+ * <pre>
+ * TvContentRating rating = TvContentRating.createRating(
+ * "com.android.tv",
+ * "US_TV",
+ * "US_TV_PG",
+ * "US_TV_D", "US_TV_L");
+ * </pre>
+ * <h4>System defined string for domains</h4>
+ * <table>
+ * <tr>
+ * <th>Constant Value</th>
+ * <th>Description</th>
+ * </tr>
+ * <tr>
+ * <td>com.android.tv</td>
+ * <td>Used for creating system defined content ratings</td>
+ * </tr>
+ * </table>
+ *
+ * <h4>System defined strings for rating systems</h4>
+ * <table>
+ * <tr>
+ * <th>Constant Value</th>
+ * <th>Description</th>
+ * </tr>
+ * <tr>
+ * <td>AR_TV</td>
+ * <td>TV content rating system for Argentina</td>
+ * </tr>
+ * <tr>
+ * <td>AU_TV</td>
+ * <td>TV content rating system for Australia</td>
+ * </tr>
+ * <tr>
+ * <td>BR_TV</td>
+ * <td>TV content rating system for Brazil</td>
+ * </tr>
+ * <tr>
+ * <td>CA_TV_EN</td>
+ * <td>TV content rating system for Canada (English)</td>
+ * </tr>
+ * <tr>
+ * <td>CA_TV_FR</td>
+ * <td>TV content rating system for Canada (French)</td>
+ * </tr>
+ * <tr>
+ * <td>DVB</td>
+ * <td>DVB content rating system</td>
+ * </tr>
+ * <tr>
+ * <td>ES_DVB</td>
+ * <td>DVB content rating system for Spain</td>
+ * </tr>
+ * <tr>
+ * <td>FR_DVB</td>
+ * <td>DVB content rating system for France</td>
+ * </tr>
+ * <tr>
+ * <td>ISDB</td>
+ * <td>ISDB content rating system</td>
+ * </tr>
+ * <tr>
+ * <td>KR_TV</td>
+ * <td>TV content rating system for South Korea</td>
+ * </tr>
+ * <tr>
+ * <td>SG_TV</td>
+ * <td>TV content rating system for Singapore</td>
+ * </tr>
+ * <tr>
+ * <td>US_MV</td>
+ * <td>Movie content rating system for the United States</td>
+ * </tr>
+ * <tr>
+ * <td>US_TV</td>
+ * <td>TV content rating system for the United States</td>
+ * </tr>
+ * </table>
+ *
+ * <h4>System defined strings for ratings</h4>
+ * <table>
+ * <tr>
+ * <th>Rating System</th>
+ * <th>Constant Value</th>
+ * <th>Description</th>
+ * </tr>
+ * <tr>
+ * <td valign="top" rowspan="4">AR_TV</td>
+ * <td>AR_TV_ATP</td>
+ * <td>Suitable for all audiences. Programs may contain mild violence, language and mature
+ * situations</td>
+ * </tr>
+ * <tr>
+ * <td>AR_TV_SAM_13</td>
+ * <td>Suitable for ages 13 and up. Programs may contain mild to moderate language and mild
+ * violence and sexual references</td>
+ * </tr>
+ * <tr>
+ * <td>AR_TV_SAM_16</td>
+ * <td>Suitable for ages 16 and up. Programs may contain more intensive violence and coarse
+ * language, partial nudity and moderate sexual references</td>
+ * </tr>
+ * <tr>
+ * <td>AR_TV_SAM_18</td>
+ * <td>Suitable for mature audiences only. Programs contain strong violence, coarse language
+ * and explicit sexual references</td>
+ * </tr>
+ * <tr>
+ * <td valign="top" rowspan="8">AU_TV</td>
+ * <td>AU_TV_P</td>
+ * <td>Recommended for younger children aged between 2 and 11 years</td>
+ * </tr>
+ * <tr>
+ * <td>AU_TV_C</td>
+ * <td>Recommended for older children aged between 5 and 14 years</td>
+ * </tr>
+ * <tr>
+ * <td>AU_TV_G</td>
+ * <td>Recommended for all ages</td>
+ * </tr>
+ * <tr>
+ * <td>AU_TV_PG</td>
+ * <td>Parental guidance is recommended for young viewers under 15</td>
+ * </tr>
+ * <tr>
+ * <td>AU_TV_M</td>
+ * <td>Recommended for mature audiences aged 15 years and over</td>
+ * </tr>
+ * <tr>
+ * <td>AU_TV_MA</td>
+ *         <td>Not suitable for children and teens under 15, due to sexual descriptions, coarse
+ *         language, adult themes or drug use</td>
+ * </tr>
+ * <tr>
+ * <td>AU_TV_AV</td>
+ * <td>Not suitable for children and teens under 15. This category is used specifically for
+ * violent programs</td>
+ * </tr>
+ * <tr>
+ * <td>AU_TV_R</td>
+ * <td>Not for children under 18. Content may include graphic violence, sexual situations,
+ * coarse language and explicit drug use</td>
+ * </tr>
+ * <tr>
+ * <td valign="top" rowspan="6">BR_TV</td>
+ * <td>BR_TV_L</td>
+ * <td>Content is suitable for all audiences</td>
+ * </tr>
+ * <tr>
+ * <td>BR_TV_10</td>
+ * <td>Content suitable for viewers over the age of 10</td>
+ * </tr>
+ * <tr>
+ * <td>BR_TV_12</td>
+ * <td>Content suitable for viewers over the age of 12</td>
+ * </tr>
+ * <tr>
+ * <td>BR_TV_14</td>
+ * <td>Content suitable for viewers over the age of 14</td>
+ * </tr>
+ * <tr>
+ * <td>BR_TV_16</td>
+ * <td>Content suitable for viewers over the age of 16</td>
+ * </tr>
+ * <tr>
+ * <td>BR_TV_18</td>
+ * <td>Content suitable for viewers over the age of 18</td>
+ * </tr>
+ * <tr>
+ * <td valign="top" rowspan="7">CA_TV_EN</td>
+ * <td>CA_TV_EN_EXEMPT</td>
+ * <td>Exempt from ratings</td>
+ * </tr>
+ * <tr>
+ * <td>CA_TV_EN_C</td>
+ * <td>Suitable for children ages 2&#8211;7</td>
+ * </tr>
+ * <tr>
+ * <td>CA_TV_EN_C8</td>
+ * <td>Suitable for children ages 8 and older</td>
+ * </tr>
+ * <tr>
+ * <td>CA_TV_EN_G</td>
+ * <td>Suitable for the entire family</td>
+ * </tr>
+ * <tr>
+ * <td>CA_TV_EN_PG</td>
+ * <td>May contain moderate violence, profanity, nudity, and sexual references</td>
+ * </tr>
+ * <tr>
+ * <td>CA_TV_EN_14</td>
+ * <td>Intended for viewers ages 14 and older</td>
+ * </tr>
+ * <tr>
+ * <td>CA_TV_EN_18</td>
+ * <td>Intended for viewers ages 18 and older</td>
+ * </tr>
+ * <tr>
+ * <td valign="top" rowspan="6">CA_TV_FR</td>
+ * <td>CA_TV_FR_E</td>
+ * <td>Exempt from ratings</td>
+ * </tr>
+ * <tr>
+ * <td>CA_TV_FR_G</td>
+ * <td>Appropriate for all ages</td>
+ * </tr>
+ * <tr>
+ * <td>CA_TV_FR_8</td>
+ * <td>Appropriate for children 8</td>
+ * </tr>
+ * <tr>
+ * <td>CA_TV_FR_13</td>
+ * <td>Suitable for children 13</td>
+ * </tr>
+ * <tr>
+ * <td>CA_TV_FR_16</td>
+ * <td>Recommended for children over the age of 16</td>
+ * </tr>
+ * <tr>
+ * <td>CA_TV_FR_18</td>
+ * <td>Only to be viewed by adults</td>
+ * </tr>
+ * <tr>
+ * <td valign="top" rowspan="15">DVB</td>
+ * <td>DVB_4</td>
+ * <td>Recommended for ages 4 and over</td>
+ * </tr>
+ * <tr>
+ * <td>DVB_5</td>
+ * <td>Recommended for ages 5 and over</td>
+ * </tr>
+ * <tr>
+ * <td>DVB_6</td>
+ * <td>Recommended for ages 6 and over</td>
+ * </tr>
+ * <tr>
+ * <td>DVB_7</td>
+ * <td>Recommended for ages 7 and over</td>
+ * </tr>
+ * <tr>
+ * <td>DVB_8</td>
+ * <td>Recommended for ages 8 and over</td>
+ * </tr>
+ * <tr>
+ * <td>DVB_9</td>
+ * <td>Recommended for ages 9 and over</td>
+ * </tr>
+ * <tr>
+ * <td>DVB_10</td>
+ * <td>Recommended for ages 10 and over</td>
+ * </tr>
+ * <tr>
+ * <td>DVB_11</td>
+ * <td>Recommended for ages 11 and over</td>
+ * </tr>
+ * <tr>
+ * <td>DVB_12</td>
+ * <td>Recommended for ages 12 and over</td>
+ * </tr>
+ * <tr>
+ * <td>DVB_13</td>
+ * <td>Recommended for ages 13 and over</td>
+ * </tr>
+ * <tr>
+ * <td>DVB_14</td>
+ * <td>Recommended for ages 14 and over</td>
+ * </tr>
+ * <tr>
+ * <td>DVB_15</td>
+ * <td>Recommended for ages 15 and over</td>
+ * </tr>
+ * <tr>
+ * <td>DVB_16</td>
+ * <td>Recommended for ages 16 and over</td>
+ * </tr>
+ * <tr>
+ * <td>DVB_17</td>
+ * <td>Recommended for ages 17 and over</td>
+ * </tr>
+ * <tr>
+ * <td>DVB_18</td>
+ * <td>Recommended for ages 18 and over</td>
+ * </tr>
+ * <tr>
+ * <td valign="top" rowspan="18">ES_DVB</td>
+ * <td>ES_DVB_ALL</td>
+ * <td>Recommended for all ages</td>
+ * </tr>
+ * <tr>
+ * <td>ES_DVB_C</td>
+ * <td>Recommended for children</td>
+ * </tr>
+ * <tr>
+ * <td>ES_DVB_X</td>
+ * <td>Recommended for adults</td>
+ * </tr>
+ * <tr>
+ * <td>ES_DVB_4</td>
+ * <td>Recommended for ages 4 and over</td>
+ * </tr>
+ * <tr>
+ * <td>ES_DVB_5</td>
+ * <td>Recommended for ages 5 and over</td>
+ * </tr>
+ * <tr>
+ * <td>ES_DVB_6</td>
+ * <td>Recommended for ages 6 and over</td>
+ * </tr>
+ * <tr>
+ * <td>ES_DVB_7</td>
+ * <td>Recommended for ages 7 and over</td>
+ * </tr>
+ * <tr>
+ * <td>ES_DVB_8</td>
+ * <td>Recommended for ages 8 and over</td>
+ * </tr>
+ * <tr>
+ * <td>ES_DVB_9</td>
+ * <td>Recommended for ages 9 and over</td>
+ * </tr>
+ * <tr>
+ * <td>ES_DVB_10</td>
+ * <td>Recommended for ages 10 and over</td>
+ * </tr>
+ * <tr>
+ * <td>ES_DVB_11</td>
+ * <td>Recommended for ages 11 and over</td>
+ * </tr>
+ * <tr>
+ * <td>ES_DVB_12</td>
+ * <td>Recommended for ages 12 and over</td>
+ * </tr>
+ * <tr>
+ * <td>ES_DVB_13</td>
+ * <td>Recommended for ages 13 and over</td>
+ * </tr>
+ * <tr>
+ * <td>ES_DVB_14</td>
+ * <td>Recommended for ages 14 and over</td>
+ * </tr>
+ * <tr>
+ * <td>ES_DVB_15</td>
+ * <td>Recommended for ages 15 and over</td>
+ * </tr>
+ * <tr>
+ * <td>ES_DVB_16</td>
+ * <td>Recommended for ages 16 and over</td>
+ * </tr>
+ * <tr>
+ * <td>ES_DVB_17</td>
+ * <td>Recommended for ages 17 and over</td>
+ * </tr>
+ * <tr>
+ * <td>ES_DVB_18</td>
+ * <td>Recommended for ages 18 and over</td>
+ * </tr>
+ * <tr>
+ * <td valign="top" rowspan="16">FR_DVB</td>
+ * <td>FR_DVB_U</td>
+ * <td>Recommended for all ages</td>
+ * </tr>
+ * <tr>
+ * <td>FR_DVB_4</td>
+ * <td>Recommended for ages 4 and over</td>
+ * </tr>
+ * <tr>
+ * <td>FR_DVB_5</td>
+ * <td>Recommended for ages 5 and over</td>
+ * </tr>
+ * <tr>
+ * <td>FR_DVB_6</td>
+ * <td>Recommended for ages 6 and over</td>
+ * </tr>
+ * <tr>
+ * <td>FR_DVB_7</td>
+ * <td>Recommended for ages 7 and over</td>
+ * </tr>
+ * <tr>
+ * <td>FR_DVB_8</td>
+ * <td>Recommended for ages 8 and over</td>
+ * </tr>
+ * <tr>
+ * <td>FR_DVB_9</td>
+ * <td>Recommended for ages 9 and over</td>
+ * </tr>
+ * <tr>
+ * <td>FR_DVB_10</td>
+ * <td>Recommended for ages 10 and over</td>
+ * </tr>
+ * <tr>
+ * <td>FR_DVB_11</td>
+ * <td>Recommended for ages 11 and over</td>
+ * </tr>
+ * <tr>
+ * <td>FR_DVB_12</td>
+ * <td>Recommended for ages 12 and over</td>
+ * </tr>
+ * <tr>
+ * <td>FR_DVB_13</td>
+ * <td>Recommended for ages 13 and over</td>
+ * </tr>
+ * <tr>
+ * <td>FR_DVB_14</td>
+ * <td>Recommended for ages 14 and over</td>
+ * </tr>
+ * <tr>
+ * <td>FR_DVB_15</td>
+ * <td>Recommended for ages 15 and over</td>
+ * </tr>
+ * <tr>
+ * <td>FR_DVB_16</td>
+ * <td>Recommended for ages 16 and over</td>
+ * </tr>
+ * <tr>
+ * <td>FR_DVB_17</td>
+ * <td>Recommended for ages 17 and over</td>
+ * </tr>
+ * <tr>
+ * <td>FR_DVB_18</td>
+ * <td>Recommended for ages 18 and over</td>
+ * </tr>
+ * <tr>
+ * <td valign="top" rowspan="17">ISDB</td>
+ * <td>ISDB_4</td>
+ * <td>Recommended for ages 4 and over</td>
+ * </tr>
+ * <tr>
+ * <td>ISDB_5</td>
+ * <td>Recommended for ages 5 and over</td>
+ * </tr>
+ * <tr>
+ * <td>ISDB_6</td>
+ * <td>Recommended for ages 6 and over</td>
+ * </tr>
+ * <tr>
+ * <td>ISDB_7</td>
+ * <td>Recommended for ages 7 and over</td>
+ * </tr>
+ * <tr>
+ * <td>ISDB_8</td>
+ * <td>Recommended for ages 8 and over</td>
+ * </tr>
+ * <tr>
+ * <td>ISDB_9</td>
+ * <td>Recommended for ages 9 and over</td>
+ * </tr>
+ * <tr>
+ * <td>ISDB_10</td>
+ * <td>Recommended for ages 10 and over</td>
+ * </tr>
+ * <tr>
+ * <td>ISDB_11</td>
+ * <td>Recommended for ages 11 and over</td>
+ * </tr>
+ * <tr>
+ * <td>ISDB_12</td>
+ * <td>Recommended for ages 12 and over</td>
+ * </tr>
+ * <tr>
+ * <td>ISDB_13</td>
+ * <td>Recommended for ages 13 and over</td>
+ * </tr>
+ * <tr>
+ * <td>ISDB_14</td>
+ * <td>Recommended for ages 14 and over</td>
+ * </tr>
+ * <tr>
+ * <td>ISDB_15</td>
+ * <td>Recommended for ages 15 and over</td>
+ * </tr>
+ * <tr>
+ * <td>ISDB_16</td>
+ * <td>Recommended for ages 16 and over</td>
+ * </tr>
+ * <tr>
+ * <td>ISDB_17</td>
+ * <td>Recommended for ages 17 and over</td>
+ * </tr>
+ * <tr>
+ * <td>ISDB_18</td>
+ * <td>Recommended for ages 18 and over</td>
+ * </tr>
+ * <tr>
+ * <td>ISDB_19</td>
+ * <td>Recommended for ages 19 and over</td>
+ * </tr>
+ * <tr>
+ * <td>ISDB_20</td>
+ * <td>Recommended for ages 20 and over</td>
+ * </tr>
+ * <tr>
+ * <td valign="top" rowspan="5">KR_TV</td>
+ * <td>KR_TV_ALL</td>
+ * <td>Appropriate for all ages</td>
+ * </tr>
+ * <tr>
+ * <td>KR_TV_7</td>
+ * <td>May contain material inappropriate for children younger than 7, and parental
+ * discretion should be used</td>
+ * </tr>
+ * <tr>
+ * <td>KR_TV_12</td>
+ * <td>May be deemed inappropriate for those younger than 12, and parental discretion
+ * should be used</td>
+ * </tr>
+ * <tr>
+ * <td>KR_TV_15</td>
+ * <td>May be inappropriate for children under 15, and parental discretion should be
+ * used</td>
+ * </tr>
+ * <tr>
+ * <td>KR_TV_19</td>
+ * <td>For adults only</td>
+ * </tr>
+ * <tr>
+ * <td valign="top" rowspan="6">SG_TV</td>
+ * <td>SG_TV_G</td>
+ * <td>Suitable for all ages</td>
+ * </tr>
+ * <tr>
+ * <td>SG_TV_PG</td>
+ * <td>Suitable for all but parents should guide their young</td>
+ * </tr>
+ * <tr>
+ * <td>SG_TV_PG13</td>
+ * <td>Suitable for persons aged 13 and above but parental guidance is advised for children
+ * below 13</td>
+ * </tr>
+ * <tr>
+ * <td>SG_TV_NC16</td>
+ * <td>Suitable for persons aged 16 and above</td>
+ * </tr>
+ * <tr>
+ * <td>SG_TV_M18</td>
+ * <td>Suitable for persons aged 18 and above</td>
+ * </tr>
+ * <tr>
+ * <td>SG_TV_R21</td>
+ * <td>Suitable for adults aged 21 and above</td>
+ * </tr>
+ * <tr>
+ * <td valign="top" rowspan="5">US_MV</td>
+ * <td>US_MV_G</td>
+ * <td>General audiences</td>
+ * </tr>
+ * <tr>
+ * <td>US_MV_PG</td>
+ * <td>Parental guidance suggested</td>
+ * </tr>
+ * <tr>
+ * <td>US_MV_PG13</td>
+ * <td>Parents strongly cautioned</td>
+ * </tr>
+ * <tr>
+ * <td>US_MV_R</td>
+ * <td>Restricted, under 17 requires accompanying parent or adult guardian</td>
+ * </tr>
+ * <tr>
+ * <td>US_MV_NC17</td>
+ * <td>No one 17 and under admitted</td>
+ * </tr>
+ * <tr>
+ * <td valign="top" rowspan="6">US_TV</td>
+ * <td>US_TV_Y</td>
+ * <td>This program is designed to be appropriate for all children</td>
+ * </tr>
+ * <tr>
+ * <td>US_TV_Y7</td>
+ * <td>This program is designed for children age 7 and above</td>
+ * </tr>
+ * <tr>
+ * <td>US_TV_G</td>
+ * <td>Most parents would find this program suitable for all ages</td>
+ * </tr>
+ * <tr>
+ * <td>US_TV_PG</td>
+ * <td>This program contains material that parents may find unsuitable for younger children
+ * </td>
+ * </tr>
+ * <tr>
+ * <td>US_TV_14</td>
+ * <td>This program contains some material that many parents would find unsuitable for
+ * children under 14 years of age</td>
+ * </tr>
+ * <tr>
+ * <td>US_TV_MA</td>
+ * <td>This program is specifically designed to be viewed by adults and therefore may be
+ * unsuitable for children under 17</td>
+ * </tr>
+ * </table>
+ *
+ * <h4>System defined strings for sub-ratings</h4>
+ * <table>
+ * <tr>
+ * <th>Rating System</th>
+ * <th>Constant Value</th>
+ * <th>Description</th>
+ * </tr>
+ * <tr>
+ * <td valign="top" rowspan="3">BR_TV</td>
+ * <td>BR_TV_D</td>
+ * <td>Drugs<br/>Applicable to BR_TV_L, BR_TV_10, BR_TV_12, BR_TV_14, BR_TV_16, and BR_TV_18
+ * </td>
+ * </tr>
+ * <tr>
+ * <td>BR_TV_S</td>
+ * <td>Sex<br/>Applicable to BR_TV_L, BR_TV_10, BR_TV_12, BR_TV_14, BR_TV_16, and BR_TV_18
+ * </td>
+ * </tr>
+ * <tr>
+ * <td>BR_TV_V</td>
+ * <td>Violence<br/>Applicable to BR_TV_L, BR_TV_10, BR_TV_12, BR_TV_14, BR_TV_16, and
+ * BR_TV_18</td>
+ * </tr>
+ * <tr>
+ * <td valign="top" rowspan="5">US_TV</td>
+ * <td>US_TV_D</td>
+ * <td>Suggestive dialogue (Usually means talks about sex)<br/>Applicable to US_TV_PG, and
+ * US_TV_14</td>
+ * </tr>
+ * <tr>
+ * <td>US_TV_L</td>
+ * <td>Coarse language<br/>Applicable to US_TV_PG, US_TV_14, and US_TV_MA</td>
+ * </tr>
+ * <tr>
+ * <td>US_TV_S</td>
+ * <td>Sexual content<br/>Applicable to US_TV_PG, US_TV_14, and US_TV_MA</td>
+ * </tr>
+ * <tr>
+ * <td>US_TV_V</td>
+ * <td>Violence<br/>Applicable to US_TV_PG, US_TV_14, and US_TV_MA</td>
+ * </tr>
+ * <tr>
+ * <td>US_TV_FV</td>
+ * <td>Fantasy violence (Children's programming only)<br/>Applicable to US_TV_Y7</td>
+ * </tr>
+ * </table>
+ */
+public final class TvContentRating {
+    // TODO: Consider to use other DELIMITER. In some countries such as India may use this delimiter
+    // in the main ratings.
+    private static final String DELIMITER = "/";
+
+    private final String mDomain;
+    private final String mRatingSystem;
+    private final String mRating;
+    // Sorted at construction time so that equals() and contains() are insensitive to the order in
+    // which callers pass sub-ratings; null when the rating has no sub-ratings.
+    private final String[] mSubRatings;
+    // Cached hash, also used as a cheap fast-path rejection in equals(). NOTE(review): the hash
+    // omits mRatingSystem even though equals() compares it; this is legal (equal objects still
+    // hash equally) but ratings differing only in rating system collide — confirm before relying
+    // on hash distribution.
+    private final int mHashCode;
+
+    /**
+     * Rating constant denoting unrated content. Used to handle the case where the content rating
+     * information is missing.
+     *
+     * <p>TV input services can call {@link TvInputManager#isRatingBlocked} with this constant to
+     * determine whether they should block unrated content. The subsequent call to
+     * {@link TvInputService.Session#notifyContentBlocked} with the same constant notifies
+     * applications that the current program content is blocked by parental controls.
+     */
+    public static final TvContentRating UNRATED = new TvContentRating("null", "null", "null", null);
+
+    /**
+     * Creates a {@code TvContentRating} object with predefined content rating strings.
+     *
+     * @param domain The domain string. For example, "com.android.tv".
+     * @param ratingSystem The rating system string. For example, "US_TV".
+     * @param rating The content rating string. For example, "US_TV_PG".
+     * @param subRatings The sub-rating strings. For example, "US_TV_D" and "US_TV_L".
+     * @return A {@code TvContentRating} object.
+     * @throws IllegalArgumentException If {@code domain}, {@code ratingSystem} or {@code rating} is
+     *             {@code null} or empty.
+     */
+    public static TvContentRating createRating(String domain, String ratingSystem,
+            String rating, String... subRatings) {
+        if (TextUtils.isEmpty(domain)) {
+            throw new IllegalArgumentException("domain cannot be empty");
+        }
+        if (TextUtils.isEmpty(ratingSystem)) {
+            throw new IllegalArgumentException("ratingSystem cannot be empty");
+        }
+        if (TextUtils.isEmpty(rating)) {
+            throw new IllegalArgumentException("rating cannot be empty");
+        }
+        return new TvContentRating(domain, ratingSystem, rating, subRatings);
+    }
+
+    /**
+     * Recovers a {@code TvContentRating} object from the string that was previously created from
+     * {@link #flattenToString}.
+     *
+     * @param ratingString The string returned by {@link #flattenToString}.
+     * @return the {@code TvContentRating} object containing the domain, rating system, rating and
+     *             sub-ratings information encoded in {@code ratingString}.
+     * @throws IllegalArgumentException If {@code ratingString} is empty or does not contain at
+     *             least a domain, a rating system and a rating.
+     * @see #flattenToString
+     */
+    public static TvContentRating unflattenFromString(String ratingString) {
+        if (TextUtils.isEmpty(ratingString)) {
+            throw new IllegalArgumentException("ratingString cannot be empty");
+        }
+        // Layout is domain/ratingSystem/rating[/subRating...]; see flattenToString().
+        String[] strs = ratingString.split(DELIMITER);
+        if (strs.length < 3) {
+            throw new IllegalArgumentException("Invalid rating string: " + ratingString);
+        }
+        if (strs.length > 3) {
+            String[] subRatings = new String[strs.length - 3];
+            System.arraycopy(strs, 3, subRatings, 0, subRatings.length);
+            return new TvContentRating(strs[0], strs[1], strs[2], subRatings);
+        }
+        return new TvContentRating(strs[0], strs[1], strs[2], null);
+    }
+
+    /**
+     * Constructs a TvContentRating object from a given rating and sub-rating constants.
+     *
+     * @param domain The string for domain of the content rating system such as "com.android.tv".
+     * @param ratingSystem The rating system string such as "US_TV".
+     * @param rating The content rating string such as "US_TV_PG".
+     * @param subRatings The sub-rating strings such as "US_TV_D" and "US_TV_L". May be null or
+     *            empty, both of which are normalized to a null field.
+     */
+    private TvContentRating(
+            String domain, String ratingSystem, String rating, String[] subRatings) {
+        mDomain = domain;
+        mRatingSystem = ratingSystem;
+        mRating = rating;
+        if (subRatings == null || subRatings.length == 0) {
+            mSubRatings = null;
+        } else {
+            // Sort so that logically equal ratings compare equal regardless of argument order.
+            Arrays.sort(subRatings);
+            mSubRatings = subRatings;
+        }
+        mHashCode = 31 * Objects.hash(mDomain, mRating) + Arrays.hashCode(mSubRatings);
+    }
+
+    /**
+     * Returns the domain of this {@code TvContentRating} object.
+     */
+    public String getDomain() {
+        return mDomain;
+    }
+
+    /**
+     * Returns the rating system of this {@code TvContentRating} object.
+     */
+    public String getRatingSystem() {
+        return mRatingSystem;
+    }
+
+    /**
+     * Returns the main rating of this {@code TvContentRating} object.
+     */
+    public String getMainRating() {
+        return mRating;
+    }
+
+    /**
+     * Returns the unmodifiable sub-rating string {@link List} of this {@code TvContentRating}
+     * object, or {@code null} if there are no sub-ratings.
+     */
+    public List<String> getSubRatings() {
+        if (mSubRatings == null) {
+            return null;
+        }
+        return Collections.unmodifiableList(Arrays.asList(mSubRatings));
+    }
+
+    /**
+     * Returns a string that unambiguously describes the rating information contained in a
+     * {@code TvContentRating} object. One can later recover the object from this string through
+     * {@link #unflattenFromString}.
+     *
+     * @return a string containing the rating information, which can later be stored in the
+     *             database.
+     * @see #unflattenFromString
+     */
+    public String flattenToString() {
+        StringBuilder builder = new StringBuilder();
+        builder.append(mDomain);
+        builder.append(DELIMITER);
+        builder.append(mRatingSystem);
+        builder.append(DELIMITER);
+        builder.append(mRating);
+        if (mSubRatings != null) {
+            for (String subRating : mSubRatings) {
+                builder.append(DELIMITER);
+                builder.append(subRating);
+            }
+        }
+        return builder.toString();
+    }
+
+    /**
+     * Returns {@code true} if this rating has the same main rating as the specified rating and when
+     * this rating's sub-ratings contain the other's.
+     *
+     * <p>For example, a {@code TvContentRating} object that represents TV-PG with
+     * S(Sexual content) and V(Violence) contains TV-PG, TV-PG/S, TV-PG/V and itself.
+     *
+     * @param rating The {@link TvContentRating} to check.
+     * @return {@code true} if this object contains {@code rating}, {@code false} otherwise.
+     * @throws NullPointerException If {@code rating} is {@code null}.
+     */
+    public final boolean contains(@NonNull TvContentRating rating) {
+        Preconditions.checkNotNull(rating);
+        // Domain, rating system and main rating must all match. (The previous implementation
+        // checked the main rating twice; the compound condition below covers it once.)
+        if (!rating.getDomain().equals(mDomain) ||
+                !rating.getRatingSystem().equals(mRatingSystem) ||
+                !rating.getMainRating().equals(mRating)) {
+            return false;
+        }
+        List<String> subRatings = getSubRatings();
+        List<String> subRatingsOther = rating.getSubRatings();
+        if (subRatingsOther == null) {
+            // A rating without sub-ratings is contained by any rating with the same main rating.
+            return true;
+        }
+        if (subRatings == null) {
+            // The other rating has sub-ratings but this one has none to contain them.
+            return false;
+        }
+        return subRatings.containsAll(subRatingsOther);
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (!(obj instanceof TvContentRating)) {
+            return false;
+        }
+        TvContentRating other = (TvContentRating) obj;
+        // Cheap rejection before comparing the individual strings.
+        if (mHashCode != other.mHashCode) {
+            return false;
+        }
+        if (!TextUtils.equals(mDomain, other.mDomain)) {
+            return false;
+        }
+        if (!TextUtils.equals(mRatingSystem, other.mRatingSystem)) {
+            return false;
+        }
+        if (!TextUtils.equals(mRating, other.mRating)) {
+            return false;
+        }
+        // mSubRatings is sorted at construction, so element-wise comparison is order-insensitive.
+        return Arrays.equals(mSubRatings, other.mSubRatings);
+    }
+
+    @Override
+    public int hashCode() {
+        return mHashCode;
+    }
+}
diff --git a/android/media/tv/TvContentRatingSystemInfo.java b/android/media/tv/TvContentRatingSystemInfo.java
new file mode 100644
index 00000000..f2e5b08c
--- /dev/null
+++ b/android/media/tv/TvContentRatingSystemInfo.java
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv;
+
+import android.annotation.SystemApi;
+import android.content.ContentResolver;
+import android.content.pm.ApplicationInfo;
+import android.net.Uri;
+import android.os.Parcel;
+import android.os.Parcelable;
+
+/**
+ * TvContentRatingSystemInfo class provides information about a specific TV content rating system
+ * defined either by a system app or by a third-party app.
+ *
+ * <p>Instances are immutable: they carry the URI of the XML resource that defines the rating
+ * system and the {@link ApplicationInfo} of the application that provides it.
+ *
+ * @hide
+ */
+@SystemApi
+public final class TvContentRatingSystemInfo implements Parcelable {
+    // "android.resource://" URI pointing at the <rating-system-definitions> XML resource.
+    private final Uri mXmlUri;
+
+    // Info about the app that declares the rating system; used to tell system apps apart.
+    private final ApplicationInfo mApplicationInfo;
+
+    /**
+     * Creates a TvContentRatingSystemInfo object with the given XML resource ID and application
+     * info.
+     *
+     * @param xmlResourceId The ID of an XML resource whose root element is
+     *            <code> &lt;rating-system-definitions&gt;</code>
+     * @param applicationInfo Information about the application that provides the TV content rating
+     *            system definition.
+     */
+    public static final TvContentRatingSystemInfo createTvContentRatingSystemInfo(int xmlResourceId,
+            ApplicationInfo applicationInfo) {
+        // Encode the resource as an android.resource:// URI scoped to the providing package.
+        Uri uri = new Uri.Builder()
+                .scheme(ContentResolver.SCHEME_ANDROID_RESOURCE)
+                .authority(applicationInfo.packageName)
+                .appendPath(String.valueOf(xmlResourceId))
+                .build();
+        return new TvContentRatingSystemInfo(uri, applicationInfo);
+    }
+
+    private TvContentRatingSystemInfo(Uri xmlUri, ApplicationInfo applicationInfo) {
+        mXmlUri = xmlUri;
+        mApplicationInfo = applicationInfo;
+    }
+
+    /**
+     * Returns {@code true} if the TV content rating system is defined by a system app,
+     * {@code false} otherwise.
+     */
+    public final boolean isSystemDefined() {
+        return (mApplicationInfo.flags & ApplicationInfo.FLAG_SYSTEM) != 0;
+    }
+
+    /**
+     * Returns the URI to the XML resource that defines the TV content rating system.
+     *
+     * TODO: Remove. Instead, parse the XML resource and provide an interface to directly access
+     * parsed information.
+     */
+    public final Uri getXmlUri() {
+        return mXmlUri;
+    }
+
+    /**
+     * Used to make this class parcelable.
+     * @hide
+     */
+    public static final Parcelable.Creator<TvContentRatingSystemInfo> CREATOR =
+            new Parcelable.Creator<TvContentRatingSystemInfo>() {
+        @Override
+        public TvContentRatingSystemInfo createFromParcel(Parcel in) {
+            return new TvContentRatingSystemInfo(in);
+        }
+
+        @Override
+        public TvContentRatingSystemInfo[] newArray(int size) {
+            return new TvContentRatingSystemInfo[size];
+        }
+    };
+
+    // Parcel constructor; must read fields in the exact order writeToParcel() writes them.
+    private TvContentRatingSystemInfo(Parcel in) {
+        mXmlUri = in.readParcelable(null);
+        mApplicationInfo = in.readParcelable(null);
+    }
+
+    @Override
+    public void writeToParcel(Parcel dest, int flags) {
+        dest.writeParcelable(mXmlUri, flags);
+        dest.writeParcelable(mApplicationInfo, flags);
+    }
+
+    @Override
+    public int describeContents() {
+        // No file descriptors or other special objects in the marshalled representation.
+        return 0;
+    }
+}
diff --git a/android/media/tv/TvContract.java b/android/media/tv/TvContract.java
new file mode 100644
index 00000000..0f460960
--- /dev/null
+++ b/android/media/tv/TvContract.java
@@ -0,0 +1,3141 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv;
+
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.annotation.SdkConstant;
+import android.annotation.StringDef;
+import android.annotation.SystemApi;
+import android.annotation.SdkConstant.SdkConstantType;
+import android.app.Activity;
+import android.content.ComponentName;
+import android.content.ContentResolver;
+import android.content.ContentUris;
+import android.content.Context;
+import android.content.Intent;
+import android.net.Uri;
+import android.os.Bundle;
+import android.os.IBinder;
+import android.provider.BaseColumns;
+import android.text.TextUtils;
+import android.util.ArraySet;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * The contract between the TV provider and applications. Contains definitions for the supported
+ * URIs and columns.
+ * <h3>Overview</h3>
+ *
+ * <p>TvContract defines a basic database of TV content metadata such as channel and program
+ * information. The information is stored in {@link Channels} and {@link Programs} tables.
+ *
+ * <ul>
+ * <li>A row in the {@link Channels} table represents information about a TV channel. The data
+ * format can vary greatly from standard to standard or according to service provider, thus
+ * the columns here are mostly comprised of basic entities that are usually seen to users
+ * regardless of standard such as channel number and name.</li>
+ * <li>A row in the {@link Programs} table represents a set of data describing a TV program such
+ * as program title and start time.</li>
+ * </ul>
+ */
+public final class TvContract {
+ /** The authority for the TV provider. */
+ public static final String AUTHORITY = "android.media.tv";
+
+ /**
+ * Permission to read TV listings. This is required to read all the TV channel and program
+ * information available on the system.
+ * @hide
+ */
+ public static final String PERMISSION_READ_TV_LISTINGS = "android.permission.READ_TV_LISTINGS";
+
+ private static final String PATH_CHANNEL = "channel";
+ private static final String PATH_PROGRAM = "program";
+ private static final String PATH_RECORDED_PROGRAM = "recorded_program";
+ private static final String PATH_PREVIEW_PROGRAM = "preview_program";
+ private static final String PATH_WATCH_NEXT_PROGRAM = "watch_next_program";
+ private static final String PATH_PASSTHROUGH = "passthrough";
+
+ /**
+ * Broadcast Action: sent when an application requests the system to make the given channel
+ * browsable. The operation is performed in the background without user interaction. This
+ * is only relevant to channels with {@link Channels#TYPE_PREVIEW} type.
+ *
+ * <p>The intent must contain the following bundle parameters:
+ * <ul>
+ * <li>{@link #EXTRA_CHANNEL_ID}: ID for the {@link Channels#TYPE_PREVIEW} channel as a long
+ * integer.</li>
+ * <li>{@link #EXTRA_PACKAGE_NAME}: the package name of the requesting application.</li>
+ * </ul>
+ * @hide
+ */
+ @SystemApi
+ @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
+ public static final String ACTION_CHANNEL_BROWSABLE_REQUESTED =
+ "android.media.tv.action.CHANNEL_BROWSABLE_REQUESTED";
+
+ /**
+ * Activity Action: sent by an application telling the system to make the given channel
+ * browsable with user interaction. The system may show UI to ask user to approve the channel.
+ * This is only relevant to channels with {@link Channels#TYPE_PREVIEW} type. Use
+ * {@link Activity#startActivityForResult} to get the result of the request.
+ *
+ * <p>The intent must contain the following bundle parameters:
+ * <ul>
+ * <li>{@link #EXTRA_CHANNEL_ID}: ID for the {@link Channels#TYPE_PREVIEW} channel as a long
+ * integer.</li>
+ * </ul>
+ */
+ @SdkConstant(SdkConstantType.ACTIVITY_INTENT_ACTION)
+ public static final String ACTION_REQUEST_CHANNEL_BROWSABLE =
+ "android.media.tv.action.REQUEST_CHANNEL_BROWSABLE";
+
+ /**
+ * Broadcast Action: sent by the system to tell the target TV input that one of its preview
+ * program's browsable state is disabled, i.e., it will no longer be shown to users, which, for
+ * example, might be a result of users' interaction with UI. The input is expected to delete the
+ * preview program from the content provider.
+ *
+ * <p>The intent must contain the following bundle parameter:
+ * <ul>
+ * <li>{@link #EXTRA_PREVIEW_PROGRAM_ID}: the disabled preview program ID.</li>
+ * </ul>
+ */
+ @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
+ public static final String ACTION_PREVIEW_PROGRAM_BROWSABLE_DISABLED =
+ "android.media.tv.action.PREVIEW_PROGRAM_BROWSABLE_DISABLED";
+
+ /**
+ * Broadcast Action: sent by the system to tell the target TV input that one of its "watch next"
+ * program's browsable state is disabled, i.e., it will no longer be shown to users, which, for
+ * example, might be a result of users' interaction with UI. The input is expected to delete the
+ * "watch next" program from the content provider.
+ *
+ * <p>The intent must contain the following bundle parameter:
+ * <ul>
+ * <li>{@link #EXTRA_WATCH_NEXT_PROGRAM_ID}: the disabled "watch next" program ID.</li>
+ * </ul>
+ */
+ @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
+ public static final String ACTION_WATCH_NEXT_PROGRAM_BROWSABLE_DISABLED =
+ "android.media.tv.action.WATCH_NEXT_PROGRAM_BROWSABLE_DISABLED";
+
+ /**
+ * Broadcast Action: sent by the system to tell the target TV input that one of its existing
+ * preview programs is added to the watch next programs table by user.
+ *
+ * <p>The intent must contain the following bundle parameters:
+ * <ul>
+ * <li>{@link #EXTRA_PREVIEW_PROGRAM_ID}: the ID of the existing preview program.</li>
+ * <li>{@link #EXTRA_WATCH_NEXT_PROGRAM_ID}: the ID of the new watch next program.</li>
+ * </ul>
+ */
+ @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
+ public static final String ACTION_PREVIEW_PROGRAM_ADDED_TO_WATCH_NEXT =
+ "android.media.tv.action.PREVIEW_PROGRAM_ADDED_TO_WATCH_NEXT";
+
+ /**
+ * Broadcast Action: sent to the target TV input after it is first installed to notify the input
+ * to initialize its channels and programs to the system content provider.
+ *
+ * <p>Note that this intent is sent only on devices with
+ * {@link android.content.pm.PackageManager#FEATURE_LEANBACK} enabled. Besides that, in order
+ * to receive this intent, the target TV input must:
+ * <ul>
+ * <li>Declare a broadcast receiver for this intent in its
+ * <code>AndroidManifest.xml</code>.</li>
+ * <li>Declare appropriate permissions to write channel and program data in its
+ * <code>AndroidManifest.xml</code>.</li>
+ * </ul>
+ */
+ @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
+ public static final String ACTION_INITIALIZE_PROGRAMS =
+ "android.media.tv.action.INITIALIZE_PROGRAMS";
+
+ /**
+ * The key for a bundle parameter containing a channel ID as a long integer
+ */
+ public static final String EXTRA_CHANNEL_ID = "android.media.tv.extra.CHANNEL_ID";
+
+ /**
+ * The key for a bundle parameter containing a package name as a string.
+ * @hide
+ */
+ @SystemApi
+ public static final String EXTRA_PACKAGE_NAME = "android.media.tv.extra.PACKAGE_NAME";
+
+ /** The key for a bundle parameter containing a program ID as a long integer. */
+ public static final String EXTRA_PREVIEW_PROGRAM_ID =
+ "android.media.tv.extra.PREVIEW_PROGRAM_ID";
+
+ /** The key for a bundle parameter containing a watch next program ID as a long integer. */
+ public static final String EXTRA_WATCH_NEXT_PROGRAM_ID =
+ "android.media.tv.extra.WATCH_NEXT_PROGRAM_ID";
+
+ /**
+ * The key for a bundle parameter containing the result code of a method call as an integer.
+ *
+ * @see #RESULT_OK
+ * @see #RESULT_ERROR_IO
+ * @see #RESULT_ERROR_INVALID_ARGUMENT
+ * @hide
+ */
+ @SystemApi
+ public static final String EXTRA_RESULT_CODE = "android.media.tv.extra.RESULT_CODE";
+
+ /**
+ * The result code for a successful execution without error.
+ * @hide
+ */
+ @SystemApi
+ public static final int RESULT_OK = 0;
+
+ /**
+ * The result code for a failure from I/O operation.
+ * @hide
+ */
+ @SystemApi
+ public static final int RESULT_ERROR_IO = 1;
+
+ /**
+ * The result code for a failure from invalid argument.
+ * @hide
+ */
+ @SystemApi
+ public static final int RESULT_ERROR_INVALID_ARGUMENT = 2;
+
+ /**
+ * The method name to get existing columns in the given table of the specified content provider.
+ *
+ * <p>The method caller must provide the following parameter:
+ * <ul>
+ * <li>{@code arg}: The content URI of the target table as a {@link String}.</li>
+ * </ul>
+     *
+ * <p>On success, the returned {@link android.os.Bundle} will include existing column names
+ * with the key {@link #EXTRA_EXISTING_COLUMN_NAMES}. Otherwise, the return value will be {@code null}.
+ *
+ * @see ContentResolver#call(Uri, String, String, Bundle)
+ * @see #EXTRA_EXISTING_COLUMN_NAMES
+ * @hide
+ */
+ @SystemApi
+ public static final String METHOD_GET_COLUMNS = "get_columns";
+
+ /**
+ * The method name to add a new column in the given table of the specified content provider.
+ *
+ * <p>The method caller must provide the following parameter:
+ * <ul>
+ * <li>{@code arg}: The content URI of the target table as a {@link String}.</li>
+ * <li>{@code extra}: Name, data type, and default value of the new column in a Bundle:
+ * <ul>
+ * <li>{@link #EXTRA_COLUMN_NAME} the column name as a {@link String}.</li>
+ * <li>{@link #EXTRA_DATA_TYPE} the data type as a {@link String}.</li>
+ * <li>{@link #EXTRA_DEFAULT_VALUE} the default value as a {@link String}.
+ * (optional)</li>
+ * </ul>
+ * </li>
+ * </ul>
+ *
+     * <p>On success, the returned {@link android.os.Bundle} will include current column names after
+ * the addition operation with the key {@link #EXTRA_EXISTING_COLUMN_NAMES}. Otherwise, the
+ * return value will be {@code null}.
+ *
+ * @see ContentResolver#call(Uri, String, String, Bundle)
+ * @see #EXTRA_COLUMN_NAME
+ * @see #EXTRA_DATA_TYPE
+ * @see #EXTRA_DEFAULT_VALUE
+ * @see #EXTRA_EXISTING_COLUMN_NAMES
+ * @hide
+ */
+ @SystemApi
+ public static final String METHOD_ADD_COLUMN = "add_column";
+
+ /**
+ * The method name to get all the blocked packages. When a package is blocked, all the data for
+ * preview programs/channels and watch next programs belonging to this package in the content
+ * provider will be cleared. Once a package is blocked, {@link SecurityException} will be thrown
+ * for all the requests to preview programs/channels and watch next programs via
+ * {@link android.content.ContentProvider} from it.
+ *
+ * <p>The returned {@link android.os.Bundle} will include all the blocked package names with the
+ * key {@link #EXTRA_BLOCKED_PACKAGES}.
+ *
+ * @see ContentResolver#call(Uri, String, String, Bundle)
+ * @see #EXTRA_BLOCKED_PACKAGES
+ * @see #METHOD_BLOCK_PACKAGE
+ * @see #METHOD_UNBLOCK_PACKAGE
+ * @hide
+ */
+ @SystemApi
+ public static final String METHOD_GET_BLOCKED_PACKAGES = "get_blocked_packages";
+
+ /**
+ * The method name to block the access from the given package. When a package is blocked, all
+ * the data for preview programs/channels and watch next programs belonging to this package in
+ * the content provider will be cleared. Once a package is blocked, {@link SecurityException}
+ * will be thrown for all the requests to preview programs/channels and watch next programs via
+ * {@link android.content.ContentProvider} from it.
+ *
+ * <p>The method caller must provide the following parameter:
+ * <ul>
+ * <li>{@code arg}: The package name to be added as blocked package {@link String}.</li>
+ * </ul>
+ *
+ * <p>The returned {@link android.os.Bundle} will include an integer code denoting whether the
+ * execution is successful or not with the key {@link #EXTRA_RESULT_CODE}. If {@code arg} is
+ * empty, the result code will be {@link #RESULT_ERROR_INVALID_ARGUMENT}. If success, the result
+ * code will be {@link #RESULT_OK}. Otherwise, the result code will be {@link #RESULT_ERROR_IO}.
+ *
+ * @see ContentResolver#call(Uri, String, String, Bundle)
+ * @see #EXTRA_RESULT_CODE
+ * @see #METHOD_GET_BLOCKED_PACKAGES
+ * @see #METHOD_UNBLOCK_PACKAGE
+ * @hide
+ */
+ @SystemApi
+ public static final String METHOD_BLOCK_PACKAGE = "block_package";
+
+ /**
+ * The method name to unblock the access from the given package. When a package is blocked, all
+ * the data for preview programs/channels and watch next programs belonging to this package in
+ * the content provider will be cleared. Once a package is blocked, {@link SecurityException}
+ * will be thrown for all the requests to preview programs/channels and watch next programs via
+ * {@link android.content.ContentProvider} from it.
+ *
+ * <p>The method caller must provide the following parameter:
+ * <ul>
+ * <li>{@code arg}: The package name to be removed from blocked list as a {@link String}.
+ * </li>
+ * </ul>
+ *
+ * <p>The returned {@link android.os.Bundle} will include an integer code denoting whether the
+ * execution is successful or not with the key {@link #EXTRA_RESULT_CODE}. If {@code arg} is
+ * empty, the result code will be {@link #RESULT_ERROR_INVALID_ARGUMENT}. If success, the result
+ * code will be {@link #RESULT_OK}. Otherwise, the result code will be {@link #RESULT_ERROR_IO}.
+ *
+ * @see ContentResolver#call(Uri, String, String, Bundle)
+ * @see #EXTRA_RESULT_CODE
+ * @see #METHOD_GET_BLOCKED_PACKAGES
+ * @see #METHOD_BLOCK_PACKAGE
+ * @hide
+ */
+ @SystemApi
+ public static final String METHOD_UNBLOCK_PACKAGE = "unblock_package";
+
+ /**
+ * The key for a returned {@link Bundle} value containing existing column names in the given
+ * table as an {@link ArrayList} of {@link String}.
+ *
+ * @see #METHOD_GET_COLUMNS
+ * @see #METHOD_ADD_COLUMN
+ * @hide
+ */
+ @SystemApi
+ public static final String EXTRA_EXISTING_COLUMN_NAMES =
+ "android.media.tv.extra.EXISTING_COLUMN_NAMES";
+
+ /**
+ * The key for a {@link Bundle} parameter containing the new column name to be added in the
+ * given table as a non-empty {@link CharSequence}.
+ *
+ * @see #METHOD_ADD_COLUMN
+ * @hide
+ */
+ @SystemApi
+ public static final String EXTRA_COLUMN_NAME = "android.media.tv.extra.COLUMN_NAME";
+
+ /**
+ * The key for a {@link Bundle} parameter containing the data type of the new column to be added
+ * in the given table as a non-empty {@link CharSequence}, which should be one of the following
+ * values: {@code "TEXT"}, {@code "INTEGER"}, {@code "REAL"}, or {@code "BLOB"}.
+ *
+ * @see #METHOD_ADD_COLUMN
+ * @hide
+ */
    @SystemApi
    public static final String EXTRA_DATA_TYPE = "android.media.tv.extra.DATA_TYPE";

    /**
     * The key for a {@link Bundle} parameter containing the default value of the new column to be
     * added in the given table as a {@link CharSequence}, which represents a valid default value
     * according to the data type provided with {@link #EXTRA_DATA_TYPE}.
     *
     * @see #METHOD_ADD_COLUMN
     * @hide
     */
    @SystemApi
    public static final String EXTRA_DEFAULT_VALUE = "android.media.tv.extra.DEFAULT_VALUE";

    /**
     * The key for a returned {@link Bundle} value containing all the blocked package names as an
     * {@link ArrayList} of {@link String}.
     *
     * @see #METHOD_GET_BLOCKED_PACKAGES
     * @hide
     */
    @SystemApi
    public static final String EXTRA_BLOCKED_PACKAGES = "android.media.tv.extra.BLOCKED_PACKAGES";

    // URI query parameters understood by the TV provider. These are appended to content URIs
    // (see the build*Uri helpers below) to narrow the rows affected by a query/update/delete.

    /**
     * An optional query, update or delete URI parameter that allows the caller to specify TV input
     * ID to filter channels.
     * @hide
     */
    public static final String PARAM_INPUT = "input";

    /**
     * An optional query, update or delete URI parameter that allows the caller to specify channel
     * ID to filter programs.
     * @hide
     */
    public static final String PARAM_CHANNEL = "channel";

    /**
     * An optional query, update or delete URI parameter that allows the caller to specify start
     * time (in milliseconds since the epoch) to filter programs.
     * @hide
     */
    public static final String PARAM_START_TIME = "start_time";

    /**
     * An optional query, update or delete URI parameter that allows the caller to specify end time
     * (in milliseconds since the epoch) to filter programs.
     * @hide
     */
    public static final String PARAM_END_TIME = "end_time";

    /**
     * A query, update or delete URI parameter that allows the caller to operate on all or
     * browsable-only channels. If set to "true", the rows that contain non-browsable channels are
     * not affected.
     * @hide
     */
    public static final String PARAM_BROWSABLE_ONLY = "browsable_only";

    /**
     * An optional query, update or delete URI parameter that allows the caller to specify a
     * canonical genre to filter programs.
     * @hide
     */
    public static final String PARAM_CANONICAL_GENRE = "canonical_genre";

    /**
     * A query, update or delete URI parameter that allows the caller to operate only on preview or
     * non-preview channels. If set to "true", the operation affects the rows for preview channels
     * only. If set to "false", the operation affects the rows for non-preview channels only.
     * @hide
     */
    public static final String PARAM_PREVIEW = "preview";

    /**
     * An optional query, update or delete URI parameter that allows the caller to specify a
     * package name to filter channels.
     * @hide
     */
    public static final String PARAM_PACKAGE = "package";
+
+ /**
+ * Builds an ID that uniquely identifies a TV input service.
+ *
+ * @param name The {@link ComponentName} of the TV input service to build ID for.
+ * @return the ID for the given TV input service.
+ */
+ public static String buildInputId(ComponentName name) {
+ return name.flattenToShortString();
+ }
+
+ /**
+ * Builds a URI that points to a specific channel.
+ *
+ * @param channelId The ID of the channel to point to.
+ */
+ public static Uri buildChannelUri(long channelId) {
+ return ContentUris.withAppendedId(Channels.CONTENT_URI, channelId);
+ }
+
+ /**
+ * Build a special channel URI intended to be used with pass-through inputs. (e.g. HDMI)
+ *
+ * @param inputId The ID of the pass-through input to build a channels URI for.
+ * @see TvInputInfo#isPassthroughInput()
+ */
+ public static Uri buildChannelUriForPassthroughInput(String inputId) {
+ return new Uri.Builder().scheme(ContentResolver.SCHEME_CONTENT).authority(AUTHORITY)
+ .appendPath(PATH_PASSTHROUGH).appendPath(inputId).build();
+ }
+
+ /**
+ * Builds a URI that points to a channel logo. See {@link Channels.Logo}.
+ *
+ * @param channelId The ID of the channel whose logo is pointed to.
+ */
+ public static Uri buildChannelLogoUri(long channelId) {
+ return buildChannelLogoUri(buildChannelUri(channelId));
+ }
+
+ /**
+ * Builds a URI that points to a channel logo. See {@link Channels.Logo}.
+ *
+ * @param channelUri The URI of the channel whose logo is pointed to.
+ */
+ public static Uri buildChannelLogoUri(Uri channelUri) {
+ if (!isChannelUriForTunerInput(channelUri)) {
+ throw new IllegalArgumentException("Not a channel: " + channelUri);
+ }
+ return Uri.withAppendedPath(channelUri, Channels.Logo.CONTENT_DIRECTORY);
+ }
+
+ /**
+ * Builds a URI that points to all channels from a given TV input.
+ *
+ * @param inputId The ID of the TV input to build a channels URI for. If {@code null}, builds a
+ * URI for all the TV inputs.
+ */
+ public static Uri buildChannelsUriForInput(@Nullable String inputId) {
+ return buildChannelsUriForInput(inputId, false);
+ }
+
+ /**
+ * Builds a URI that points to all or browsable-only channels from a given TV input.
+ *
+ * @param inputId The ID of the TV input to build a channels URI for. If {@code null}, builds a
+ * URI for all the TV inputs.
+ * @param browsableOnly If set to {@code true} the URI points to only browsable channels. If set
+ * to {@code false} the URI points to all channels regardless of whether they are
+ * browsable or not.
+ * @hide
+ */
+ @SystemApi
+ public static Uri buildChannelsUriForInput(@Nullable String inputId,
+ boolean browsableOnly) {
+ Uri.Builder builder = Channels.CONTENT_URI.buildUpon();
+ if (inputId != null) {
+ builder.appendQueryParameter(PARAM_INPUT, inputId);
+ }
+ return builder.appendQueryParameter(PARAM_BROWSABLE_ONLY, String.valueOf(browsableOnly))
+ .build();
+ }
+
+ /**
+ * Builds a URI that points to all or browsable-only channels which have programs with the given
+ * genre from the given TV input.
+ *
+ * @param inputId The ID of the TV input to build a channels URI for. If {@code null}, builds a
+ * URI for all the TV inputs.
+ * @param genre {@link Programs.Genres} to search. If {@code null}, builds a URI for all genres.
+ * @param browsableOnly If set to {@code true} the URI points to only browsable channels. If set
+ * to {@code false} the URI points to all channels regardless of whether they are
+ * browsable or not.
+ * @hide
+ */
+ @SystemApi
+ public static Uri buildChannelsUriForInput(@Nullable String inputId,
+ @Nullable String genre, boolean browsableOnly) {
+ if (genre == null) {
+ return buildChannelsUriForInput(inputId, browsableOnly);
+ }
+ if (!Programs.Genres.isCanonical(genre)) {
+ throw new IllegalArgumentException("Not a canonical genre: '" + genre + "'");
+ }
+ return buildChannelsUriForInput(inputId, browsableOnly).buildUpon()
+ .appendQueryParameter(PARAM_CANONICAL_GENRE, genre).build();
+ }
+
+ /**
+ * Builds a URI that points to a specific program.
+ *
+ * @param programId The ID of the program to point to.
+ */
+ public static Uri buildProgramUri(long programId) {
+ return ContentUris.withAppendedId(Programs.CONTENT_URI, programId);
+ }
+
+ /**
+ * Builds a URI that points to all programs on a given channel.
+ *
+ * @param channelId The ID of the channel to return programs for.
+ */
+ public static Uri buildProgramsUriForChannel(long channelId) {
+ return Programs.CONTENT_URI.buildUpon()
+ .appendQueryParameter(PARAM_CHANNEL, String.valueOf(channelId)).build();
+ }
+
+ /**
+ * Builds a URI that points to all programs on a given channel.
+ *
+ * @param channelUri The URI of the channel to return programs for.
+ */
+ public static Uri buildProgramsUriForChannel(Uri channelUri) {
+ if (!isChannelUriForTunerInput(channelUri)) {
+ throw new IllegalArgumentException("Not a channel: " + channelUri);
+ }
+ return buildProgramsUriForChannel(ContentUris.parseId(channelUri));
+ }
+
+ /**
+ * Builds a URI that points to programs on a specific channel whose schedules overlap with the
+ * given time frame.
+ *
+ * @param channelId The ID of the channel to return programs for.
+ * @param startTime The start time used to filter programs. The returned programs will have a
+ * {@link Programs#COLUMN_END_TIME_UTC_MILLIS} that is greater than or equal to
+ {@code startTime}.
+ * @param endTime The end time used to filter programs. The returned programs will have
+ * {@link Programs#COLUMN_START_TIME_UTC_MILLIS} that is less than or equal to
+ * {@code endTime}.
+ */
+ public static Uri buildProgramsUriForChannel(long channelId, long startTime,
+ long endTime) {
+ Uri uri = buildProgramsUriForChannel(channelId);
+ return uri.buildUpon().appendQueryParameter(PARAM_START_TIME, String.valueOf(startTime))
+ .appendQueryParameter(PARAM_END_TIME, String.valueOf(endTime)).build();
+ }
+
+ /**
+ * Builds a URI that points to programs on a specific channel whose schedules overlap with the
+ * given time frame.
+ *
+ * @param channelUri The URI of the channel to return programs for.
+ * @param startTime The start time used to filter programs. The returned programs should have
+ * {@link Programs#COLUMN_END_TIME_UTC_MILLIS} that is greater than this time.
+ * @param endTime The end time used to filter programs. The returned programs should have
+ * {@link Programs#COLUMN_START_TIME_UTC_MILLIS} that is less than this time.
+ */
+ public static Uri buildProgramsUriForChannel(Uri channelUri, long startTime,
+ long endTime) {
+ if (!isChannelUriForTunerInput(channelUri)) {
+ throw new IllegalArgumentException("Not a channel: " + channelUri);
+ }
+ return buildProgramsUriForChannel(ContentUris.parseId(channelUri), startTime, endTime);
+ }
+
+ /**
+ * Builds a URI that points to a specific recorded program.
+ *
+ * @param recordedProgramId The ID of the recorded program to point to.
+ */
+ public static Uri buildRecordedProgramUri(long recordedProgramId) {
+ return ContentUris.withAppendedId(RecordedPrograms.CONTENT_URI, recordedProgramId);
+ }
+
+ /**
+ * Builds a URI that points to a specific preview program.
+ *
+ * @param previewProgramId The ID of the preview program to point to.
+ */
+ public static Uri buildPreviewProgramUri(long previewProgramId) {
+ return ContentUris.withAppendedId(PreviewPrograms.CONTENT_URI, previewProgramId);
+ }
+
+ /**
+ * Builds a URI that points to all preview programs on a given channel.
+ *
+ * @param channelId The ID of the channel to return preview programs for.
+ */
+ public static Uri buildPreviewProgramsUriForChannel(long channelId) {
+ return PreviewPrograms.CONTENT_URI.buildUpon()
+ .appendQueryParameter(PARAM_CHANNEL, String.valueOf(channelId)).build();
+ }
+
+ /**
+ * Builds a URI that points to all preview programs on a given channel.
+ *
+ * @param channelUri The URI of the channel to return preview programs for.
+ */
+ public static Uri buildPreviewProgramsUriForChannel(Uri channelUri) {
+ if (!isChannelUriForTunerInput(channelUri)) {
+ throw new IllegalArgumentException("Not a channel: " + channelUri);
+ }
+ return buildPreviewProgramsUriForChannel(ContentUris.parseId(channelUri));
+ }
+
+ /**
+ * Builds a URI that points to a specific watch next program.
+ *
+ * @param watchNextProgramId The ID of the watch next program to point to.
+ */
+ public static Uri buildWatchNextProgramUri(long watchNextProgramId) {
+ return ContentUris.withAppendedId(WatchNextPrograms.CONTENT_URI, watchNextProgramId);
+ }
+
+ /**
+ * Builds a URI that points to a specific program the user watched.
+ *
+ * @param watchedProgramId The ID of the watched program to point to.
+ * @hide
+ */
+ public static Uri buildWatchedProgramUri(long watchedProgramId) {
+ return ContentUris.withAppendedId(WatchedPrograms.CONTENT_URI, watchedProgramId);
+ }
+
+ private static boolean isTvUri(Uri uri) {
+ return uri != null && ContentResolver.SCHEME_CONTENT.equals(uri.getScheme())
+ && AUTHORITY.equals(uri.getAuthority());
+ }
+
+ private static boolean isTwoSegmentUriStartingWith(Uri uri, String pathSegment) {
+ List<String> pathSegments = uri.getPathSegments();
+ return pathSegments.size() == 2 && pathSegment.equals(pathSegments.get(0));
+ }
+
+ /**
+ * @return {@code true} if {@code uri} is a channel URI.
+ */
+ public static boolean isChannelUri(@NonNull Uri uri) {
+ return isChannelUriForTunerInput(uri) || isChannelUriForPassthroughInput(uri);
+ }
+
+ /**
+ * @return {@code true} if {@code uri} is a channel URI for a tuner input.
+ */
+ public static boolean isChannelUriForTunerInput(@NonNull Uri uri) {
+ return isTvUri(uri) && isTwoSegmentUriStartingWith(uri, PATH_CHANNEL);
+ }
+
+ /**
+ * @return {@code true} if {@code uri} is a channel URI for a pass-through input.
+ */
+ public static boolean isChannelUriForPassthroughInput(@NonNull Uri uri) {
+ return isTvUri(uri) && isTwoSegmentUriStartingWith(uri, PATH_PASSTHROUGH);
+ }
+
+ /**
+ * @return {@code true} if {@code uri} is a program URI.
+ */
+ public static boolean isProgramUri(@NonNull Uri uri) {
+ return isTvUri(uri) && isTwoSegmentUriStartingWith(uri, PATH_PROGRAM);
+ }
+
+ /**
+ * @return {@code true} if {@code uri} is a recorded program URI.
+ */
+ public static boolean isRecordedProgramUri(@NonNull Uri uri) {
+ return isTvUri(uri) && isTwoSegmentUriStartingWith(uri, PATH_RECORDED_PROGRAM);
+ }
+
+ /**
+ * Requests to make a channel browsable.
+ *
+ * <p>Once called, the system will review the request and make the channel browsable based on
+ * its policy. The first request from a package is guaranteed to be approved. This is only
+ * relevant to channels with {@link Channels#TYPE_PREVIEW} type.
+ *
+ * @param context The context for accessing content provider.
+ * @param channelId The channel ID to be browsable.
+ * @see Channels#COLUMN_BROWSABLE
+ */
+ public static void requestChannelBrowsable(Context context, long channelId) {
+ TvInputManager manager = (TvInputManager) context.getSystemService(
+ Context.TV_INPUT_SERVICE);
+ if (manager != null) {
+ manager.requestChannelBrowsable(buildChannelUri(channelId));
+ }
+ }
+
    // Non-instantiable: TvContract only exposes constants and static helper methods.
    private TvContract() {}
+
    /**
     * Common base for the tables of TV channels/programs.
     */
    public interface BaseTvColumns extends BaseColumns {
        /**
         * The name of the package that owns the current row.
         *
         * <p>The TV provider fills in this column with the name of the package that provides the
         * initial data of the row. If the package is later uninstalled, the rows it owns are
         * automatically removed from the tables.
         *
         * <p>Type: TEXT
         */
        String COLUMN_PACKAGE_NAME = "package_name";
    }
+
    /**
     * Common columns for the tables of TV programs.
     * @hide
     */
    interface ProgramColumns {
        /** @hide */
        @IntDef({
                REVIEW_RATING_STYLE_STARS,
                REVIEW_RATING_STYLE_THUMBS_UP_DOWN,
                REVIEW_RATING_STYLE_PERCENTAGE,
        })
        @Retention(RetentionPolicy.SOURCE)
        @interface ReviewRatingStyle {}

        /**
         * The review rating style for five star rating.
         *
         * @see #COLUMN_REVIEW_RATING_STYLE
         */
        int REVIEW_RATING_STYLE_STARS = 0;

        /**
         * The review rating style for thumbs-up and thumbs-down rating.
         *
         * @see #COLUMN_REVIEW_RATING_STYLE
         */
        int REVIEW_RATING_STYLE_THUMBS_UP_DOWN = 1;

        /**
         * The review rating style for 0 to 100 point system.
         *
         * @see #COLUMN_REVIEW_RATING_STYLE
         */
        int REVIEW_RATING_STYLE_PERCENTAGE = 2;

        /**
         * The title of this TV program.
         *
         * <p>If this program is an episodic TV show, it is recommended that the title is the series
         * title and its related fields ({@link #COLUMN_SEASON_TITLE} and/or
         * {@link #COLUMN_SEASON_DISPLAY_NUMBER},
         * {@link #COLUMN_EPISODE_DISPLAY_NUMBER}, and {@link #COLUMN_EPISODE_TITLE}) are filled in.
         *
         * <p>Type: TEXT
         */
        String COLUMN_TITLE = "title";

        /**
         * The season display number of this TV program for episodic TV shows.
         *
         * <p>This is used to indicate the season number. (e.g. 1, 2 or 3) Note that the value
         * need not be numeric. (e.g. 12B)
         *
         * <p>Can be empty.
         *
         * <p>Type: TEXT
         */
        String COLUMN_SEASON_DISPLAY_NUMBER = "season_display_number";

        /**
         * The title of the season for this TV program for episodic TV shows.
         *
         * <p>This is an optional field supplied only when the season has a special title
         * (e.g. The Final Season). If provided, the applications should display it instead of
         * {@link #COLUMN_SEASON_DISPLAY_NUMBER}, and should display it without alterations.
         * (e.g. for "The Final Season", displayed string should be "The Final Season", not
         * "Season The Final Season"). When displaying multiple programs, the order should be based
         * on {@link #COLUMN_SEASON_DISPLAY_NUMBER}, even when {@link #COLUMN_SEASON_TITLE} exists.
         *
         * <p>Can be empty.
         *
         * <p>Type: TEXT
         */
        String COLUMN_SEASON_TITLE = "season_title";

        /**
         * The episode display number of this TV program for episodic TV shows.
         *
         * <p>This is used to indicate the episode number. (e.g. 1, 2 or 3) Note that the value
         * need not be numeric. (e.g. 12B)
         *
         * <p>Can be empty.
         *
         * <p>Type: TEXT
         */
        String COLUMN_EPISODE_DISPLAY_NUMBER = "episode_display_number";

        /**
         * The episode title of this TV program for episodic TV shows.
         *
         * <p>Can be empty.
         *
         * <p>Type: TEXT
         */
        String COLUMN_EPISODE_TITLE = "episode_title";

        /**
         * The comma-separated canonical genre string of this TV program.
         *
         * <p>Canonical genres are defined in {@link Genres}. Use {@link Genres#encode} to create a
         * text that can be stored in this column. Use {@link Genres#decode} to get the canonical
         * genre strings from the text stored in the column.
         *
         * <p>Type: TEXT
         * @see Genres
         * @see Genres#encode
         * @see Genres#decode
         */
        String COLUMN_CANONICAL_GENRE = "canonical_genre";

        /**
         * The short description of this TV program that is displayed to the user by default.
         *
         * <p>It is recommended to limit the length of the descriptions to 256 characters.
         *
         * <p>Type: TEXT
         */
        String COLUMN_SHORT_DESCRIPTION = "short_description";

        /**
         * The detailed, lengthy description of this TV program that is displayed only when the user
         * wants to see more information.
         *
         * <p>TV input services should leave this field empty if they have no additional details
         * beyond {@link #COLUMN_SHORT_DESCRIPTION}.
         *
         * <p>Type: TEXT
         */
        String COLUMN_LONG_DESCRIPTION = "long_description";

        /**
         * The width of the video for this TV program, in the unit of pixels.
         *
         * <p>Together with {@link #COLUMN_VIDEO_HEIGHT} this is used to determine the video
         * resolution of the current TV program. Can be empty if it is not known initially or the
         * program does not convey any video such as the programs from type
         * {@link Channels#SERVICE_TYPE_AUDIO} channels.
         *
         * <p>Type: INTEGER
         */
        String COLUMN_VIDEO_WIDTH = "video_width";

        /**
         * The height of the video for this TV program, in the unit of pixels.
         *
         * <p>Together with {@link #COLUMN_VIDEO_WIDTH} this is used to determine the video
         * resolution of the current TV program. Can be empty if it is not known initially or the
         * program does not convey any video such as the programs from type
         * {@link Channels#SERVICE_TYPE_AUDIO} channels.
         *
         * <p>Type: INTEGER
         */
        String COLUMN_VIDEO_HEIGHT = "video_height";

        /**
         * The comma-separated audio languages of this TV program.
         *
         * <p>This is used to describe available audio languages included in the program. Use either
         * ISO 639-1 or 639-2/T codes.
         *
         * <p>Type: TEXT
         */
        String COLUMN_AUDIO_LANGUAGE = "audio_language";

        /**
         * The comma-separated content ratings of this TV program.
         *
         * <p>This is used to describe the content rating(s) of this program. Each comma-separated
         * content rating sub-string should be generated by calling
         * {@link TvContentRating#flattenToString}. Note that in most cases the program content is
         * rated by a single rating system, thus resulting in a corresponding single sub-string that
         * does not require comma separation and multiple sub-strings appear only when the program
         * content is rated by two or more content rating systems. If any of those ratings is
         * specified as "blocked rating" in the user's parental control settings, the TV input
         * service should block the current content and wait for the signal that it is okay to
         * unblock.
         *
         * <p>Type: TEXT
         */
        String COLUMN_CONTENT_RATING = "content_rating";

        /**
         * The URI for the poster art of this TV program.
         *
         * <p>The data in the column must be a URL, or a URI in one of the following formats:
         *
         * <ul>
         * <li>content ({@link android.content.ContentResolver#SCHEME_CONTENT})</li>
         * <li>android.resource ({@link android.content.ContentResolver#SCHEME_ANDROID_RESOURCE})
         * </li>
         * <li>file ({@link android.content.ContentResolver#SCHEME_FILE})</li>
         * </ul>
         *
         * <p>Can be empty.
         *
         * <p>Type: TEXT
         */
        String COLUMN_POSTER_ART_URI = "poster_art_uri";

        /**
         * The URI for the thumbnail of this TV program.
         *
         * <p>The system can generate a thumbnail from the poster art if this column is not
         * specified. Thus it is not necessary for TV input services to include a thumbnail if it is
         * just a scaled image of the poster art.
         *
         * <p>The data in the column must be a URL, or a URI in one of the following formats:
         *
         * <ul>
         * <li>content ({@link android.content.ContentResolver#SCHEME_CONTENT})</li>
         * <li>android.resource ({@link android.content.ContentResolver#SCHEME_ANDROID_RESOURCE})
         * </li>
         * <li>file ({@link android.content.ContentResolver#SCHEME_FILE})</li>
         * </ul>
         *
         * <p>Can be empty.
         *
         * <p>Type: TEXT
         */
        String COLUMN_THUMBNAIL_URI = "thumbnail_uri";

        /**
         * The flag indicating whether this TV program is searchable or not.
         *
         * <p>The columns of searchable programs can be read by other applications that have proper
         * permission. Care must be taken not to open sensitive data.
         *
         * <p>A value of 1 indicates that the program is searchable and its columns can be read by
         * other applications, a value of 0 indicates that the program is hidden and its columns can
         * be read only by the package that owns the program and the system. If not specified, this
         * value is set to 1 (searchable) by default.
         *
         * <p>Type: INTEGER (boolean)
         */
        String COLUMN_SEARCHABLE = "searchable";

        /**
         * Internal data used by individual TV input services.
         *
         * <p>This is internal to the provider that inserted it, and should not be decoded by other
         * apps.
         *
         * <p>Type: BLOB
         */
        String COLUMN_INTERNAL_PROVIDER_DATA = "internal_provider_data";

        /**
         * Internal integer flag used by individual TV input services.
         *
         * <p>This is internal to the provider that inserted it, and should not be decoded by other
         * apps.
         *
         * <p>Type: INTEGER
         */
        String COLUMN_INTERNAL_PROVIDER_FLAG1 = "internal_provider_flag1";

        /**
         * Internal integer flag used by individual TV input services.
         *
         * <p>This is internal to the provider that inserted it, and should not be decoded by other
         * apps.
         *
         * <p>Type: INTEGER
         */
        String COLUMN_INTERNAL_PROVIDER_FLAG2 = "internal_provider_flag2";

        /**
         * Internal integer flag used by individual TV input services.
         *
         * <p>This is internal to the provider that inserted it, and should not be decoded by other
         * apps.
         *
         * <p>Type: INTEGER
         */
        String COLUMN_INTERNAL_PROVIDER_FLAG3 = "internal_provider_flag3";

        /**
         * Internal integer flag used by individual TV input services.
         *
         * <p>This is internal to the provider that inserted it, and should not be decoded by other
         * apps.
         *
         * <p>Type: INTEGER
         */
        String COLUMN_INTERNAL_PROVIDER_FLAG4 = "internal_provider_flag4";

        /**
         * The version number of this row entry used by TV input services.
         *
         * <p>This is best used by sync adapters to identify the rows to update. The number can be
         * defined by individual TV input services. One may assign the same value as
         * {@code version_number} in ETSI EN 300 468 or ATSC A/65, if the data are coming from a TV
         * broadcast.
         *
         * <p>Type: INTEGER
         */
        String COLUMN_VERSION_NUMBER = "version_number";

        /**
         * The review rating score style used for {@link #COLUMN_REVIEW_RATING}.
         *
         * <p>The value should match one of the following: {@link #REVIEW_RATING_STYLE_STARS},
         * {@link #REVIEW_RATING_STYLE_THUMBS_UP_DOWN}, and {@link #REVIEW_RATING_STYLE_PERCENTAGE}.
         *
         * <p>Type: INTEGER
         * @see #COLUMN_REVIEW_RATING
         */
        String COLUMN_REVIEW_RATING_STYLE = "review_rating_style";

        /**
         * The review rating score for this program.
         *
         * <p>The format of the value is dependent on {@link #COLUMN_REVIEW_RATING_STYLE}. If the
         * style is {@link #REVIEW_RATING_STYLE_STARS}, the value should be a real number between
         * 0.0 and 5.0. (e.g. "4.5") If the style is {@link #REVIEW_RATING_STYLE_THUMBS_UP_DOWN},
         * the value should be two integers, one for thumbs-up count and the other for thumbs-down
         * count, with a comma between them. (e.g. "200,40") If the style is
         * {@link #REVIEW_RATING_STYLE_PERCENTAGE}, the value should be a real number between 0 and
         * 100. (e.g. "99.9")
         *
         * <p>Type: TEXT
         * @see #COLUMN_REVIEW_RATING_STYLE
         */
        String COLUMN_REVIEW_RATING = "review_rating";
    }
+
+ /**
+ * Common columns for the tables of preview programs.
+ * @hide
+ */
+ interface PreviewProgramColumns {
+
+ /** @hide */
+ @IntDef({
+ TYPE_MOVIE,
+ TYPE_TV_SERIES,
+ TYPE_TV_SEASON,
+ TYPE_TV_EPISODE,
+ TYPE_CLIP,
+ TYPE_EVENT,
+ TYPE_CHANNEL,
+ TYPE_TRACK,
+ TYPE_ALBUM,
+ TYPE_ARTIST,
+ TYPE_PLAYLIST,
+ TYPE_STATION,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface Type {}
+
+ /**
+ * The program type for movie.
+ *
+ * @see #COLUMN_TYPE
+ */
+ int TYPE_MOVIE = 0;
+
+ /**
+ * The program type for TV series.
+ *
+ * @see #COLUMN_TYPE
+ */
+ int TYPE_TV_SERIES = 1;
+
+ /**
+ * The program type for TV season.
+ *
+ * @see #COLUMN_TYPE
+ */
+ int TYPE_TV_SEASON = 2;
+
+ /**
+ * The program type for TV episode.
+ *
+ * @see #COLUMN_TYPE
+ */
+ int TYPE_TV_EPISODE = 3;
+
+ /**
+ * The program type for clip.
+ *
+ * @see #COLUMN_TYPE
+ */
+ int TYPE_CLIP = 4;
+
+ /**
+ * The program type for event.
+ *
+ * @see #COLUMN_TYPE
+ */
+ int TYPE_EVENT = 5;
+
+ /**
+ * The program type for channel.
+ *
+ * @see #COLUMN_TYPE
+ */
+ int TYPE_CHANNEL = 6;
+
+ /**
+ * The program type for track.
+ *
+ * @see #COLUMN_TYPE
+ */
+ int TYPE_TRACK = 7;
+
+ /**
+ * The program type for album.
+ *
+ * @see #COLUMN_TYPE
+ */
+ int TYPE_ALBUM = 8;
+
+ /**
+ * The program type for artist.
+ *
+ * @see #COLUMN_TYPE
+ */
+ int TYPE_ARTIST = 9;
+
+ /**
+ * The program type for playlist.
+ *
+ * @see #COLUMN_TYPE
+ */
+ int TYPE_PLAYLIST = 10;
+
+ /**
+ * The program type for station.
+ *
+ * @see #COLUMN_TYPE
+ */
+ int TYPE_STATION = 11;
+
+ /** @hide */
+ @IntDef({
+ ASPECT_RATIO_16_9,
+ ASPECT_RATIO_3_2,
+ ASPECT_RATIO_1_1,
+ ASPECT_RATIO_2_3,
+ ASPECT_RATIO_4_3,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface AspectRatio {}
+
+ /**
+ * The aspect ratio for 16:9.
+ *
+ * @see #COLUMN_POSTER_ART_ASPECT_RATIO
+ * @see #COLUMN_THUMBNAIL_ASPECT_RATIO
+ */
+ int ASPECT_RATIO_16_9 = 0;
+
+ /**
+ * The aspect ratio for 3:2.
+ *
+ * @see #COLUMN_POSTER_ART_ASPECT_RATIO
+ * @see #COLUMN_THUMBNAIL_ASPECT_RATIO
+ */
+ int ASPECT_RATIO_3_2 = 1;
+
+ /**
+ * The aspect ratio for 4:3.
+ *
+ * @see #COLUMN_POSTER_ART_ASPECT_RATIO
+ * @see #COLUMN_THUMBNAIL_ASPECT_RATIO
+ */
+ int ASPECT_RATIO_4_3 = 2;
+
+ /**
+ * The aspect ratio for 1:1.
+ *
+ * @see #COLUMN_POSTER_ART_ASPECT_RATIO
+ * @see #COLUMN_THUMBNAIL_ASPECT_RATIO
+ */
+ int ASPECT_RATIO_1_1 = 3;
+
+ /**
+ * The aspect ratio for 2:3.
+ *
+ * @see #COLUMN_POSTER_ART_ASPECT_RATIO
+ * @see #COLUMN_THUMBNAIL_ASPECT_RATIO
+ */
+ int ASPECT_RATIO_2_3 = 4;
+
+ /** @hide */
+ @IntDef({
+ AVAILABILITY_AVAILABLE,
+ AVAILABILITY_FREE_WITH_SUBSCRIPTION,
+ AVAILABILITY_PAID_CONTENT,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface Availability {}
+
+ /**
+ * The availability for "available to this user".
+ *
+ * @see #COLUMN_AVAILABILITY
+ */
+ int AVAILABILITY_AVAILABLE = 0;
+
+ /**
+ * The availability for "free with subscription".
+ *
+ * @see #COLUMN_AVAILABILITY
+ */
+ int AVAILABILITY_FREE_WITH_SUBSCRIPTION = 1;
+
+ /**
         * The availability for "paid content, either to-own or rental
         * (user has not purchased/rented)".
+ *
+ * @see #COLUMN_AVAILABILITY
+ */
+ int AVAILABILITY_PAID_CONTENT = 2;
+
+ /** @hide */
+ @IntDef({
+ INTERACTION_TYPE_VIEWS,
+ INTERACTION_TYPE_LISTENS,
+ INTERACTION_TYPE_FOLLOWERS,
+ INTERACTION_TYPE_FANS,
+ INTERACTION_TYPE_LIKES,
+ INTERACTION_TYPE_THUMBS,
+ INTERACTION_TYPE_VIEWERS,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface InteractionType {}
+
+ /**
+ * The interaction type for "views".
+ *
+ * @see #COLUMN_INTERACTION_TYPE
+ */
+ int INTERACTION_TYPE_VIEWS = 0;
+
+ /**
+ * The interaction type for "listens".
+ *
+ * @see #COLUMN_INTERACTION_TYPE
+ */
+ int INTERACTION_TYPE_LISTENS = 1;
+
+ /**
+ * The interaction type for "followers".
+ *
+ * @see #COLUMN_INTERACTION_TYPE
+ */
+ int INTERACTION_TYPE_FOLLOWERS = 2;
+
+ /**
+ * The interaction type for "fans".
+ *
+ * @see #COLUMN_INTERACTION_TYPE
+ */
+ int INTERACTION_TYPE_FANS = 3;
+
+ /**
+ * The interaction type for "likes".
+ *
+ * @see #COLUMN_INTERACTION_TYPE
+ */
+ int INTERACTION_TYPE_LIKES = 4;
+
+ /**
+ * The interaction type for "thumbs".
+ *
+ * @see #COLUMN_INTERACTION_TYPE
+ */
+ int INTERACTION_TYPE_THUMBS = 5;
+
+ /**
+ * The interaction type for "viewers".
+ *
+ * @see #COLUMN_INTERACTION_TYPE
+ */
+ int INTERACTION_TYPE_VIEWERS = 6;
+
+ /**
+ * The type of this program content.
+ *
+ * <p>The value should match one of the followings:
+ * {@link #TYPE_MOVIE},
+ * {@link #TYPE_TV_SERIES},
+ * {@link #TYPE_TV_SEASON},
+ * {@link #TYPE_TV_EPISODE},
+ * {@link #TYPE_CLIP},
+ * {@link #TYPE_EVENT},
+ * {@link #TYPE_CHANNEL},
+ * {@link #TYPE_TRACK},
+ * {@link #TYPE_ALBUM},
+ * {@link #TYPE_ARTIST},
+ * {@link #TYPE_PLAYLIST}, and
+ * {@link #TYPE_STATION}.
+ *
+ * <p>This is a required field if the program is from a {@link Channels#TYPE_PREVIEW}
+ * channel.
+ *
+ * <p>Type: INTEGER
+ */
+ String COLUMN_TYPE = "type";
+
+ /**
+ * The aspect ratio of the poster art for this TV program.
+ *
+ * <p>The value should match one of the followings:
+ * {@link #ASPECT_RATIO_16_9},
+ * {@link #ASPECT_RATIO_3_2},
+ * {@link #ASPECT_RATIO_4_3},
+ * {@link #ASPECT_RATIO_1_1}, and
+ * {@link #ASPECT_RATIO_2_3}.
+ *
+ * <p>Type: INTEGER
+ */
+ String COLUMN_POSTER_ART_ASPECT_RATIO = "poster_art_aspect_ratio";
+
+ /**
+ * The aspect ratio of the thumbnail for this TV program.
+ *
+ * <p>The value should match one of the followings:
+ * {@link #ASPECT_RATIO_16_9},
+ * {@link #ASPECT_RATIO_3_2},
+ * {@link #ASPECT_RATIO_4_3},
+ * {@link #ASPECT_RATIO_1_1}, and
+ * {@link #ASPECT_RATIO_2_3}.
+ *
+ * <p>Type: INTEGER
+ */
+ String COLUMN_THUMBNAIL_ASPECT_RATIO = "poster_thumbnail_aspect_ratio";
+
+ /**
+ * The URI for the logo of this TV program.
+ *
+ * <p>This is a small badge shown on top of the poster art or thumbnail representing the
+ * source of the content.
+ *
+ * <p>The data in the column must be a URL, or a URI in one of the following formats:
+ *
+ * <ul>
+ * <li>content ({@link android.content.ContentResolver#SCHEME_CONTENT})</li>
+ * <li>android.resource ({@link android.content.ContentResolver#SCHEME_ANDROID_RESOURCE})
+ * </li>
+ * <li>file ({@link android.content.ContentResolver#SCHEME_FILE})</li>
+ * </ul>
+ *
+ * <p>Can be empty.
+ *
+ * <p>Type: TEXT
+ */
+ String COLUMN_LOGO_URI = "logo_uri";
+
+ /**
+ * The availability of this TV program.
+ *
+ * <p>The value should match one of the followings:
+ * {@link #AVAILABILITY_AVAILABLE},
+ * {@link #AVAILABILITY_FREE_WITH_SUBSCRIPTION}, and
+ * {@link #AVAILABILITY_PAID_CONTENT}.
+ *
+ * <p>Type: INTEGER
+ */
+ String COLUMN_AVAILABILITY = "availability";
+
+ /**
+ * The starting price of this TV program.
+ *
+ * <p>This indicates the lowest regular acquisition cost of the content. It is only used
+ * if the availability of the program is {@link #AVAILABILITY_PAID_CONTENT}.
+ *
+ * <p>Type: TEXT
+ * @see #COLUMN_OFFER_PRICE
+ */
+ String COLUMN_STARTING_PRICE = "starting_price";
+
+ /**
+ * The offer price of this TV program.
+ *
+ * <p>This is the promotional cost of the content. It is only used if the availability of
+ * the program is {@link #AVAILABILITY_PAID_CONTENT}.
+ *
+ * <p>Type: TEXT
+ * @see #COLUMN_STARTING_PRICE
+ */
+ String COLUMN_OFFER_PRICE = "offer_price";
+
+ /**
+ * The release date of this TV program.
+ *
+ * <p>The value should be in one of the following formats:
+ * "yyyy", "yyyy-MM-dd", and "yyyy-MM-ddTHH:mm:ssZ" (UTC in ISO 8601).
+ *
+ * <p>Type: TEXT
+ */
+ String COLUMN_RELEASE_DATE = "release_date";
+
+ /**
+ * The count of the items included in this TV program.
+ *
+ * <p>This is only relevant if the program represents a collection of items such as series,
+ * episodes, or music tracks.
+ *
+ * <p>Type: INTEGER
+ */
+ String COLUMN_ITEM_COUNT = "item_count";
+
+ /**
+ * The flag indicating whether this TV program is live or not.
+ *
+ * <p>A value of 1 indicates that the content is airing and should be consumed now, a value
+ * of 0 indicates that the content is off the air and does not need to be consumed at the
+ * present time. If not specified, the value is set to 0 (not live) by default.
+ *
+ * <p>Type: INTEGER (boolean)
+ */
+ String COLUMN_LIVE = "live";
+
+ /**
+ * The internal ID used by individual TV input services.
+ *
+ * <p>This is internal to the provider that inserted it, and should not be decoded by other
+ * apps.
+ *
+ * <p>Can be empty.
+ *
+ * <p>Type: TEXT
+ */
+ String COLUMN_INTERNAL_PROVIDER_ID = "internal_provider_id";
+
+ /**
+ * The URI for the preview video.
+ *
+ * <p>The data in the column must be a URL, or a URI in one of the following formats:
+ *
+ * <ul>
+ * <li>content ({@link android.content.ContentResolver#SCHEME_CONTENT})</li>
+ * <li>android.resource ({@link android.content.ContentResolver#SCHEME_ANDROID_RESOURCE})
+ * </li>
+ * <li>file ({@link android.content.ContentResolver#SCHEME_FILE})</li>
+ * </ul>
+ *
+ * <p>Can be empty.
+ *
+ * <p>Type: TEXT
+ */
+ String COLUMN_PREVIEW_VIDEO_URI = "preview_video_uri";
+
+ /**
+ * The last playback position (in milliseconds) of the original content of this preview
+ * program.
+ *
+ * <p>Can be empty.
+ *
+ * <p>Type: INTEGER
+ */
+ String COLUMN_LAST_PLAYBACK_POSITION_MILLIS =
+ "last_playback_position_millis";
+
+ /**
+ * The duration (in milliseconds) of the original content of this preview program.
+ *
+ * <p>Can be empty.
+ *
+ * <p>Type: INTEGER
+ */
+ String COLUMN_DURATION_MILLIS = "duration_millis";
+
+ /**
+ * The intent URI which is launched when the preview program is selected.
+ *
+ * <p>The URI is created using {@link Intent#toUri} with {@link Intent#URI_INTENT_SCHEME}
+ * and converted back to the original intent with {@link Intent#parseUri}. The intent is
+ * launched when the user selects the preview program item.
+ *
+ * <p>Can be empty.
+ *
+ * <p>Type: TEXT
+ */
+ String COLUMN_INTENT_URI = "intent_uri";
+
+ /**
+ * The flag indicating whether this program is transient or not.
+ *
+ * <p>A value of 1 indicates that the channel will be automatically removed by the system on
+ * reboot, and a value of 0 indicates that the channel is persistent across reboot. If not
+ * specified, this value is set to 0 (not transient) by default.
+ *
+ * <p>Type: INTEGER (boolean)
+ * @see Channels#COLUMN_TRANSIENT
+ */
+ String COLUMN_TRANSIENT = "transient";
+
+ /**
+ * The type of interaction for this TV program.
+ *
+ * <p> The value should match one of the followings:
+ * {@link #INTERACTION_TYPE_VIEWS},
+ * {@link #INTERACTION_TYPE_LISTENS},
+ * {@link #INTERACTION_TYPE_FOLLOWERS},
+ * {@link #INTERACTION_TYPE_FANS},
+ * {@link #INTERACTION_TYPE_LIKES},
+ * {@link #INTERACTION_TYPE_THUMBS}, and
+ * {@link #INTERACTION_TYPE_VIEWERS}.
+ *
+ * <p>Type: INTEGER
+ * @see #COLUMN_INTERACTION_COUNT
+ */
+ String COLUMN_INTERACTION_TYPE = "interaction_type";
+
+ /**
+ * The interaction count for this program.
+ *
+ * <p>This indicates the number of times interaction has happened.
+ *
+ * <p>Type: INTEGER (long)
+ * @see #COLUMN_INTERACTION_TYPE
+ */
+ String COLUMN_INTERACTION_COUNT = "interaction_count";
+
+ /**
+ * The author or artist of this content.
+ *
+ * <p>Type: TEXT
+ */
+ String COLUMN_AUTHOR = "author";
+
+ /**
+ * The flag indicating whether this TV program is browsable or not.
+ *
+ * <p>This column can only be set by applications having proper system permission. For
+ * other applications, this is a read-only column.
+ *
+ * <p>A value of 1 indicates that the program is browsable and can be shown to users in
+ * the UI. A value of 0 indicates that the program should be hidden from users and the
+ * application who changes this value to 0 should send
+ * {@link #ACTION_WATCH_NEXT_PROGRAM_BROWSABLE_DISABLED} to the owner of the program
+ * to notify this change.
+ *
+ * <p>This value is set to 1 (browsable) by default.
+ *
+ * <p>Type: INTEGER (boolean)
+ */
+ String COLUMN_BROWSABLE = "browsable";
+
+ /**
+ * The content ID of this TV program.
+ *
+ * <p>A public ID of the content which allows the application to apply the same operation to
+ * all the program copies in different channels.
+ *
+ * <p>Can be empty.
+ *
+ * <p>Type: TEXT
+ */
+ String COLUMN_CONTENT_ID = "content_id";
+
+ }
+
+ /** Column definitions for the TV channels table. */
+ public static final class Channels implements BaseTvColumns {
+
+ /**
+ * The content:// style URI for this table.
+ *
+ * <p>SQL selection is not supported for {@link ContentResolver#query},
+ * {@link ContentResolver#update} and {@link ContentResolver#delete} operations.
+ */
+ public static final Uri CONTENT_URI = Uri.parse("content://" + AUTHORITY + "/"
+ + PATH_CHANNEL);
+
+ /** The MIME type of a directory of TV channels. */
+ public static final String CONTENT_TYPE = "vnd.android.cursor.dir/channel";
+
+ /** The MIME type of a single TV channel. */
+ public static final String CONTENT_ITEM_TYPE = "vnd.android.cursor.item/channel";
+
+ /** @hide */
+ @StringDef({
+ TYPE_OTHER,
+ TYPE_NTSC,
+ TYPE_PAL,
+ TYPE_SECAM,
+ TYPE_DVB_T,
+ TYPE_DVB_T2,
+ TYPE_DVB_S,
+ TYPE_DVB_S2,
+ TYPE_DVB_C,
+ TYPE_DVB_C2,
+ TYPE_DVB_H,
+ TYPE_DVB_SH,
+ TYPE_ATSC_T,
+ TYPE_ATSC_C,
+ TYPE_ATSC_M_H,
+ TYPE_ISDB_T,
+ TYPE_ISDB_TB,
+ TYPE_ISDB_S,
+ TYPE_ISDB_C,
+ TYPE_1SEG,
+ TYPE_DTMB,
+ TYPE_CMMB,
+ TYPE_T_DMB,
+ TYPE_S_DMB,
+ TYPE_PREVIEW,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface Type {}
+
+ /**
+ * A generic channel type.
+ *
+ * Use this if the current channel is streaming-based or its broadcast system type does not
+ * fit under any other types. This is the default channel type.
+ *
+ * @see #COLUMN_TYPE
+ */
+ public static final String TYPE_OTHER = "TYPE_OTHER";
+
+ /**
+ * The channel type for NTSC.
+ *
+ * @see #COLUMN_TYPE
+ */
+ public static final String TYPE_NTSC = "TYPE_NTSC";
+
+ /**
+ * The channel type for PAL.
+ *
+ * @see #COLUMN_TYPE
+ */
+ public static final String TYPE_PAL = "TYPE_PAL";
+
+ /**
+ * The channel type for SECAM.
+ *
+ * @see #COLUMN_TYPE
+ */
+ public static final String TYPE_SECAM = "TYPE_SECAM";
+
+ /**
+ * The channel type for DVB-T (terrestrial).
+ *
+ * @see #COLUMN_TYPE
+ */
+ public static final String TYPE_DVB_T = "TYPE_DVB_T";
+
+ /**
+ * The channel type for DVB-T2 (terrestrial).
+ *
+ * @see #COLUMN_TYPE
+ */
+ public static final String TYPE_DVB_T2 = "TYPE_DVB_T2";
+
+ /**
+ * The channel type for DVB-S (satellite).
+ *
+ * @see #COLUMN_TYPE
+ */
+ public static final String TYPE_DVB_S = "TYPE_DVB_S";
+
+ /**
+ * The channel type for DVB-S2 (satellite).
+ *
+ * @see #COLUMN_TYPE
+ */
+ public static final String TYPE_DVB_S2 = "TYPE_DVB_S2";
+
+ /**
+ * The channel type for DVB-C (cable).
+ *
+ * @see #COLUMN_TYPE
+ */
+ public static final String TYPE_DVB_C = "TYPE_DVB_C";
+
+ /**
+ * The channel type for DVB-C2 (cable).
+ *
+ * @see #COLUMN_TYPE
+ */
+ public static final String TYPE_DVB_C2 = "TYPE_DVB_C2";
+
+ /**
+ * The channel type for DVB-H (handheld).
+ *
+ * @see #COLUMN_TYPE
+ */
+ public static final String TYPE_DVB_H = "TYPE_DVB_H";
+
+ /**
+ * The channel type for DVB-SH (satellite).
+ *
+ * @see #COLUMN_TYPE
+ */
+ public static final String TYPE_DVB_SH = "TYPE_DVB_SH";
+
+ /**
+ * The channel type for ATSC (terrestrial).
+ *
+ * @see #COLUMN_TYPE
+ */
+ public static final String TYPE_ATSC_T = "TYPE_ATSC_T";
+
+ /**
+ * The channel type for ATSC (cable).
+ *
+ * @see #COLUMN_TYPE
+ */
+ public static final String TYPE_ATSC_C = "TYPE_ATSC_C";
+
+ /**
+ * The channel type for ATSC-M/H (mobile/handheld).
+ *
+ * @see #COLUMN_TYPE
+ */
+ public static final String TYPE_ATSC_M_H = "TYPE_ATSC_M_H";
+
+ /**
+ * The channel type for ISDB-T (terrestrial).
+ *
+ * @see #COLUMN_TYPE
+ */
+ public static final String TYPE_ISDB_T = "TYPE_ISDB_T";
+
+ /**
+ * The channel type for ISDB-Tb (Brazil).
+ *
+ * @see #COLUMN_TYPE
+ */
+ public static final String TYPE_ISDB_TB = "TYPE_ISDB_TB";
+
+ /**
+ * The channel type for ISDB-S (satellite).
+ *
+ * @see #COLUMN_TYPE
+ */
+ public static final String TYPE_ISDB_S = "TYPE_ISDB_S";
+
+ /**
+ * The channel type for ISDB-C (cable).
+ *
+ * @see #COLUMN_TYPE
+ */
+ public static final String TYPE_ISDB_C = "TYPE_ISDB_C";
+
+ /**
+ * The channel type for 1seg (handheld).
+ *
+ * @see #COLUMN_TYPE
+ */
+ public static final String TYPE_1SEG = "TYPE_1SEG";
+
+ /**
+ * The channel type for DTMB (terrestrial).
+ *
+ * @see #COLUMN_TYPE
+ */
+ public static final String TYPE_DTMB = "TYPE_DTMB";
+
+ /**
+ * The channel type for CMMB (handheld).
+ *
+ * @see #COLUMN_TYPE
+ */
+ public static final String TYPE_CMMB = "TYPE_CMMB";
+
+ /**
+ * The channel type for T-DMB (terrestrial).
+ *
+ * @see #COLUMN_TYPE
+ */
+ public static final String TYPE_T_DMB = "TYPE_T_DMB";
+
+ /**
+ * The channel type for S-DMB (satellite).
+ *
+ * @see #COLUMN_TYPE
+ */
+ public static final String TYPE_S_DMB = "TYPE_S_DMB";
+
+ /**
+ * The channel type for preview videos.
+ *
+ * <P>Unlike other broadcast TV channel types, the programs in the preview channel usually
+ * are promotional videos. The UI may treat the preview channels differently from the other
+ * broadcast channels.
+ *
+ * @see #COLUMN_TYPE
+ */
+ public static final String TYPE_PREVIEW = "TYPE_PREVIEW";
+
+ /** @hide */
+ @StringDef({
+ SERVICE_TYPE_OTHER,
+ SERVICE_TYPE_AUDIO_VIDEO,
+ SERVICE_TYPE_AUDIO,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface ServiceType {}
+
+ /** A generic service type. */
+ public static final String SERVICE_TYPE_OTHER = "SERVICE_TYPE_OTHER";
+
+ /** The service type for regular TV channels that have both audio and video. */
+ public static final String SERVICE_TYPE_AUDIO_VIDEO = "SERVICE_TYPE_AUDIO_VIDEO";
+
+ /** The service type for radio channels that have audio only. */
+ public static final String SERVICE_TYPE_AUDIO = "SERVICE_TYPE_AUDIO";
+
+ /** @hide */
+ // NOTE(review): VIDEO_FORMAT_480P was missing from this @StringDef even though it is a
+ // declared format below and is mapped in VIDEO_FORMAT_TO_RESOLUTION_MAP; without it, lint
+ // flags a legitimate COLUMN_VIDEO_FORMAT value. Added to make the def cover all formats.
+ @StringDef({
+         VIDEO_FORMAT_240P,
+         VIDEO_FORMAT_360P,
+         VIDEO_FORMAT_480I,
+         VIDEO_FORMAT_480P,
+         VIDEO_FORMAT_576I,
+         VIDEO_FORMAT_576P,
+         VIDEO_FORMAT_720P,
+         VIDEO_FORMAT_1080I,
+         VIDEO_FORMAT_1080P,
+         VIDEO_FORMAT_2160P,
+         VIDEO_FORMAT_4320P,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface VideoFormat {}
+
+ /** The video format for 240p. */
+ public static final String VIDEO_FORMAT_240P = "VIDEO_FORMAT_240P";
+
+ /** The video format for 360p. */
+ public static final String VIDEO_FORMAT_360P = "VIDEO_FORMAT_360P";
+
+ /** The video format for 480i. */
+ public static final String VIDEO_FORMAT_480I = "VIDEO_FORMAT_480I";
+
+ /** The video format for 480p. */
+ public static final String VIDEO_FORMAT_480P = "VIDEO_FORMAT_480P";
+
+ /** The video format for 576i. */
+ public static final String VIDEO_FORMAT_576I = "VIDEO_FORMAT_576I";
+
+ /** The video format for 576p. */
+ public static final String VIDEO_FORMAT_576P = "VIDEO_FORMAT_576P";
+
+ /** The video format for 720p. */
+ public static final String VIDEO_FORMAT_720P = "VIDEO_FORMAT_720P";
+
+ /** The video format for 1080i. */
+ public static final String VIDEO_FORMAT_1080I = "VIDEO_FORMAT_1080I";
+
+ /** The video format for 1080p. */
+ public static final String VIDEO_FORMAT_1080P = "VIDEO_FORMAT_1080P";
+
+ /** The video format for 2160p. */
+ public static final String VIDEO_FORMAT_2160P = "VIDEO_FORMAT_2160P";
+
+ /** The video format for 4320p. */
+ public static final String VIDEO_FORMAT_4320P = "VIDEO_FORMAT_4320P";
+
+ /** @hide */
+ @StringDef({
+ VIDEO_RESOLUTION_SD,
+ VIDEO_RESOLUTION_ED,
+ VIDEO_RESOLUTION_HD,
+ VIDEO_RESOLUTION_FHD,
+ VIDEO_RESOLUTION_UHD,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface VideoResolution {}
+
+ /** The video resolution for standard-definition. */
+ public static final String VIDEO_RESOLUTION_SD = "VIDEO_RESOLUTION_SD";
+
+ /** The video resolution for enhanced-definition. */
+ public static final String VIDEO_RESOLUTION_ED = "VIDEO_RESOLUTION_ED";
+
+ /** The video resolution for high-definition. */
+ public static final String VIDEO_RESOLUTION_HD = "VIDEO_RESOLUTION_HD";
+
+ /** The video resolution for full high-definition. */
+ public static final String VIDEO_RESOLUTION_FHD = "VIDEO_RESOLUTION_FHD";
+
+ /** The video resolution for ultra high-definition. */
+ public static final String VIDEO_RESOLUTION_UHD = "VIDEO_RESOLUTION_UHD";
+
+ // Maps each video format to its resolution bucket (SD/ED/HD/FHD/UHD).
+ // VIDEO_FORMAT_240P and VIDEO_FORMAT_360P intentionally have no entry, so
+ // looking them up yields null (no resolution class is defined for them).
+ private static final Map<String, String> VIDEO_FORMAT_TO_RESOLUTION_MAP = new HashMap<>();
+
+ static {
+ VIDEO_FORMAT_TO_RESOLUTION_MAP.put(VIDEO_FORMAT_480I, VIDEO_RESOLUTION_SD);
+ VIDEO_FORMAT_TO_RESOLUTION_MAP.put(VIDEO_FORMAT_480P, VIDEO_RESOLUTION_ED);
+ VIDEO_FORMAT_TO_RESOLUTION_MAP.put(VIDEO_FORMAT_576I, VIDEO_RESOLUTION_SD);
+ VIDEO_FORMAT_TO_RESOLUTION_MAP.put(VIDEO_FORMAT_576P, VIDEO_RESOLUTION_ED);
+ VIDEO_FORMAT_TO_RESOLUTION_MAP.put(VIDEO_FORMAT_720P, VIDEO_RESOLUTION_HD);
+ VIDEO_FORMAT_TO_RESOLUTION_MAP.put(VIDEO_FORMAT_1080I, VIDEO_RESOLUTION_HD);
+ VIDEO_FORMAT_TO_RESOLUTION_MAP.put(VIDEO_FORMAT_1080P, VIDEO_RESOLUTION_FHD);
+ VIDEO_FORMAT_TO_RESOLUTION_MAP.put(VIDEO_FORMAT_2160P, VIDEO_RESOLUTION_UHD);
+ VIDEO_FORMAT_TO_RESOLUTION_MAP.put(VIDEO_FORMAT_4320P, VIDEO_RESOLUTION_UHD);
+ }
+
+ /**
+ * Returns the video resolution (definition) for a given video format.
+ *
+ * @param videoFormat The video format defined in {@link Channels}.
+ * @return the corresponding video resolution string. {@code null} if the resolution string
+ * is not defined for the given video format.
+ * @see #COLUMN_VIDEO_FORMAT
+ */
+ @Nullable
+ public static final String getVideoResolution(@VideoFormat String videoFormat) {
+ return VIDEO_FORMAT_TO_RESOLUTION_MAP.get(videoFormat);
+ }
+
+ /**
+ * The ID of the TV input service that provides this TV channel.
+ *
+ * <p>Use {@link #buildInputId} to build the ID.
+ *
+ * <p>This is a required field.
+ *
+ * <p>Type: TEXT
+ */
+ public static final String COLUMN_INPUT_ID = "input_id";
+
+ /**
+ * The broadcast system type of this TV channel.
+ *
+ * <p>This is used to indicate the broadcast standard (e.g. ATSC, DVB or ISDB) the current
+ * channel conforms to. Use {@link #TYPE_OTHER} for streaming-based channels, which is the
+ * default channel type. The value should match one of the followings:
+ * {@link #TYPE_1SEG},
+ * {@link #TYPE_ATSC_C},
+ * {@link #TYPE_ATSC_M_H},
+ * {@link #TYPE_ATSC_T},
+ * {@link #TYPE_CMMB},
+ * {@link #TYPE_DTMB},
+ * {@link #TYPE_DVB_C},
+ * {@link #TYPE_DVB_C2},
+ * {@link #TYPE_DVB_H},
+ * {@link #TYPE_DVB_S},
+ * {@link #TYPE_DVB_S2},
+ * {@link #TYPE_DVB_SH},
+ * {@link #TYPE_DVB_T},
+ * {@link #TYPE_DVB_T2},
+ * {@link #TYPE_ISDB_C},
+ * {@link #TYPE_ISDB_S},
+ * {@link #TYPE_ISDB_T},
+ * {@link #TYPE_ISDB_TB},
+ * {@link #TYPE_NTSC},
+ * {@link #TYPE_OTHER},
+ * {@link #TYPE_PAL},
+ * {@link #TYPE_SECAM},
+ * {@link #TYPE_S_DMB},
+ * {@link #TYPE_T_DMB}, and
+ * {@link #TYPE_PREVIEW}.
+ *
+ * <p>This value cannot be changed once it's set. Trying to modify it will make the update
+ * fail.
+ *
+ * <p>This is a required field.
+ *
+ * <p>Type: TEXT
+ */
+ public static final String COLUMN_TYPE = "type";
+
+ /**
+ * The predefined service type of this TV channel.
+ *
+ * <p>This is primarily used to indicate whether the current channel is a regular TV channel
+ * or a radio-like channel. Use the same coding for {@code service_type} in the underlying
+ * broadcast standard if it is defined there (e.g. ATSC A/53, ETSI EN 300 468 and ARIB
+ * STD-B10). Otherwise use one of the followings: {@link #SERVICE_TYPE_OTHER},
+ * {@link #SERVICE_TYPE_AUDIO_VIDEO}, {@link #SERVICE_TYPE_AUDIO}
+ *
+ * <p>This is a required field.
+ *
+ * <p>Type: TEXT
+ */
+ public static final String COLUMN_SERVICE_TYPE = "service_type";
+
+ /**
+ * The original network ID of this TV channel.
+ *
+ * <p>It is used to identify the originating delivery system, if applicable. Use the same
+ * coding for {@code original_network_id} for ETSI EN 300 468/TR 101 211 and ARIB STD-B10.
+ *
+ * <p>This is a required field only if the underlying broadcast standard defines the same
+ * name field. Otherwise, leave empty.
+ *
+ * <p>Type: INTEGER
+ */
+ public static final String COLUMN_ORIGINAL_NETWORK_ID = "original_network_id";
+
+ /**
+ * The transport stream ID of this channel.
+ *
+ * <p>It is used to identify the Transport Stream that contains the current channel from any
+ * other multiplex within a network, if applicable. Use the same coding for
+ * {@code transport_stream_id} defined in ISO/IEC 13818-1 if the channel is transmitted via
+ * the MPEG Transport Stream.
+ *
+ * <p>This is a required field only if the current channel is transmitted via the MPEG
+ * Transport Stream. Leave empty otherwise.
+ *
+ * <p>Type: INTEGER
+ */
+ public static final String COLUMN_TRANSPORT_STREAM_ID = "transport_stream_id";
+
+ /**
+ * The service ID of this channel.
+ *
+ * <p>It is used to identify the current service, or channel from any other services within
+ * a given Transport Stream, if applicable. Use the same coding for {@code service_id} in
+ * ETSI EN 300 468 and ARIB STD-B10 or {@code program_number} in ISO/IEC 13818-1.
+ *
+ * <p>This is a required field only if the underlying broadcast standard defines the same
+ * name field, or the current channel is transmitted via the MPEG Transport Stream. Leave
+ * empty otherwise.
+ *
+ * <p>Type: INTEGER
+ */
+ public static final String COLUMN_SERVICE_ID = "service_id";
+
+ /**
+ * The channel number that is displayed to the user.
+ *
+ * <p>The format can vary depending on broadcast standard and product specification.
+ *
+ * <p>Type: TEXT
+ */
+ public static final String COLUMN_DISPLAY_NUMBER = "display_number";
+
+ /**
+ * The channel name that is displayed to the user.
+ *
+ * <p>A call sign is a good candidate to use for this purpose but any name that helps the
+ * user recognize the current channel will be enough. Can also be empty depending on
+ * broadcast standard.
+ *
+ * <p> Type: TEXT
+ */
+ public static final String COLUMN_DISPLAY_NAME = "display_name";
+
+ /**
+ * The network affiliation for this TV channel.
+ *
+ * <p>This is used to identify a channel that is commonly called by its network affiliation
+ * instead of the display name. Examples include ABC for the channel KGO-HD, FOX for the
+ * channel KTVU-HD and NBC for the channel KNTV-HD. Can be empty if not applicable.
+ *
+ * <p>Type: TEXT
+ */
+ public static final String COLUMN_NETWORK_AFFILIATION = "network_affiliation";
+
+ /**
+ * The description of this TV channel.
+ *
+ * <p>Can be empty initially.
+ *
+ * <p>Type: TEXT
+ */
+ public static final String COLUMN_DESCRIPTION = "description";
+
+ /**
+ * The typical video format for programs from this TV channel.
+ *
+ * <p>This is primarily used to filter out channels based on video format by applications.
+ * The value should match one of the followings: {@link #VIDEO_FORMAT_240P},
+ * {@link #VIDEO_FORMAT_360P}, {@link #VIDEO_FORMAT_480I}, {@link #VIDEO_FORMAT_480P},
+ * {@link #VIDEO_FORMAT_576I}, {@link #VIDEO_FORMAT_576P}, {@link #VIDEO_FORMAT_720P},
+ * {@link #VIDEO_FORMAT_1080I}, {@link #VIDEO_FORMAT_1080P}, {@link #VIDEO_FORMAT_2160P},
+ * {@link #VIDEO_FORMAT_4320P}. Note that the actual video resolution of each program from a
+ * given channel can vary thus one should use {@link Programs#COLUMN_VIDEO_WIDTH} and
+ * {@link Programs#COLUMN_VIDEO_HEIGHT} to get more accurate video resolution.
+ *
+ * <p>Type: TEXT
+ *
+ * @see #getVideoResolution
+ */
+ public static final String COLUMN_VIDEO_FORMAT = "video_format";
+
+ /**
+ * The flag indicating whether this TV channel is browsable or not.
+ *
+ * <p>This column can only be set by applications having proper system permission. For
+ * other applications, this is a read-only column.
+ *
+ * <p>A value of 1 indicates the channel is included in the channel list that applications
+ * use to browse channels, a value of 0 indicates the channel is not included in the list.
+ * If not specified, this value is set to 0 (not browsable) by default.
+ *
+ * <p>Type: INTEGER (boolean)
+ */
+ public static final String COLUMN_BROWSABLE = "browsable";
+
+ /**
+ * The flag indicating whether this TV channel is searchable or not.
+ *
+ * <p>The columns of searchable channels can be read by other applications that have proper
+ * permission. Care must be taken not to open sensitive data.
+ *
+ * <p>A value of 1 indicates that the channel is searchable and its columns can be read by
+ * other applications, a value of 0 indicates that the channel is hidden and its columns can
+ * be read only by the package that owns the channel and the system. If not specified, this
+ * value is set to 1 (searchable) by default.
+ *
+ * <p>Type: INTEGER (boolean)
+ */
+ public static final String COLUMN_SEARCHABLE = "searchable";
+
+ /**
+ * The flag indicating whether this TV channel is locked or not.
+ *
+ * <p>This is primarily used for alternative parental control to prevent unauthorized users
+ * from watching the current channel regardless of the content rating. A value of 1
+ * indicates the channel is locked and the user is required to enter passcode to unlock it
+ * in order to watch the current program from the channel, a value of 0 indicates the
+ * channel is not locked thus the user is not prompted to enter passcode If not specified,
+ * this value is set to 0 (not locked) by default.
+ *
+ * <p>This column can only be set by applications having proper system permission to
+ * modify parental control settings. For other applications, this is a read-only column.
+
+ * <p>Type: INTEGER (boolean)
+ */
+ public static final String COLUMN_LOCKED = "locked";
+
+ /**
+ * The URI for the app badge icon of the app link template for this channel.
+ *
+ * <p>This small icon is overlaid at the bottom of the poster art specified by
+ * {@link #COLUMN_APP_LINK_POSTER_ART_URI}. The data in the column must be a URI in one of
+ * the following formats:
+ *
+ * <ul>
+ * <li>content ({@link android.content.ContentResolver#SCHEME_CONTENT})</li>
+ * <li>android.resource ({@link android.content.ContentResolver#SCHEME_ANDROID_RESOURCE})
+ * </li>
+ * <li>file ({@link android.content.ContentResolver#SCHEME_FILE})</li>
+ * </ul>
+ *
+ * <p>The app-linking allows channel input sources to provide activity links from their live
+ * channel programming to another activity. This enables content providers to increase user
+ * engagement by offering the viewer other content or actions.
+ *
+ * <p>Type: TEXT
+ * @see #COLUMN_APP_LINK_COLOR
+ * @see #COLUMN_APP_LINK_INTENT_URI
+ * @see #COLUMN_APP_LINK_POSTER_ART_URI
+ * @see #COLUMN_APP_LINK_TEXT
+ */
+ public static final String COLUMN_APP_LINK_ICON_URI = "app_link_icon_uri";
+
+ /**
+ * The URI for the poster art used as the background of the app link template for this
+ * channel.
+ *
+ * <p>The data in the column must be a URL, or a URI in one of the following formats:
+ *
+ * <ul>
+ * <li>content ({@link android.content.ContentResolver#SCHEME_CONTENT})</li>
+ * <li>android.resource ({@link android.content.ContentResolver#SCHEME_ANDROID_RESOURCE})
+ * </li>
+ * <li>file ({@link android.content.ContentResolver#SCHEME_FILE})</li>
+ * </ul>
+ *
+ * <p>The app-linking allows channel input sources to provide activity links from their live
+ * channel programming to another activity. This enables content providers to increase user
+ * engagement by offering the viewer other content or actions.
+ *
+ * <p>Type: TEXT
+ * @see #COLUMN_APP_LINK_COLOR
+ * @see #COLUMN_APP_LINK_ICON_URI
+ * @see #COLUMN_APP_LINK_INTENT_URI
+ * @see #COLUMN_APP_LINK_TEXT
+ */
+ public static final String COLUMN_APP_LINK_POSTER_ART_URI = "app_link_poster_art_uri";
+
+ /**
+ * The link text of the app link template for this channel.
+ *
+ * <p>This provides a short description of the action that happens when the corresponding
+ * app link is clicked.
+ *
+ * <p>The app-linking allows channel input sources to provide activity links from their live
+ * channel programming to another activity. This enables content providers to increase user
+ * engagement by offering the viewer other content or actions.
+ *
+ * <p>Type: TEXT
+ * @see #COLUMN_APP_LINK_COLOR
+ * @see #COLUMN_APP_LINK_ICON_URI
+ * @see #COLUMN_APP_LINK_INTENT_URI
+ * @see #COLUMN_APP_LINK_POSTER_ART_URI
+ */
+ public static final String COLUMN_APP_LINK_TEXT = "app_link_text";
+
+ /**
+ * The accent color of the app link template for this channel. This is primarily used for
+ * the background color of the text box in the template.
+ *
+ * <p>The app-linking allows channel input sources to provide activity links from their live
+ * channel programming to another activity. This enables content providers to increase user
+ * engagement by offering the viewer other content or actions.
+ *
+ * <p>Type: INTEGER (color value)
+ * @see #COLUMN_APP_LINK_ICON_URI
+ * @see #COLUMN_APP_LINK_INTENT_URI
+ * @see #COLUMN_APP_LINK_POSTER_ART_URI
+ * @see #COLUMN_APP_LINK_TEXT
+ */
+ public static final String COLUMN_APP_LINK_COLOR = "app_link_color";
+
+ /**
+ * The intent URI of the app link for this channel.
+ *
+ * <p>The URI is created using {@link Intent#toUri} with {@link Intent#URI_INTENT_SCHEME}
+ * and converted back to the original intent with {@link Intent#parseUri}. The intent is
+ * launched when the user clicks the corresponding app link for the current channel.
+ *
+ * <p>The app-linking allows channel input sources to provide activity links from their live
+ * channel programming to another activity. This enables content providers to increase user
+ * engagement by offering the viewer other content or actions.
+ *
+ * <p>Type: TEXT
+ * @see #COLUMN_APP_LINK_COLOR
+ * @see #COLUMN_APP_LINK_ICON_URI
+ * @see #COLUMN_APP_LINK_POSTER_ART_URI
+ * @see #COLUMN_APP_LINK_TEXT
+ */
+ public static final String COLUMN_APP_LINK_INTENT_URI = "app_link_intent_uri";
+
+ /**
+ * The internal ID used by individual TV input services.
+ *
+ * <p>This is internal to the provider that inserted it, and should not be decoded by other
+ * apps.
+ *
+ * <p>Can be empty.
+ *
+ * <p>Type: TEXT
+ */
+ public static final String COLUMN_INTERNAL_PROVIDER_ID = "internal_provider_id";
+
+ /**
+ * Internal data used by individual TV input services.
+ *
+ * <p>This is internal to the provider that inserted it, and should not be decoded by other
+ * apps.
+ *
+ * <p>Type: BLOB
+ */
+ public static final String COLUMN_INTERNAL_PROVIDER_DATA = "internal_provider_data";
+
+ /**
+ * Internal integer flag used by individual TV input services.
+ *
+ * <p>This is internal to the provider that inserted it, and should not be decoded by other
+ * apps.
+ *
+ * <p>Type: INTEGER
+ */
+ public static final String COLUMN_INTERNAL_PROVIDER_FLAG1 = "internal_provider_flag1";
+
+ /**
+ * Internal integer flag used by individual TV input services.
+ *
+ * <p>This is internal to the provider that inserted it, and should not be decoded by other
+ * apps.
+ *
+ * <p>Type: INTEGER
+ */
+ public static final String COLUMN_INTERNAL_PROVIDER_FLAG2 = "internal_provider_flag2";
+
+ /**
+ * Internal integer flag used by individual TV input services.
+ *
+ * <p>This is internal to the provider that inserted it, and should not be decoded by other
+ * apps.
+ *
+ * <p>Type: INTEGER
+ */
+ public static final String COLUMN_INTERNAL_PROVIDER_FLAG3 = "internal_provider_flag3";
+
+ /**
+ * Internal integer flag used by individual TV input services.
+ *
+ * <p>This is internal to the provider that inserted it, and should not be decoded by other
+ * apps.
+ *
+ * <p>Type: INTEGER
+ */
+ public static final String COLUMN_INTERNAL_PROVIDER_FLAG4 = "internal_provider_flag4";
+
+ /**
+ * The version number of this row entry used by TV input services.
+ *
+ * <p>This is best used by sync adapters to identify the rows to update. The number can be
+ * defined by individual TV input services. One may assign the same value as
+ * {@code version_number} that appears in ETSI EN 300 468 or ATSC A/65, if the data are
+ * coming from a TV broadcast.
+ *
+ * <p>Type: INTEGER
+ */
+ public static final String COLUMN_VERSION_NUMBER = "version_number";
+
+ /**
+ * The flag indicating whether this TV channel is transient or not.
+ *
+ * <p>A value of 1 indicates that the channel will be automatically removed by the system on
+ * reboot, and a value of 0 indicates that the channel is persistent across reboot. If not
+ * specified, this value is set to 0 (not transient) by default.
+ *
+ * <p>Type: INTEGER (boolean)
+ * @see PreviewPrograms#COLUMN_TRANSIENT
+ * @see WatchNextPrograms#COLUMN_TRANSIENT
+ */
+ public static final String COLUMN_TRANSIENT = "transient";
+
+ private Channels() {}
+
+ /**
+ * A sub-directory of a single TV channel that represents its primary logo.
+ *
+ * <p>To access this directory, append {@link Channels.Logo#CONTENT_DIRECTORY} to the raw
+ * channel URI. The resulting URI represents an image file, and should be interacted
+ * using ContentResolver.openAssetFileDescriptor.
+ *
+ * <p>Note that this sub-directory also supports opening the logo as an asset file in write
+ * mode. Callers can create or replace the primary logo associated with this channel by
+ * opening the asset file and writing the full-size photo contents into it. (Make sure there
+ * is no padding around the logo image.) When the file is closed, the image will be parsed,
+ * sized down if necessary, and stored.
+ *
+ * <p>Usage example:
+ * <pre>
+ * public void writeChannelLogo(long channelId, byte[] logo) {
+ * Uri channelLogoUri = TvContract.buildChannelLogoUri(channelId);
+ * try {
+ * AssetFileDescriptor fd =
+ * getContentResolver().openAssetFileDescriptor(channelLogoUri, "rw");
+ * OutputStream os = fd.createOutputStream();
+ * os.write(logo);
+ * os.close();
+ * fd.close();
+ * } catch (IOException e) {
+ * // Handle error cases.
+ * }
+ * }
+ * </pre>
+ */
+ public static final class Logo {
+
+ /**
+ * The directory twig for this sub-table.
+ */
+ public static final String CONTENT_DIRECTORY = "logo";
+
+ // Non-instantiable: this class only namespaces the logo sub-directory contract.
+ private Logo() {}
+ }
+ }
+
+ /**
+ * Column definitions for the TV programs table.
+ *
+ * <p>By default, the query results will be sorted by
+ * {@link Programs#COLUMN_START_TIME_UTC_MILLIS} in ascending order.
+ */
+ public static final class Programs implements BaseTvColumns, ProgramColumns {
+
+ /**
+ * The content:// style URI for this table.
+ *
+ * <p>SQL selection is not supported for {@link ContentResolver#query},
+ * {@link ContentResolver#update} and {@link ContentResolver#delete} operations.
+ */
+ public static final Uri CONTENT_URI = Uri.parse("content://" + AUTHORITY + "/"
+ + PATH_PROGRAM);
+
+ /** The MIME type of a directory of TV programs. */
+ public static final String CONTENT_TYPE = "vnd.android.cursor.dir/program";
+
+ /** The MIME type of a single TV program. */
+ public static final String CONTENT_ITEM_TYPE = "vnd.android.cursor.item/program";
+
+ /**
+ * The ID of the TV channel that provides this TV program.
+ *
+ * <p>This is a part of the channel URI and matches to {@link BaseColumns#_ID}.
+ *
+ * <p>This is a required field.
+ *
+ * <p>Type: INTEGER (long)
+ */
+ public static final String COLUMN_CHANNEL_ID = "channel_id";
+
+ /**
+ * The season number of this TV program for episodic TV shows.
+ *
+ * <p>Can be empty.
+ *
+ * <p>Type: INTEGER
+ *
+ * @deprecated Use {@link #COLUMN_SEASON_DISPLAY_NUMBER} instead.
+ */
+ @Deprecated
+ public static final String COLUMN_SEASON_NUMBER = "season_number";
+
+ /**
+ * The episode number of this TV program for episodic TV shows.
+ *
+ * <p>Can be empty.
+ *
+ * <p>Type: INTEGER
+ *
+ * @deprecated Use {@link #COLUMN_EPISODE_DISPLAY_NUMBER} instead.
+ */
+ @Deprecated
+ public static final String COLUMN_EPISODE_NUMBER = "episode_number";
+
+ /**
+ * The start time of this TV program, in milliseconds since the epoch.
+ *
+ * <p>The value should be equal to or larger than {@link #COLUMN_END_TIME_UTC_MILLIS} of the
+ * previous program in the same channel. In practice, start time will usually be the end
+ * time of the previous program.
+ *
+ * <p>Can be empty if this program belongs to a {@link Channels#TYPE_PREVIEW} channel.
+ *
+ * <p>Type: INTEGER (long)
+ */
+ public static final String COLUMN_START_TIME_UTC_MILLIS = "start_time_utc_millis";
+
+ /**
+ * The end time of this TV program, in milliseconds since the epoch.
+ *
+ * <p>The value should be equal to or less than {@link #COLUMN_START_TIME_UTC_MILLIS} of the
+ * next program in the same channel. In practice, end time will usually be the start time of
+ * the next program.
+ *
+ * <p>Can be empty if this program belongs to a {@link Channels#TYPE_PREVIEW} channel.
+ *
+ * <p>Type: INTEGER (long)
+ */
+ public static final String COLUMN_END_TIME_UTC_MILLIS = "end_time_utc_millis";
+
+ /**
+ * The comma-separated genre string of this TV program.
+ *
+ * <p>Use the same language appeared in the underlying broadcast standard, if applicable.
+ * (For example, one can refer to the genre strings used in Genre Descriptor of ATSC A/65 or
+ * Content Descriptor of ETSI EN 300 468, if appropriate.) Otherwise, leave empty. Use
+ * {@link Genres#encode} to create a text that can be stored in this column. Use
+ * {@link Genres#decode} to get the broadcast genre strings from the text stored in the
+ * column.
+ *
+ * <p>Type: TEXT
+ * @see Genres#encode
+ * @see Genres#decode
+ */
+ public static final String COLUMN_BROADCAST_GENRE = "broadcast_genre";
+
+ /**
+ * The flag indicating whether recording of this program is prohibited.
+ *
+ * <p>A value of 1 indicates that recording of this program is prohibited and application
+ * will not schedule any recording for this program. A value of 0 indicates that the
+ * recording is not prohibited. If not specified, this value is set to 0 (not prohibited) by
+ * default.
+ *
+ * <p>Type: INTEGER (boolean)
+ */
+ public static final String COLUMN_RECORDING_PROHIBITED = "recording_prohibited"; // 1 = prohibited, 0 = recordable (default)
+
+ private Programs() {} // non-instantiable: column-constant holder only
+
+ /** Canonical genres for TV programs. */
+ public static final class Genres {
+ /** @hide */
+ @StringDef({
+ FAMILY_KIDS,
+ SPORTS,
+ SHOPPING,
+ MOVIES,
+ COMEDY,
+ TRAVEL,
+ DRAMA,
+ EDUCATION,
+ ANIMAL_WILDLIFE,
+ NEWS,
+ GAMING,
+ ARTS,
+ ENTERTAINMENT,
+ LIFE_STYLE,
+ MUSIC,
+ PREMIER,
+ TECH_SCIENCE,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface Genre {}
+
+ /** The genre for Family/Kids. */
+ public static final String FAMILY_KIDS = "FAMILY_KIDS";
+
+ /** The genre for Sports. */
+ public static final String SPORTS = "SPORTS";
+
+ /** The genre for Shopping. */
+ public static final String SHOPPING = "SHOPPING";
+
+ /** The genre for Movies. */
+ public static final String MOVIES = "MOVIES";
+
+ /** The genre for Comedy. */
+ public static final String COMEDY = "COMEDY";
+
+ /** The genre for Travel. */
+ public static final String TRAVEL = "TRAVEL";
+
+ /** The genre for Drama. */
+ public static final String DRAMA = "DRAMA";
+
+ /** The genre for Education. */
+ public static final String EDUCATION = "EDUCATION";
+
+ /** The genre for Animal/Wildlife. */
+ public static final String ANIMAL_WILDLIFE = "ANIMAL_WILDLIFE";
+
+ /** The genre for News. */
+ public static final String NEWS = "NEWS";
+
+ /** The genre for Gaming. */
+ public static final String GAMING = "GAMING";
+
+ /** The genre for Arts. */
+ public static final String ARTS = "ARTS";
+
+ /** The genre for Entertainment. */
+ public static final String ENTERTAINMENT = "ENTERTAINMENT";
+
+ /** The genre for Life Style. */
+ public static final String LIFE_STYLE = "LIFE_STYLE";
+
+ /** The genre for Music. */
+ public static final String MUSIC = "MUSIC";
+
+ /** The genre for Premier. */
+ public static final String PREMIER = "PREMIER";
+
+ /** The genre for Tech/Science. */
+ public static final String TECH_SCIENCE = "TECH_SCIENCE";
+
+ private static final ArraySet<String> CANONICAL_GENRES = new ArraySet<>(); // mirror of the @StringDef values above; keep both in sync
+ static {
+ CANONICAL_GENRES.add(FAMILY_KIDS);
+ CANONICAL_GENRES.add(SPORTS);
+ CANONICAL_GENRES.add(SHOPPING);
+ CANONICAL_GENRES.add(MOVIES);
+ CANONICAL_GENRES.add(COMEDY);
+ CANONICAL_GENRES.add(TRAVEL);
+ CANONICAL_GENRES.add(DRAMA);
+ CANONICAL_GENRES.add(EDUCATION);
+ CANONICAL_GENRES.add(ANIMAL_WILDLIFE);
+ CANONICAL_GENRES.add(NEWS);
+ CANONICAL_GENRES.add(GAMING);
+ CANONICAL_GENRES.add(ARTS);
+ CANONICAL_GENRES.add(ENTERTAINMENT);
+ CANONICAL_GENRES.add(LIFE_STYLE);
+ CANONICAL_GENRES.add(MUSIC);
+ CANONICAL_GENRES.add(PREMIER);
+ CANONICAL_GENRES.add(TECH_SCIENCE);
+ }
+
+ private static final char DOUBLE_QUOTE = '"'; // escape character: a '"' makes the following char literal
+ private static final char COMMA = ','; // genre delimiter when unescaped
+ private static final String DELIMITER = ",";
+
+ private static final String[] EMPTY_STRING_ARRAY = new String[0];
+
+ private Genres() {} // non-instantiable: static helpers and constants only
+
+ /**
+ * Encodes genre strings to a text that can be put into the database.
+ *
+ * @param genres Genre strings.
+ * @return an encoded genre string that can be inserted into the
+ * {@link #COLUMN_BROADCAST_GENRE} or {@link #COLUMN_CANONICAL_GENRE} column.
+ */
+ public static String encode(@NonNull @Genre String... genres) {
+ if (genres == null) {
+ // MNC and before will throw a NPE.
+ return null;
+ }
+ StringBuilder sb = new StringBuilder();
+ String separator = ""; // no delimiter before the first genre
+ for (String genre : genres) {
+ sb.append(separator).append(encodeToCsv(genre));
+ separator = DELIMITER;
+ }
+ return sb.toString();
+ }
+
+ private static String encodeToCsv(String genre) {
+ // Escape scheme: prefix '"' and ',' with a '"' so decode() can tell
+ // literal characters apart from delimiters ('"' -> '""', ',' -> '",').
+ StringBuilder sb = new StringBuilder();
+ int length = genre.length();
+ for (int i = 0; i < length; ++i) {
+ char c = genre.charAt(i);
+ switch (c) {
+ case DOUBLE_QUOTE:
+ sb.append(DOUBLE_QUOTE); // emit escape char, then fall through to append the literal '"'
+ break;
+ case COMMA:
+ sb.append(DOUBLE_QUOTE); // emit escape char, then append the literal ','
+ break;
+ }
+ sb.append(c);
+ }
+ return sb.toString();
+ }
+
+ /**
+ * Decodes the genre strings from the text stored in the database.
+ *
+ * @param genres The encoded genre string retrieved from the
+ * {@link #COLUMN_BROADCAST_GENRE} or {@link #COLUMN_CANONICAL_GENRE} column.
+ * @return genre strings.
+ */
+ public static @Genre String[] decode(@NonNull String genres) {
+ if (TextUtils.isEmpty(genres)) {
+ // MNC and before will throw a NPE for {@code null} genres.
+ return EMPTY_STRING_ARRAY;
+ }
+ if (genres.indexOf(COMMA) == -1 && genres.indexOf(DOUBLE_QUOTE) == -1) {
+ return new String[] {genres.trim()}; // fast path: single genre, nothing escaped
+ }
+ StringBuilder sb = new StringBuilder();
+ List<String> results = new ArrayList<>();
+ int length = genres.length();
+ boolean escape = false; // true when the previous char was an unconsumed escape '"'
+ for (int i = 0; i < length; ++i) {
+ char c = genres.charAt(i);
+ switch (c) {
+ case DOUBLE_QUOTE:
+ if (!escape) {
+ escape = true; // next special char is taken literally
+ continue;
+ }
+ break;
+ case COMMA:
+ if (!escape) {
+ String string = sb.toString().trim();
+ if (string.length() > 0) {
+ results.add(string);
+ }
+ sb = new StringBuilder(); // unescaped comma ends the current genre
+ continue;
+ }
+ break;
+ }
+ sb.append(c);
+ escape = false;
+ }
+ String string = sb.toString().trim(); // flush the trailing genre (no comma after the last one)
+ if (string.length() > 0) {
+ results.add(string);
+ }
+ return results.toArray(new String[results.size()]);
+ }
+
+ /**
+ * Returns whether a given text is a canonical genre defined in {@link Genres}.
+ *
+ * @param genre The name of genre to be checked.
+ * @return {@code true} if the genre is canonical, otherwise {@code false}.
+ */
+ public static boolean isCanonical(String genre) {
+ return CANONICAL_GENRES.contains(genre);
+ }
+ }
+ }
+
+ /**
+ * Column definitions for the recorded TV programs table.
+ *
+ * <p>By default, the query results will be sorted by {@link #COLUMN_START_TIME_UTC_MILLIS} in
+ * ascending order.
+ */
+ public static final class RecordedPrograms implements BaseTvColumns, ProgramColumns {
+
+ /**
+ * The content:// style URI for this table.
+ *
+ * <p>SQL selection is not supported for {@link ContentResolver#query},
+ * {@link ContentResolver#update} and {@link ContentResolver#delete} operations.
+ */
+ public static final Uri CONTENT_URI = Uri.parse("content://" + AUTHORITY + "/"
+ + PATH_RECORDED_PROGRAM);
+
+ /** The MIME type of a directory of recorded TV programs. */
+ public static final String CONTENT_TYPE = "vnd.android.cursor.dir/recorded_program";
+
+ /** The MIME type of a single recorded TV program. */
+ public static final String CONTENT_ITEM_TYPE = "vnd.android.cursor.item/recorded_program";
+
+ /**
+ * The ID of the TV channel that provides this recorded program.
+ *
+ * <p>This is a part of the channel URI and matches to {@link BaseColumns#_ID}.
+ *
+ * <p>Type: INTEGER (long)
+ */
+ public static final String COLUMN_CHANNEL_ID = "channel_id"; // same column name as Programs.COLUMN_CHANNEL_ID
+
+ /**
+ * The ID of the TV input service that is associated with this recorded program.
+ *
+ * <p>Use {@link #buildInputId} to build the ID.
+ *
+ * <p>This is a required field.
+ *
+ * <p>Type: TEXT
+ */
+ public static final String COLUMN_INPUT_ID = "input_id";
+
+ /**
+ * The start time of the original TV program, in milliseconds since the epoch.
+ *
+ * <p>Type: INTEGER (long)
+ * @see Programs#COLUMN_START_TIME_UTC_MILLIS
+ */
+ public static final String COLUMN_START_TIME_UTC_MILLIS =
+ Programs.COLUMN_START_TIME_UTC_MILLIS;
+
+ /**
+ * The end time of the original TV program, in milliseconds since the epoch.
+ *
+ * <p>Type: INTEGER (long)
+ * @see Programs#COLUMN_END_TIME_UTC_MILLIS
+ */
+ public static final String COLUMN_END_TIME_UTC_MILLIS = Programs.COLUMN_END_TIME_UTC_MILLIS;
+
+ /**
+ * The comma-separated genre string of this recorded TV program.
+ *
+ * <p>Use the same language appeared in the underlying broadcast standard, if applicable.
+ * (For example, one can refer to the genre strings used in Genre Descriptor of ATSC A/65 or
+ * Content Descriptor of ETSI EN 300 468, if appropriate.) Otherwise, leave empty. Use
+ * {@link Genres#encode Genres.encode()} to create a text that can be stored in this column.
+ * Use {@link Genres#decode Genres.decode()} to get the broadcast genre strings from the
+ * text stored in the column.
+ *
+ * <p>Type: TEXT
+ * @see Programs#COLUMN_BROADCAST_GENRE
+ */
+ public static final String COLUMN_BROADCAST_GENRE = Programs.COLUMN_BROADCAST_GENRE;
+
+ /**
+ * The URI of the recording data for this recorded program.
+ *
+ * <p>Together with {@link #COLUMN_RECORDING_DATA_BYTES}, applications can use this
+ * information to manage recording storage. The URI should indicate a file or directory with
+ * the scheme {@link android.content.ContentResolver#SCHEME_FILE}.
+ *
+ * <p>Type: TEXT
+ * @see #COLUMN_RECORDING_DATA_BYTES
+ */
+ public static final String COLUMN_RECORDING_DATA_URI = "recording_data_uri";
+
+ /**
+ * The data size (in bytes) for this recorded program.
+ *
+ * <p>Together with {@link #COLUMN_RECORDING_DATA_URI}, applications can use this
+ * information to manage recording storage.
+ *
+ * <p>Type: INTEGER (long)
+ * @see #COLUMN_RECORDING_DATA_URI
+ */
+ public static final String COLUMN_RECORDING_DATA_BYTES = "recording_data_bytes";
+
+ /**
+ * The duration (in milliseconds) of this recorded program.
+ *
+ * <p>The actual duration of the recorded program can differ from the one calculated by
+ * {@link #COLUMN_END_TIME_UTC_MILLIS} - {@link #COLUMN_START_TIME_UTC_MILLIS} as program
+ * recording can be interrupted in the middle for some reason, resulting in a partially
+ * recorded program, which is still playable.
+ *
+ * <p>Type: INTEGER
+ */
+ public static final String COLUMN_RECORDING_DURATION_MILLIS = "recording_duration_millis"; // may be < end - start for interrupted recordings
+
+ /**
+ * The expiration time for this recorded program, in milliseconds since the epoch.
+ *
+ * <p>Recorded TV programs do not expire by default unless explicitly requested by the user
+ * or the user allows applications to delete them in order to free up disk space for future
+ * recording. However, some TV content can have expiration date set by the content provider
+ * when recorded. This field is used to indicate such a restriction.
+ *
+ * <p>Can be empty.
+ *
+ * <p>Type: INTEGER (long)
+ */
+ public static final String COLUMN_RECORDING_EXPIRE_TIME_UTC_MILLIS =
+ "recording_expire_time_utc_millis";
+
+ private RecordedPrograms() {} // non-instantiable: column-constant holder only
+ }
+
+ /**
+ * Column definitions for the preview TV programs table.
+ */
+ public static final class PreviewPrograms implements BaseTvColumns, ProgramColumns,
+ PreviewProgramColumns {
+
+ /**
+ * The content:// style URI for this table.
+ *
+ * <p>SQL selection is not supported for {@link ContentResolver#query},
+ * {@link ContentResolver#update} and {@link ContentResolver#delete} operations.
+ */
+ public static final Uri CONTENT_URI = Uri.parse("content://" + AUTHORITY + "/"
+ + PATH_PREVIEW_PROGRAM);
+
+ /** The MIME type of a directory of preview TV programs. */
+ public static final String CONTENT_TYPE = "vnd.android.cursor.dir/preview_program";
+
+ /** The MIME type of a single preview TV program. */
+ public static final String CONTENT_ITEM_TYPE = "vnd.android.cursor.item/preview_program";
+
+ /**
+ * The ID of the TV channel that provides this TV program.
+ *
+ * <p>This value cannot be changed once it's set. Trying to modify it will make the update
+ * fail.
+ *
+ * <p>This is a part of the channel URI and matches to {@link BaseColumns#_ID}.
+ *
+ * <p>This is a required field.
+ *
+ * <p>Type: INTEGER (long)
+ */
+ public static final String COLUMN_CHANNEL_ID = "channel_id"; // same column name as Programs.COLUMN_CHANNEL_ID
+
+ /**
+ * The weight of the preview program within the channel.
+ *
+ * <p>The UI may choose to show this item in a different position in the channel row.
+ * A larger weight value means the program is more important than other programs having
+ * smaller weight values. The value is relevant for the preview programs in the same
+ * channel. This is only relevant to {@link Channels#TYPE_PREVIEW}.
+ *
+ * <p>Can be empty.
+ *
+ * <p>Type: INTEGER
+ */
+ public static final String COLUMN_WEIGHT = "weight";
+
+ private PreviewPrograms() {} // non-instantiable: column-constant holder only
+ }
+
+ /**
+ * Column definitions for the "watch next" TV programs table.
+ */
+ public static final class WatchNextPrograms implements BaseTvColumns, ProgramColumns,
+ PreviewProgramColumns {
+
+ /**
+ * The content:// style URI for this table.
+ *
+ * <p>SQL selection is not supported for {@link ContentResolver#query},
+ * {@link ContentResolver#update} and {@link ContentResolver#delete} operations.
+ */
+ public static final Uri CONTENT_URI = Uri.parse("content://" + AUTHORITY + "/"
+ + PATH_WATCH_NEXT_PROGRAM);
+
+ /** The MIME type of a directory of "watch next" TV programs. */
+ public static final String CONTENT_TYPE = "vnd.android.cursor.dir/watch_next_program";
+
+ /** The MIME type of a single "watch next" TV program. */
+ public static final String CONTENT_ITEM_TYPE = "vnd.android.cursor.item/watch_next_program";
+
+ /** @hide */
+ @IntDef({
+ WATCH_NEXT_TYPE_CONTINUE,
+ WATCH_NEXT_TYPE_NEXT,
+ WATCH_NEXT_TYPE_NEW,
+ WATCH_NEXT_TYPE_WATCHLIST,
+ })
+ @Retention(RetentionPolicy.SOURCE)
+ public @interface WatchNextType {}
+
+ /**
+ * The watch next type for CONTINUE. Use this type when the user has already watched more
+ * than 1 minute of this content.
+ *
+ * @see #COLUMN_WATCH_NEXT_TYPE
+ */
+ public static final int WATCH_NEXT_TYPE_CONTINUE = 0;
+
+ /**
+ * The watch next type for NEXT. Use this type when the user has watched one or more
+ * complete episodes from some episodic content, but there remains more than one episode
+ * remaining or there is one last episode remaining, but it is not “new” in that it was
+ * released before the user started watching the show.
+ *
+ * @see #COLUMN_WATCH_NEXT_TYPE
+ */
+ public static final int WATCH_NEXT_TYPE_NEXT = 1;
+
+ /**
+ * The watch next type for NEW. Use this type when the user had watched all of the available
+ * episodes from some episodic content, but a new episode became available since the user
+ * started watching the first episode and now there is exactly one unwatched episode. This
+ * could also work for recorded events in a series e.g. soccer matches or football games.
+ *
+ * @see #COLUMN_WATCH_NEXT_TYPE
+ */
+ public static final int WATCH_NEXT_TYPE_NEW = 2;
+
+ /**
+ * The watch next type for WATCHLIST. Use this type when the user has elected to explicitly
+ * add a movie, event or series to a “watchlist” as a manual way of curating what they
+ * want to watch next.
+ *
+ * @see #COLUMN_WATCH_NEXT_TYPE
+ */
+ public static final int WATCH_NEXT_TYPE_WATCHLIST = 3;
+
+ /**
+ * The "watch next" type of this program content.
+ *
+ * <p>The value should match one of the followings:
+ * {@link #WATCH_NEXT_TYPE_CONTINUE},
+ * {@link #WATCH_NEXT_TYPE_NEXT},
+ * {@link #WATCH_NEXT_TYPE_NEW}, and
+ * {@link #WATCH_NEXT_TYPE_WATCHLIST}.
+ *
+ * <p>This is a required field.
+ *
+ * <p>Type: INTEGER
+ */
+ public static final String COLUMN_WATCH_NEXT_TYPE = "watch_next_type";
+
+ /**
+ * The last UTC time that the user engaged in this TV program, in milliseconds since the
+ * epoch. This is a hint for the application that is used for ordering of "watch next"
+ * programs.
+ *
+ * <p>The meaning of the value varies depending on the {@link #COLUMN_WATCH_NEXT_TYPE}:
+ * <ul>
+ * <li>{@link #WATCH_NEXT_TYPE_CONTINUE}: the date that the user was last watching the
+ * content.</li>
+ * <li>{@link #WATCH_NEXT_TYPE_NEXT}: the date of the last episode watched.</li>
+ * <li>{@link #WATCH_NEXT_TYPE_NEW}: the release date of the new episode.</li>
+ * <li>{@link #WATCH_NEXT_TYPE_WATCHLIST}: the date the item was added to the Watchlist.
+ * </li>
+ * </ul>
+ *
+ * <p>This is a required field.
+ *
+ * <p>Type: INTEGER (long)
+ */
+ public static final String COLUMN_LAST_ENGAGEMENT_TIME_UTC_MILLIS =
+ "last_engagement_time_utc_millis";
+
+ private WatchNextPrograms() {} // non-instantiable: column-constant holder only
+ }
+
+ /**
+ * Column definitions for the TV programs that the user watched. Applications do not have access
+ * to this table.
+ *
+ * <p>By default, the query results will be sorted by
+ * {@link WatchedPrograms#COLUMN_WATCH_START_TIME_UTC_MILLIS} in descending order.
+ * @hide
+ */
+ @SystemApi
+ public static final class WatchedPrograms implements BaseTvColumns {
+
+ /** The content:// style URI for this table. */
+ public static final Uri CONTENT_URI =
+ Uri.parse("content://" + AUTHORITY + "/watched_program"); // path inlined; no PATH_* constant for this system-only table
+
+ /** The MIME type of a directory of watched programs. */
+ public static final String CONTENT_TYPE = "vnd.android.cursor.dir/watched_program";
+
+ /** The MIME type of a single item in this table. */
+ public static final String CONTENT_ITEM_TYPE = "vnd.android.cursor.item/watched_program";
+
+ /**
+ * The UTC time that the user started watching this TV program, in milliseconds since the
+ * epoch.
+ *
+ * <p>Type: INTEGER (long)
+ */
+ public static final String COLUMN_WATCH_START_TIME_UTC_MILLIS =
+ "watch_start_time_utc_millis";
+
+ /**
+ * The UTC time that the user stopped watching this TV program, in milliseconds since the
+ * epoch.
+ *
+ * <p>Type: INTEGER (long)
+ */
+ public static final String COLUMN_WATCH_END_TIME_UTC_MILLIS = "watch_end_time_utc_millis";
+
+ /**
+ * The ID of the TV channel that provides this TV program.
+ *
+ * <p>This is a required field.
+ *
+ * <p>Type: INTEGER (long)
+ */
+ public static final String COLUMN_CHANNEL_ID = "channel_id";
+
+ /**
+ * The title of this TV program.
+ *
+ * <p>Type: TEXT
+ */
+ public static final String COLUMN_TITLE = "title";
+
+ /**
+ * The start time of this TV program, in milliseconds since the epoch.
+ *
+ * <p>Type: INTEGER (long)
+ */
+ public static final String COLUMN_START_TIME_UTC_MILLIS = "start_time_utc_millis";
+
+ /**
+ * The end time of this TV program, in milliseconds since the epoch.
+ *
+ * <p>Type: INTEGER (long)
+ */
+ public static final String COLUMN_END_TIME_UTC_MILLIS = "end_time_utc_millis";
+
+ /**
+ * The description of this TV program.
+ *
+ * <p>Type: TEXT
+ */
+ public static final String COLUMN_DESCRIPTION = "description";
+
+ /**
+ * Extra parameters given to {@link TvInputService.Session#tune(Uri, android.os.Bundle)
+ * TvInputService.Session.tune(Uri, android.os.Bundle)} when tuning to the channel that
+ * provides this TV program. (Used internally.)
+ *
+ * <p>This column contains an encoded string that represents comma-separated key-value pairs of
+ * the tune parameters. (Ex. "[key1]=[value1], [key2]=[value2]"). '%' is used as an escape
+ * character for '%', '=', and ','.
+ *
+ * <p>Type: TEXT
+ */
+ public static final String COLUMN_INTERNAL_TUNE_PARAMS = "tune_params";
+
+ /**
+ * The session token of this TV program. (Used internally.)
+ *
+ * <p>This contains a String representation of {@link IBinder} for
+ * {@link TvInputService.Session} that provides the current TV program. It is used
+ * internally to distinguish watched programs entries from different TV input sessions.
+ *
+ * <p>Type: TEXT
+ */
+ public static final String COLUMN_INTERNAL_SESSION_TOKEN = "session_token";
+
+ private WatchedPrograms() {} // non-instantiable: column-constant holder only
+ }
+}
diff --git a/android/media/tv/TvInputHardwareInfo.java b/android/media/tv/TvInputHardwareInfo.java
new file mode 100644
index 00000000..762f0c07
--- /dev/null
+++ b/android/media/tv/TvInputHardwareInfo.java
@@ -0,0 +1,255 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv;
+
+import static java.lang.annotation.RetentionPolicy.SOURCE;
+
+import android.annotation.IntDef;
+import android.annotation.SystemApi;
+import android.hardware.tv.input.V1_0.Constants;
+import android.media.AudioManager;
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.util.Log;
+import java.lang.annotation.Retention;
+
+/**
+ * Simple container for information about TV input hardware.
+ * Not for third-party developers.
+ *
+ * @hide
+ */
+@SystemApi
+public final class TvInputHardwareInfo implements Parcelable {
+ static final String TAG = "TvInputHardwareInfo";
+
+ // Match hardware/libhardware/include/hardware/tv_input.h
+ public static final int TV_INPUT_TYPE_OTHER_HARDWARE = Constants.TV_INPUT_TYPE_OTHER;
+ public static final int TV_INPUT_TYPE_TUNER = Constants.TV_INPUT_TYPE_TUNER;
+ public static final int TV_INPUT_TYPE_COMPOSITE = Constants.TV_INPUT_TYPE_COMPOSITE;
+ public static final int TV_INPUT_TYPE_SVIDEO = Constants.TV_INPUT_TYPE_SVIDEO;
+ public static final int TV_INPUT_TYPE_SCART = Constants.TV_INPUT_TYPE_SCART;
+ public static final int TV_INPUT_TYPE_COMPONENT = Constants.TV_INPUT_TYPE_COMPONENT;
+ public static final int TV_INPUT_TYPE_VGA = Constants.TV_INPUT_TYPE_VGA;
+ public static final int TV_INPUT_TYPE_DVI = Constants.TV_INPUT_TYPE_DVI;
+ public static final int TV_INPUT_TYPE_HDMI = Constants.TV_INPUT_TYPE_HDMI;
+ public static final int TV_INPUT_TYPE_DISPLAY_PORT = Constants.TV_INPUT_TYPE_DISPLAY_PORT;
+
+ /** @hide */
+ @Retention(SOURCE)
+ @IntDef({CABLE_CONNECTION_STATUS_UNKNOWN, CABLE_CONNECTION_STATUS_CONNECTED,
+ CABLE_CONNECTION_STATUS_DISCONNECTED})
+ public @interface CableConnectionStatus {}
+
+ // Match hardware/interfaces/tv/input/1.0/types.hal
+ /**
+ * The hardware is unsure about the connection status or does not support cable detection.
+ */
+ public static final int CABLE_CONNECTION_STATUS_UNKNOWN =
+ Constants.CABLE_CONNECTION_STATUS_UNKNOWN;
+
+ /**
+ * Cable is connected to the hardware.
+ */
+ public static final int CABLE_CONNECTION_STATUS_CONNECTED =
+ Constants.CABLE_CONNECTION_STATUS_CONNECTED;
+
+ /**
+ * Cable is disconnected from the hardware.
+ */
+ public static final int CABLE_CONNECTION_STATUS_DISCONNECTED =
+ Constants.CABLE_CONNECTION_STATUS_DISCONNECTED;
+
+ public static final Parcelable.Creator<TvInputHardwareInfo> CREATOR =
+ new Parcelable.Creator<TvInputHardwareInfo>() {
+ @Override
+ public TvInputHardwareInfo createFromParcel(Parcel source) {
+ try {
+ TvInputHardwareInfo info = new TvInputHardwareInfo();
+ info.readFromParcel(source);
+ return info;
+ } catch (Exception e) {
+ Log.e(TAG, "Exception creating TvInputHardwareInfo from parcel", e);
+ return null; // malformed parcel yields null rather than propagating the exception
+ }
+ }
+
+ @Override
+ public TvInputHardwareInfo[] newArray(int size) {
+ return new TvInputHardwareInfo[size];
+ }
+ };
+
+ private int mDeviceId;
+ private int mType;
+ private int mAudioType;
+ private String mAudioAddress;
+ private int mHdmiPortId; // only meaningful when mType == TV_INPUT_TYPE_HDMI
+ @CableConnectionStatus
+ private int mCableConnectionStatus;
+
+ private TvInputHardwareInfo() {
+ } // instances are created via Builder or CREATOR only
+
+ public int getDeviceId() {
+ return mDeviceId;
+ }
+
+ public int getType() {
+ return mType;
+ }
+
+ public int getAudioType() {
+ return mAudioType;
+ }
+
+ public String getAudioAddress() {
+ return mAudioAddress;
+ }
+
+ public int getHdmiPortId() {
+ if (mType != TV_INPUT_TYPE_HDMI) {
+ throw new IllegalStateException(); // port id is undefined for non-HDMI inputs
+ }
+ return mHdmiPortId;
+ }
+
+ /**
+ * Gets the cable connection status of the hardware.
+ *
+ * @return {@code CABLE_CONNECTION_STATUS_CONNECTED} if cable is connected.
+ * {@code CABLE_CONNECTION_STATUS_DISCONNECTED} if cable is disconnected.
+ * {@code CABLE_CONNECTION_STATUS_UNKNOWN} if the hardware is unsure about the
+ * connection status or does not support cable detection.
+ */
+ @CableConnectionStatus
+ public int getCableConnectionStatus() {
+ return mCableConnectionStatus;
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder b = new StringBuilder(128);
+ b.append("TvInputHardwareInfo {id=").append(mDeviceId);
+ b.append(", type=").append(mType);
+ b.append(", audio_type=").append(mAudioType);
+ b.append(", audio_addr=").append(mAudioAddress);
+ if (mType == TV_INPUT_TYPE_HDMI) {
+ b.append(", hdmi_port=").append(mHdmiPortId);
+ }
+ b.append(", cable_connection_status=").append(mCableConnectionStatus);
+ b.append("}");
+ return b.toString();
+ }
+
+ // Parcelable
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ @Override
+ public void writeToParcel(Parcel dest, int flags) {
+ // Field order must stay in sync with readFromParcel() below.
+ dest.writeInt(mDeviceId);
+ dest.writeInt(mType);
+ dest.writeInt(mAudioType);
+ dest.writeString(mAudioAddress);
+ if (mType == TV_INPUT_TYPE_HDMI) {
+ dest.writeInt(mHdmiPortId); // HDMI port id is serialized only for HDMI inputs
+ }
+ dest.writeInt(mCableConnectionStatus);
+ }
+
+ public void readFromParcel(Parcel source) {
+ // Field order must mirror writeToParcel() above.
+ mDeviceId = source.readInt();
+ mType = source.readInt();
+ mAudioType = source.readInt();
+ mAudioAddress = source.readString();
+ if (mType == TV_INPUT_TYPE_HDMI) {
+ mHdmiPortId = source.readInt();
+ }
+ mCableConnectionStatus = source.readInt();
+ }
+
+ public static final class Builder {
+ private Integer mDeviceId = null; // boxed so build() can detect "never set"
+ private Integer mType = null; // boxed so build() can detect "never set"
+ private int mAudioType = AudioManager.DEVICE_NONE;
+ private String mAudioAddress = "";
+ private Integer mHdmiPortId = null;
+ private Integer mCableConnectionStatus = CABLE_CONNECTION_STATUS_UNKNOWN;
+
+ public Builder() {
+ }
+
+ public Builder deviceId(int deviceId) {
+ mDeviceId = deviceId;
+ return this;
+ }
+
+ public Builder type(int type) {
+ mType = type;
+ return this;
+ }
+
+ public Builder audioType(int audioType) {
+ mAudioType = audioType;
+ return this;
+ }
+
+ public Builder audioAddress(String audioAddress) {
+ mAudioAddress = audioAddress;
+ return this;
+ }
+
+ public Builder hdmiPortId(int hdmiPortId) {
+ mHdmiPortId = hdmiPortId;
+ return this;
+ }
+
+ /**
+ * Sets cable connection status.
+ */
+ public Builder cableConnectionStatus(@CableConnectionStatus int cableConnectionStatus) {
+ mCableConnectionStatus = cableConnectionStatus;
+ return this;
+ }
+
+ public TvInputHardwareInfo build() {
+ if (mDeviceId == null || mType == null) {
+ throw new UnsupportedOperationException(); // deviceId and type are mandatory
+ }
+ if ((mType == TV_INPUT_TYPE_HDMI && mHdmiPortId == null) ||
+ (mType != TV_INPUT_TYPE_HDMI && mHdmiPortId != null)) {
+ throw new UnsupportedOperationException(); // HDMI port id is required iff type is HDMI
+ }
+
+ TvInputHardwareInfo info = new TvInputHardwareInfo();
+ info.mDeviceId = mDeviceId;
+ info.mType = mType;
+ info.mAudioType = mAudioType;
+ if (info.mAudioType != AudioManager.DEVICE_NONE) {
+ info.mAudioAddress = mAudioAddress; // address only retained when an audio device is present
+ }
+ if (mHdmiPortId != null) {
+ info.mHdmiPortId = mHdmiPortId;
+ }
+ info.mCableConnectionStatus = mCableConnectionStatus;
+ return info;
+ }
+ }
+}
diff --git a/android/media/tv/TvInputInfo.java b/android/media/tv/TvInputInfo.java
new file mode 100644
index 00000000..74085d39
--- /dev/null
+++ b/android/media/tv/TvInputInfo.java
@@ -0,0 +1,1115 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv;
+
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.StringRes;
+import android.annotation.SystemApi;
+import android.content.ComponentName;
+import android.content.Context;
+import android.content.Intent;
+import android.content.pm.PackageManager;
+import android.content.pm.PackageManager.NameNotFoundException;
+import android.content.pm.ResolveInfo;
+import android.content.pm.ServiceInfo;
+import android.content.res.Resources;
+import android.content.res.TypedArray;
+import android.content.res.XmlResourceParser;
+import android.graphics.drawable.Drawable;
+import android.graphics.drawable.Icon;
+import android.hardware.hdmi.HdmiDeviceInfo;
+import android.net.Uri;
+import android.os.Bundle;
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.os.UserHandle;
+import android.provider.Settings;
+import android.text.TextUtils;
+import android.util.AttributeSet;
+import android.util.Log;
+import android.util.SparseIntArray;
+import android.util.Xml;
+
+import org.xmlpull.v1.XmlPullParser;
+import org.xmlpull.v1.XmlPullParserException;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+
+/**
+ * This class is used to specify meta information of a TV input.
+ */
+public final class TvInputInfo implements Parcelable {
+ private static final boolean DEBUG = false;
+ private static final String TAG = "TvInputInfo";
+
+ /** @hide */
+ @Retention(RetentionPolicy.SOURCE)
+ @IntDef({TYPE_TUNER, TYPE_OTHER, TYPE_COMPOSITE, TYPE_SVIDEO, TYPE_SCART, TYPE_COMPONENT,
+ TYPE_VGA, TYPE_DVI, TYPE_HDMI, TYPE_DISPLAY_PORT})
+ public @interface Type {}
+
+ // Should be in sync with frameworks/base/core/res/res/values/attrs.xml
+ /**
+ * TV input type: the TV input service is a tuner which provides channels.
+ */
+ public static final int TYPE_TUNER = 0;
+ /**
+ * TV input type: a generic hardware TV input type.
+ */
+ public static final int TYPE_OTHER = 1000;
+ /**
+ * TV input type: the TV input service represents a composite port.
+ */
+ public static final int TYPE_COMPOSITE = 1001;
+ /**
+ * TV input type: the TV input service represents a SVIDEO port.
+ */
+ public static final int TYPE_SVIDEO = 1002;
+ /**
+ * TV input type: the TV input service represents a SCART port.
+ */
+ public static final int TYPE_SCART = 1003;
+ /**
+ * TV input type: the TV input service represents a component port.
+ */
+ public static final int TYPE_COMPONENT = 1004;
+ /**
+ * TV input type: the TV input service represents a VGA port.
+ */
+ public static final int TYPE_VGA = 1005;
+ /**
+ * TV input type: the TV input service represents a DVI port.
+ */
+ public static final int TYPE_DVI = 1006;
+ /**
+ * TV input type: the TV input service is HDMI. (e.g. HDMI 1)
+ */
+ public static final int TYPE_HDMI = 1007;
+ /**
+ * TV input type: the TV input service represents a display port.
+ */
+ public static final int TYPE_DISPLAY_PORT = 1008;
+
+ /**
+ * Used as a String extra field in setup intents created by {@link #createSetupIntent()} to
+ * supply the ID of a specific TV input to set up.
+ */
+ public static final String EXTRA_INPUT_ID = "android.media.tv.extra.INPUT_ID";
+
+ private final ResolveInfo mService;
+
+ private final String mId;
+ private final int mType;
+ private final boolean mIsHardwareInput;
+
+ // TODO: Remove mIconUri when createTvInputInfo() is removed.
+ private Uri mIconUri;
+
+ private final CharSequence mLabel;
+ private final int mLabelResId;
+ private final Icon mIcon;
+ private final Icon mIconStandby;
+ private final Icon mIconDisconnected;
+
+ // Attributes from XML meta data.
+ private final String mSetupActivity;
+ private final boolean mCanRecord;
+ private final int mTunerCount;
+
+ // Attributes specific to HDMI
+ private final HdmiDeviceInfo mHdmiDeviceInfo;
+ private final boolean mIsConnectedToHdmiSwitch;
+ private final String mParentId;
+
+ private final Bundle mExtras;
+
+ /**
+ * Create a new instance of the TvInputInfo class, instantiating it from the given Context,
+ * ResolveInfo, and HdmiDeviceInfo.
+ *
+ * @param service The ResolveInfo returned from the package manager about this TV input service.
+ * @param hdmiDeviceInfo The HdmiDeviceInfo for a HDMI CEC logical device.
+ * @param parentId The ID of this TV input's parent input. {@code null} if none exists.
+ * @param label The label of this TvInputInfo. If it is {@code null} or empty, {@code service}
+ * label will be loaded.
+ * @param iconUri The {@link android.net.Uri} to load the icon image. See
+ * {@link android.content.ContentResolver#openInputStream}. If it is {@code null},
+ * the application icon of {@code service} will be loaded.
+ * @hide
+ * @deprecated Use {@link Builder} instead.
+ */
+ @Deprecated
+ @SystemApi
+ public static TvInputInfo createTvInputInfo(Context context, ResolveInfo service,
+ HdmiDeviceInfo hdmiDeviceInfo, String parentId, String label, Uri iconUri)
+ throws XmlPullParserException, IOException {
+ TvInputInfo info = new TvInputInfo.Builder(context, service)
+ .setHdmiDeviceInfo(hdmiDeviceInfo)
+ .setParentId(parentId)
+ .setLabel(label)
+ .build();
+ // Legacy path: icon URIs are not supported by the Builder, so set the field directly.
+ info.mIconUri = iconUri;
+ return info;
+ }
+
+ /**
+ * Create a new instance of the TvInputInfo class, instantiating it from the given Context,
+ * ResolveInfo, and HdmiDeviceInfo.
+ *
+ * @param service The ResolveInfo returned from the package manager about this TV input service.
+ * @param hdmiDeviceInfo The HdmiDeviceInfo for a HDMI CEC logical device.
+ * @param parentId The ID of this TV input's parent input. {@code null} if none exists.
+ * @param labelRes The label resource ID of this TvInputInfo. If it is {@code 0},
+ * {@code service} label will be loaded.
+ * @param icon The {@link android.graphics.drawable.Icon} to load the icon image. If it is
+ * {@code null}, the application icon of {@code service} will be loaded.
+ * @hide
+ * @deprecated Use {@link Builder} instead.
+ */
+ @Deprecated
+ @SystemApi
+ public static TvInputInfo createTvInputInfo(Context context, ResolveInfo service,
+ HdmiDeviceInfo hdmiDeviceInfo, String parentId, int labelRes, Icon icon)
+ throws XmlPullParserException, IOException {
+ // Legacy factory; delegates entirely to the Builder.
+ return new TvInputInfo.Builder(context, service)
+ .setHdmiDeviceInfo(hdmiDeviceInfo)
+ .setParentId(parentId)
+ .setLabel(labelRes)
+ .setIcon(icon)
+ .build();
+ }
+
+ /**
+ * Create a new instance of the TvInputInfo class, instantiating it from the given Context,
+ * ResolveInfo, and TvInputHardwareInfo.
+ *
+ * @param service The ResolveInfo returned from the package manager about this TV input service.
+ * @param hardwareInfo The TvInputHardwareInfo for a TV input hardware device.
+ * @param label The label of this TvInputInfo. If it is {@code null} or empty, {@code service}
+ * label will be loaded.
+ * @param iconUri The {@link android.net.Uri} to load the icon image. See
+ * {@link android.content.ContentResolver#openInputStream}. If it is {@code null},
+ * the application icon of {@code service} will be loaded.
+ * @hide
+ * @deprecated Use {@link Builder} instead.
+ */
+ @Deprecated
+ @SystemApi
+ public static TvInputInfo createTvInputInfo(Context context, ResolveInfo service,
+ TvInputHardwareInfo hardwareInfo, String label, Uri iconUri)
+ throws XmlPullParserException, IOException {
+ TvInputInfo info = new TvInputInfo.Builder(context, service)
+ .setTvInputHardwareInfo(hardwareInfo)
+ .setLabel(label)
+ .build();
+ // Legacy path: icon URIs are not supported by the Builder, so set the field directly.
+ info.mIconUri = iconUri;
+ return info;
+ }
+
+ /**
+ * Create a new instance of the TvInputInfo class, instantiating it from the given Context,
+ * ResolveInfo, and TvInputHardwareInfo.
+ *
+ * @param service The ResolveInfo returned from the package manager about this TV input service.
+ * @param hardwareInfo The TvInputHardwareInfo for a TV input hardware device.
+ * @param labelRes The label resource ID of this TvInputInfo. If it is {@code 0},
+ * {@code service} label will be loaded.
+ * @param icon The {@link android.graphics.drawable.Icon} to load the icon image. If it is
+ * {@code null}, the application icon of {@code service} will be loaded.
+ * @hide
+ * @deprecated Use {@link Builder} instead.
+ */
+ @Deprecated
+ @SystemApi
+ public static TvInputInfo createTvInputInfo(Context context, ResolveInfo service,
+ TvInputHardwareInfo hardwareInfo, int labelRes, Icon icon)
+ throws XmlPullParserException, IOException {
+ // Legacy factory; delegates entirely to the Builder.
+ return new TvInputInfo.Builder(context, service)
+ .setTvInputHardwareInfo(hardwareInfo)
+ .setLabel(labelRes)
+ .setIcon(icon)
+ .build();
+ }
+
+ /** Sole constructor, invoked by {@link Builder#build()} and the deprecated factories. */
+ private TvInputInfo(ResolveInfo service, String id, int type, boolean isHardwareInput,
+ CharSequence label, int labelResId, Icon icon, Icon iconStandby, Icon iconDisconnected,
+ String setupActivity, boolean canRecord, int tunerCount, HdmiDeviceInfo hdmiDeviceInfo,
+ boolean isConnectedToHdmiSwitch, String parentId, Bundle extras) {
+ // Straight field assignment; instances are effectively immutable except for mIconUri,
+ // which the deprecated createTvInputInfo() factories may set after construction.
+ mService = service;
+ mId = id;
+ mType = type;
+ mIsHardwareInput = isHardwareInput;
+ mLabel = label;
+ mLabelResId = labelResId;
+ mIcon = icon;
+ mIconStandby = iconStandby;
+ mIconDisconnected = iconDisconnected;
+ mSetupActivity = setupActivity;
+ mCanRecord = canRecord;
+ mTunerCount = tunerCount;
+ mHdmiDeviceInfo = hdmiDeviceInfo;
+ mIsConnectedToHdmiSwitch = isConnectedToHdmiSwitch;
+ mParentId = parentId;
+ mExtras = extras;
+ }
+
+ /**
+ * Returns a unique ID for this TV input. The ID is generated from the package and class name
+ * implementing the TV input service.
+ */
+ public String getId() {
+ return mId; // Generated by the Builder; non-null for any built instance.
+ }
+
+ /**
+ * Returns the parent input ID.
+ *
+ * <p>A TV input may have a parent input if the TV input is actually a logical representation of
+ * a device behind the hardware port represented by the parent input.
+ * For example, a HDMI CEC logical device, connected to a HDMI port, appears as another TV
+ * input. In this case, the parent input of this logical device is the HDMI port.
+ *
+ * <p>Applications may group inputs by parent input ID to provide an easier access to inputs
+ * sharing the same physical port. In the example of HDMI CEC, logical HDMI CEC devices behind
+ * the same HDMI port have the same parent ID, which is the ID representing the port. Thus
+ * applications can group the hardware HDMI port and the logical HDMI CEC devices behind it
+ * together using this method.
+ *
+ * @return the ID of the parent input, if exists. Returns {@code null} if the parent input is
+ * not specified.
+ */
+ public String getParentId() {
+ return mParentId; // May be null when no parent input was set.
+ }
+
+ /**
+ * Returns the information of the service that implements this TV input.
+ */
+ public ServiceInfo getServiceInfo() {
+ return mService.serviceInfo; // ResolveInfo obtained from PackageManager at build time.
+ }
+
+ /**
+ * Returns the component of the service that implements this TV input.
+ * @hide
+ */
+ public ComponentName getComponent() {
+ // Recomposed from the service's package and class name on each call.
+ return new ComponentName(mService.serviceInfo.packageName, mService.serviceInfo.name);
+ }
+
+ /**
+ * Returns an intent to start the setup activity for this TV input.
+ */
+ public Intent createSetupIntent() {
+ if (TextUtils.isEmpty(mSetupActivity)) {
+ // No setup activity was declared in the service meta-data.
+ return null;
+ }
+ Intent setupIntent = new Intent(Intent.ACTION_MAIN);
+ setupIntent.setClassName(mService.serviceInfo.packageName, mSetupActivity);
+ setupIntent.putExtra(EXTRA_INPUT_ID, getId());
+ return setupIntent;
+ }
+
+ /**
+ * Returns an intent to start the settings activity for this TV input.
+ *
+ * @deprecated Use {@link #createSetupIntent()} instead. Settings activity is deprecated.
+ * Use setup activity instead to provide settings.
+ */
+ @Deprecated
+ public Intent createSettingsIntent() {
+ return null; // Settings activities are no longer supported; always null.
+ }
+
+ /**
+ * Returns the type of this TV input.
+ */
+ @Type
+ public int getType() {
+ return mType; // One of the TYPE_* constants, determined by the Builder.
+ }
+
+ /**
+ * Returns the number of tuners this TV input has.
+ *
+ * <p>This method is valid only for inputs of type {@link #TYPE_TUNER}. For inputs of other
+ * types, it returns 0.
+ *
+ * <p>Tuners correspond to physical/logical resources that allow reception of TV signal. Having
+ * <i>N</i> tuners means that the TV input is capable of receiving <i>N</i> different channels
+ * concurrently.
+ */
+ public int getTunerCount() {
+ return mTunerCount; // 0 for non-tuner inputs (see Builder#build()).
+ }
+
+ /**
+ * Returns {@code true} if this TV input can record TV programs, {@code false} otherwise.
+ */
+ public boolean canRecord() {
+ // From Builder#setCanRecord or the service's XML "canRecord" attribute (default false).
+ return mCanRecord;
+ }
+
+ /**
+ * Returns domain-specific extras associated with this TV input.
+ */
+ public Bundle getExtras() {
+ // NOTE(review): returned as-is (no defensive copy), so callers can mutate the
+ // internal Bundle — confirm this is intended.
+ return mExtras;
+ }
+
+ /**
+ * Returns the HDMI device information of this TV input.
+ * @hide
+ */
+ @SystemApi
+ public HdmiDeviceInfo getHdmiDeviceInfo() {
+ // Only HDMI inputs carry CEC device information; every other type reports none.
+ return (mType == TYPE_HDMI) ? mHdmiDeviceInfo : null;
+ }
+
+ /**
+ * Returns {@code true} if this TV input is pass-through which does not have any real channels in
+ * TvProvider. {@code false} otherwise.
+ *
+ * @see TvContract#buildChannelUriForPassthroughInput(String)
+ */
+ public boolean isPassthroughInput() {
+ // Every non-tuner type (composite, HDMI, etc.) is a pass-through input.
+ return mType != TYPE_TUNER;
+ }
+
+ /**
+ * Returns {@code true} if this TV input represents a hardware device. (e.g. built-in tuner,
+ * HDMI1) {@code false} otherwise.
+ * @hide
+ */
+ @SystemApi
+ public boolean isHardwareInput() {
+ // True when the Builder was given an HdmiDeviceInfo or a TvInputHardwareInfo.
+ return mIsHardwareInput;
+ }
+
+ /**
+ * Returns {@code true}, if a CEC device for this TV input is connected to an HDMI switch, i.e.,
+ * the device isn't directly connected to a HDMI port.
+ * @hide
+ */
+ @SystemApi
+ public boolean isConnectedToHdmiSwitch() {
+ // Derived in Builder#build(): non-zero lower 12 bits of the CEC physical address
+ // mean the device sits behind a switch rather than directly on a port.
+ return mIsConnectedToHdmiSwitch;
+ }
+
+ /**
+ * Checks if this TV input is marked hidden by the user in the settings.
+ *
+ * @param context Supplies a {@link Context} used to check if this TV input is hidden.
+ * @return {@code true} if the user marked this TV input hidden in settings. {@code false}
+ * otherwise.
+ */
+ public boolean isHidden(Context context) {
+ // Delegates to the per-user hidden-input set stored in secure settings.
+ return TvInputSettings.isHidden(context, mId, UserHandle.myUserId());
+ }
+
+ /**
+ * Loads the user-displayed label for this TV input.
+ *
+ * @param context Supplies a {@link Context} used to load the label.
+ * @return a CharSequence containing the TV input's label. If the TV input does not have
+ * a label, its name is returned.
+ */
+ public CharSequence loadLabel(@NonNull Context context) {
+ if (mLabelResId != 0) {
+ return context.getPackageManager().getText(mService.serviceInfo.packageName,
+ mLabelResId, null);
+ } else if (!TextUtils.isEmpty(mLabel)) {
+ return mLabel;
+ }
+ return mService.loadLabel(context.getPackageManager());
+ }
+
+ /**
+ * Loads the custom label set by user in settings.
+ *
+ * @param context Supplies a {@link Context} used to load the custom label.
+ * @return a CharSequence containing the TV input's custom label. {@code null} if there is no
+ * custom label.
+ */
+ public CharSequence loadCustomLabel(Context context) {
+ // Per-user custom label stored in secure settings; null when the user set none.
+ return TvInputSettings.getCustomLabel(context, mId, UserHandle.myUserId());
+ }
+
+ /**
+ * Loads the user-displayed icon for this TV input.
+ *
+ * @param context Supplies a {@link Context} used to load the icon.
+ * @return a Drawable containing the TV input's icon. If the TV input does not have an icon,
+ * application's icon is returned. If it's unavailable too, {@code null} is returned.
+ */
+ public Drawable loadIcon(@NonNull Context context) {
+ // Preference order: explicit Icon, then the legacy icon URI, then the service/app icon.
+ if (mIcon != null) {
+ return mIcon.loadDrawable(context);
+ } else if (mIconUri != null) {
+ try (InputStream is = context.getContentResolver().openInputStream(mIconUri)) {
+ Drawable drawable = Drawable.createFromStream(is, null);
+ if (drawable != null) {
+ return drawable;
+ }
+ } catch (IOException e) {
+ Log.w(TAG, "Loading the default icon due to a failure on loading " + mIconUri, e);
+ // Falls back.
+ }
+ }
+ return loadServiceIcon(context);
+ }
+
+ /**
+ * Loads the user-displayed icon for this TV input per input state.
+ *
+ * @param context Supplies a {@link Context} used to load the icon.
+ * @param state The input state. Should be one of the following.
+ * {@link TvInputManager#INPUT_STATE_CONNECTED},
+ * {@link TvInputManager#INPUT_STATE_CONNECTED_STANDBY} and
+ * {@link TvInputManager#INPUT_STATE_DISCONNECTED}.
+ * @return a Drawable containing the TV input's icon for the given state or {@code null} if such
+ * an icon is not defined.
+ * @hide
+ */
+ @SystemApi
+ public Drawable loadIcon(@NonNull Context context, int state) {
+ if (state == TvInputManager.INPUT_STATE_CONNECTED) {
+ return loadIcon(context);
+ } else if (state == TvInputManager.INPUT_STATE_CONNECTED_STANDBY) {
+ if (mIconStandby != null) {
+ return mIconStandby.loadDrawable(context);
+ }
+ } else if (state == TvInputManager.INPUT_STATE_DISCONNECTED) {
+ if (mIconDisconnected != null) {
+ return mIconDisconnected.loadDrawable(context);
+ }
+ } else {
+ throw new IllegalArgumentException("Unknown state: " + state);
+ }
+ return null;
+ }
+
+ @Override
+ public int describeContents() {
+ return 0; // No special content (e.g. file descriptors) in the parcelled form.
+ }
+
+ @Override
+ public int hashCode() {
+ // mId alone suffices: equals() requires equal IDs, so equal objects always
+ // produce equal hashes.
+ return mId.hashCode();
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (o == this) {
+ return true;
+ }
+
+ if (!(o instanceof TvInputInfo)) {
+ return false;
+ }
+
+ TvInputInfo obj = (TvInputInfo) o;
+ // Compares every field (including the legacy mIconUri); TextUtils.equals and
+ // Objects.equals are both null-safe.
+ return Objects.equals(mService, obj.mService)
+ && TextUtils.equals(mId, obj.mId)
+ && mType == obj.mType
+ && mIsHardwareInput == obj.mIsHardwareInput
+ && TextUtils.equals(mLabel, obj.mLabel)
+ && Objects.equals(mIconUri, obj.mIconUri)
+ && mLabelResId == obj.mLabelResId
+ && Objects.equals(mIcon, obj.mIcon)
+ && Objects.equals(mIconStandby, obj.mIconStandby)
+ && Objects.equals(mIconDisconnected, obj.mIconDisconnected)
+ && TextUtils.equals(mSetupActivity, obj.mSetupActivity)
+ && mCanRecord == obj.mCanRecord
+ && mTunerCount == obj.mTunerCount
+ && Objects.equals(mHdmiDeviceInfo, obj.mHdmiDeviceInfo)
+ && mIsConnectedToHdmiSwitch == obj.mIsConnectedToHdmiSwitch
+ && TextUtils.equals(mParentId, obj.mParentId)
+ && Objects.equals(mExtras, obj.mExtras);
+ }
+
+ @Override
+ public String toString() {
+ return "TvInputInfo{id=" + mId
+ + ", pkg=" + mService.serviceInfo.packageName
+ + ", service=" + mService.serviceInfo.name + "}";
+ }
+
+ /**
+ * Used to package this object into a {@link Parcel}.
+ *
+ * @param dest The {@link Parcel} to be written.
+ * @param flags The flags used for parceling.
+ */
+ @Override
+ public void writeToParcel(@NonNull Parcel dest, int flags) {
+ // Field order must exactly match the reading order in TvInputInfo(Parcel).
+ mService.writeToParcel(dest, flags);
+ dest.writeString(mId);
+ dest.writeInt(mType);
+ dest.writeByte(mIsHardwareInput ? (byte) 1 : 0);
+ TextUtils.writeToParcel(mLabel, dest, flags);
+ dest.writeParcelable(mIconUri, flags);
+ dest.writeInt(mLabelResId);
+ dest.writeParcelable(mIcon, flags);
+ dest.writeParcelable(mIconStandby, flags);
+ dest.writeParcelable(mIconDisconnected, flags);
+ dest.writeString(mSetupActivity);
+ dest.writeByte(mCanRecord ? (byte) 1 : 0);
+ dest.writeInt(mTunerCount);
+ dest.writeParcelable(mHdmiDeviceInfo, flags);
+ dest.writeByte(mIsConnectedToHdmiSwitch ? (byte) 1 : 0);
+ dest.writeString(mParentId);
+ dest.writeBundle(mExtras);
+ }
+
+ private Drawable loadServiceIcon(Context context) {
+ ServiceInfo si = mService.serviceInfo;
+ // Give up when neither the service nor its application declares an icon.
+ if (si.icon == 0 && si.applicationInfo.icon == 0) {
+ return null;
+ }
+ return si.loadIcon(context.getPackageManager());
+ }
+
+ /** Standard {@link Parcelable.Creator}; delegates to the Parcel constructor. */
+ public static final Parcelable.Creator<TvInputInfo> CREATOR =
+ new Parcelable.Creator<TvInputInfo>() {
+ @Override
+ public TvInputInfo createFromParcel(Parcel in) {
+ return new TvInputInfo(in);
+ }
+
+ @Override
+ public TvInputInfo[] newArray(int size) {
+ return new TvInputInfo[size];
+ }
+ };
+
+ /** Reconstructs an instance from a parcel; order must match {@link #writeToParcel}. */
+ private TvInputInfo(Parcel in) {
+ mService = ResolveInfo.CREATOR.createFromParcel(in);
+ mId = in.readString();
+ mType = in.readInt();
+ mIsHardwareInput = in.readByte() == 1;
+ mLabel = TextUtils.CHAR_SEQUENCE_CREATOR.createFromParcel(in);
+ mIconUri = in.readParcelable(null);
+ mLabelResId = in.readInt();
+ mIcon = in.readParcelable(null);
+ mIconStandby = in.readParcelable(null);
+ mIconDisconnected = in.readParcelable(null);
+ mSetupActivity = in.readString();
+ mCanRecord = in.readByte() == 1;
+ mTunerCount = in.readInt();
+ mHdmiDeviceInfo = in.readParcelable(null);
+ mIsConnectedToHdmiSwitch = in.readByte() == 1;
+ mParentId = in.readString();
+ mExtras = in.readBundle();
+ }
+
+ /**
+ * A convenience builder for creating {@link TvInputInfo} objects.
+ */
+ public static final class Builder {
+ private static final int LENGTH_HDMI_PHYSICAL_ADDRESS = 4;
+ private static final int LENGTH_HDMI_DEVICE_ID = 2;
+
+ private static final String XML_START_TAG_NAME = "tv-input";
+ private static final String DELIMITER_INFO_IN_ID = "/";
+ private static final String PREFIX_HDMI_DEVICE = "HDMI";
+ private static final String PREFIX_HARDWARE_DEVICE = "HW";
+
+ // Maps TvInputHardwareInfo.TV_INPUT_TYPE_* values onto the public TYPE_* constants.
+ private static final SparseIntArray sHardwareTypeToTvInputType = new SparseIntArray();
+ static {
+ sHardwareTypeToTvInputType.put(TvInputHardwareInfo.TV_INPUT_TYPE_OTHER_HARDWARE,
+ TYPE_OTHER);
+ sHardwareTypeToTvInputType.put(TvInputHardwareInfo.TV_INPUT_TYPE_TUNER, TYPE_TUNER);
+ sHardwareTypeToTvInputType.put(TvInputHardwareInfo.TV_INPUT_TYPE_COMPOSITE,
+ TYPE_COMPOSITE);
+ sHardwareTypeToTvInputType.put(TvInputHardwareInfo.TV_INPUT_TYPE_SVIDEO, TYPE_SVIDEO);
+ sHardwareTypeToTvInputType.put(TvInputHardwareInfo.TV_INPUT_TYPE_SCART, TYPE_SCART);
+ sHardwareTypeToTvInputType.put(TvInputHardwareInfo.TV_INPUT_TYPE_COMPONENT,
+ TYPE_COMPONENT);
+ sHardwareTypeToTvInputType.put(TvInputHardwareInfo.TV_INPUT_TYPE_VGA, TYPE_VGA);
+ sHardwareTypeToTvInputType.put(TvInputHardwareInfo.TV_INPUT_TYPE_DVI, TYPE_DVI);
+ sHardwareTypeToTvInputType.put(TvInputHardwareInfo.TV_INPUT_TYPE_HDMI, TYPE_HDMI);
+ sHardwareTypeToTvInputType.put(TvInputHardwareInfo.TV_INPUT_TYPE_DISPLAY_PORT,
+ TYPE_DISPLAY_PORT);
+ }
+
+ private final Context mContext;
+ private final ResolveInfo mResolveInfo;
+ private CharSequence mLabel;
+ private int mLabelResId;
+ private Icon mIcon;
+ private Icon mIconStandby;
+ private Icon mIconDisconnected;
+ private String mSetupActivity;
+ // Boxed so parseServiceMetadata() can tell "not set" (null) from an explicit value.
+ private Boolean mCanRecord;
+ private Integer mTunerCount;
+ private TvInputHardwareInfo mTvInputHardwareInfo;
+ private HdmiDeviceInfo mHdmiDeviceInfo;
+ private String mParentId;
+ private Bundle mExtras;
+
+ /**
+ * Constructs a new builder for {@link TvInputInfo}.
+ *
+ * @param context A Context of the application package implementing this class.
+ * @param component The name of the application component to be used for the
+ * {@link TvInputService}.
+ */
+ public Builder(Context context, ComponentName component) {
+ if (context == null) {
+ throw new IllegalArgumentException("context cannot be null.");
+ }
+ Intent intent = new Intent(TvInputService.SERVICE_INTERFACE).setComponent(component);
+ mResolveInfo = context.getPackageManager().resolveService(intent,
+ PackageManager.GET_SERVICES | PackageManager.GET_META_DATA);
+ if (mResolveInfo == null) {
+ throw new IllegalArgumentException("Invalid component. Can't find the service.");
+ }
+ mContext = context;
+ }
+
+ /**
+ * Constructs a new builder for {@link TvInputInfo}.
+ *
+ * @param resolveInfo The ResolveInfo returned from the package manager about this TV input
+ * service.
+ * @hide
+ */
+ public Builder(Context context, ResolveInfo resolveInfo) {
+ if (context == null) {
+ throw new IllegalArgumentException("context cannot be null");
+ }
+ if (resolveInfo == null) {
+ throw new IllegalArgumentException("resolveInfo cannot be null");
+ }
+ mContext = context;
+ mResolveInfo = resolveInfo;
+ }
+
+ /**
+ * Sets the icon.
+ *
+ * @param icon The icon that represents this TV input.
+ * @return This Builder object to allow for chaining of calls to builder methods.
+ * @hide
+ */
+ @SystemApi
+ public Builder setIcon(Icon icon) {
+ this.mIcon = icon;
+ return this;
+ }
+
+ /**
+ * Sets the icon for a given input state.
+ *
+ * @param icon The icon that represents this TV input for the given state.
+ * @param state The input state. Should be one of the following.
+ * {@link TvInputManager#INPUT_STATE_CONNECTED},
+ * {@link TvInputManager#INPUT_STATE_CONNECTED_STANDBY} and
+ * {@link TvInputManager#INPUT_STATE_DISCONNECTED}.
+ * @return This Builder object to allow for chaining of calls to builder methods.
+ * @hide
+ */
+ @SystemApi
+ public Builder setIcon(Icon icon, int state) {
+ if (state == TvInputManager.INPUT_STATE_CONNECTED) {
+ this.mIcon = icon;
+ } else if (state == TvInputManager.INPUT_STATE_CONNECTED_STANDBY) {
+ this.mIconStandby = icon;
+ } else if (state == TvInputManager.INPUT_STATE_DISCONNECTED) {
+ this.mIconDisconnected = icon;
+ } else {
+ throw new IllegalArgumentException("Unknown state: " + state);
+ }
+ return this;
+ }
+
+ /**
+ * Sets the label.
+ *
+ * @param label The text to be used as label.
+ * @return This Builder object to allow for chaining of calls to builder methods.
+ * @hide
+ */
+ @SystemApi
+ public Builder setLabel(CharSequence label) {
+ if (mLabelResId != 0) {
+ throw new IllegalStateException("Resource ID for label is already set.");
+ }
+ this.mLabel = label;
+ return this;
+ }
+
+ /**
+ * Sets the label.
+ *
+ * @param resId The resource ID of the text to use.
+ * @return This Builder object to allow for chaining of calls to builder methods.
+ * @hide
+ */
+ @SystemApi
+ public Builder setLabel(@StringRes int resId) {
+ if (mLabel != null) {
+ throw new IllegalStateException("Label text is already set.");
+ }
+ this.mLabelResId = resId;
+ return this;
+ }
+
+ /**
+ * Sets the HdmiDeviceInfo.
+ *
+ * @param hdmiDeviceInfo The HdmiDeviceInfo for a HDMI CEC logical device.
+ * @return This Builder object to allow for chaining of calls to builder methods.
+ * @hide
+ */
+ @SystemApi
+ public Builder setHdmiDeviceInfo(HdmiDeviceInfo hdmiDeviceInfo) {
+ // HdmiDeviceInfo and TvInputHardwareInfo are mutually exclusive; the last one set wins.
+ if (mTvInputHardwareInfo != null) {
+ Log.w(TAG, "TvInputHardwareInfo will not be used to build this TvInputInfo");
+ mTvInputHardwareInfo = null;
+ }
+ this.mHdmiDeviceInfo = hdmiDeviceInfo;
+ return this;
+ }
+
+ /**
+ * Sets the parent ID.
+ *
+ * @param parentId The parent ID.
+ * @return This Builder object to allow for chaining of calls to builder methods.
+ * @hide
+ */
+ @SystemApi
+ public Builder setParentId(String parentId) {
+ this.mParentId = parentId;
+ return this;
+ }
+
+ /**
+ * Sets the TvInputHardwareInfo.
+ *
+ * @param tvInputHardwareInfo The TvInputHardwareInfo for a TV input hardware device.
+ * @return This Builder object to allow for chaining of calls to builder methods.
+ * @hide
+ */
+ @SystemApi
+ public Builder setTvInputHardwareInfo(TvInputHardwareInfo tvInputHardwareInfo) {
+ // HdmiDeviceInfo and TvInputHardwareInfo are mutually exclusive; the last one set wins.
+ if (mHdmiDeviceInfo != null) {
+ Log.w(TAG, "mHdmiDeviceInfo will not be used to build this TvInputInfo");
+ mHdmiDeviceInfo = null;
+ }
+ this.mTvInputHardwareInfo = tvInputHardwareInfo;
+ return this;
+ }
+
+ /**
+ * Sets the tuner count. Valid only for {@link #TYPE_TUNER}.
+ *
+ * @param tunerCount The number of tuners this TV input has.
+ * @return This Builder object to allow for chaining of calls to builder methods.
+ */
+ public Builder setTunerCount(int tunerCount) {
+ this.mTunerCount = tunerCount;
+ return this;
+ }
+
+ /**
+ * Sets whether this TV input can record TV programs or not.
+ *
+ * @param canRecord Whether this TV input can record TV programs.
+ * @return This Builder object to allow for chaining of calls to builder methods.
+ */
+ public Builder setCanRecord(boolean canRecord) {
+ this.mCanRecord = canRecord;
+ return this;
+ }
+
+ /**
+ * Sets domain-specific extras associated with this TV input.
+ *
+ * @param extras Domain-specific extras associated with this TV input. Keys <em>must</em> be
+ * a scoped name, i.e. prefixed with a package name you own, so that different
+ * developers will not create conflicting keys.
+ * @return This Builder object to allow for chaining of calls to builder methods.
+ */
+ public Builder setExtras(Bundle extras) {
+ this.mExtras = extras;
+ return this;
+ }
+
+ /**
+ * Creates a {@link TvInputInfo} instance with the specified fields. Most of the information
+ * is obtained by parsing the AndroidManifest and {@link TvInputService#SERVICE_META_DATA}
+ * for the {@link TvInputService} this TV input implements.
+ *
+ * @return TvInputInfo containing information about this TV input.
+ */
+ public TvInputInfo build() {
+ ComponentName componentName = new ComponentName(mResolveInfo.serviceInfo.packageName,
+ mResolveInfo.serviceInfo.name);
+ String id;
+ int type;
+ boolean isHardwareInput = false;
+ boolean isConnectedToHdmiSwitch = false;
+
+ // Derive the ID and type: HDMI CEC info takes precedence, then generic hardware
+ // info; a plain software input defaults to TYPE_TUNER.
+ if (mHdmiDeviceInfo != null) {
+ id = generateInputId(componentName, mHdmiDeviceInfo);
+ type = TYPE_HDMI;
+ isHardwareInput = true;
+ // Non-zero lower 12 bits of the physical address mean the device sits
+ // behind an HDMI switch rather than directly on a port.
+ isConnectedToHdmiSwitch = (mHdmiDeviceInfo.getPhysicalAddress() & 0x0FFF) != 0;
+ } else if (mTvInputHardwareInfo != null) {
+ id = generateInputId(componentName, mTvInputHardwareInfo);
+ type = sHardwareTypeToTvInputType.get(mTvInputHardwareInfo.getType(), TYPE_TUNER);
+ isHardwareInput = true;
+ } else {
+ id = generateInputId(componentName);
+ type = TYPE_TUNER;
+ }
+ parseServiceMetadata(type);
+ return new TvInputInfo(mResolveInfo, id, type, isHardwareInput, mLabel, mLabelResId,
+ mIcon, mIconStandby, mIconDisconnected, mSetupActivity,
+ mCanRecord == null ? false : mCanRecord, mTunerCount == null ? 0 : mTunerCount,
+ mHdmiDeviceInfo, isConnectedToHdmiSwitch, mParentId, mExtras);
+ }
+
+ /** ID for a software input: the flattened component name. */
+ private static String generateInputId(ComponentName name) {
+ return name.flattenToShortString();
+ }
+
+ /** ID for an HDMI CEC device: component name plus a "/HDMI&lt;addr&gt;&lt;id&gt;" suffix. */
+ private static String generateInputId(ComponentName name, HdmiDeviceInfo hdmiDeviceInfo) {
+ // Example of the format : "/HDMI%04X%02X"
+ String format = DELIMITER_INFO_IN_ID + PREFIX_HDMI_DEVICE
+ + "%0" + LENGTH_HDMI_PHYSICAL_ADDRESS + "X"
+ + "%0" + LENGTH_HDMI_DEVICE_ID + "X";
+ return name.flattenToShortString() + String.format(Locale.ENGLISH, format,
+ hdmiDeviceInfo.getPhysicalAddress(), hdmiDeviceInfo.getId());
+ }
+
+ /** ID for a hardware device: component name plus a "/HW&lt;deviceId&gt;" suffix. */
+ private static String generateInputId(ComponentName name,
+ TvInputHardwareInfo tvInputHardwareInfo) {
+ return name.flattenToShortString() + DELIMITER_INFO_IN_ID + PREFIX_HARDWARE_DEVICE
+ + tvInputHardwareInfo.getDeviceId();
+ }
+
+ /** Fills setup-activity / canRecord / tunerCount from the service's XML meta-data. */
+ private void parseServiceMetadata(int inputType) {
+ ServiceInfo si = mResolveInfo.serviceInfo;
+ PackageManager pm = mContext.getPackageManager();
+ try (XmlResourceParser parser =
+ si.loadXmlMetaData(pm, TvInputService.SERVICE_META_DATA)) {
+ if (parser == null) {
+ throw new IllegalStateException("No " + TvInputService.SERVICE_META_DATA
+ + " meta-data found for " + si.name);
+ }
+
+ Resources res = pm.getResourcesForApplication(si.applicationInfo);
+ AttributeSet attrs = Xml.asAttributeSet(parser);
+
+ // Advance to the first START_TAG (or give up at END_DOCUMENT).
+ int type;
+ while ((type = parser.next()) != XmlPullParser.END_DOCUMENT
+ && type != XmlPullParser.START_TAG) {
+ }
+
+ String nodeName = parser.getName();
+ if (!XML_START_TAG_NAME.equals(nodeName)) {
+ throw new IllegalStateException("Meta-data does not start with "
+ + XML_START_TAG_NAME + " tag for " + si.name);
+ }
+
+ TypedArray sa = res.obtainAttributes(attrs,
+ com.android.internal.R.styleable.TvInputService);
+ mSetupActivity = sa.getString(
+ com.android.internal.R.styleable.TvInputService_setupActivity);
+ // Values set explicitly on the Builder win over the XML attributes.
+ if (mCanRecord == null) {
+ mCanRecord = sa.getBoolean(
+ com.android.internal.R.styleable.TvInputService_canRecord, false);
+ }
+ if (mTunerCount == null && inputType == TYPE_TUNER) {
+ mTunerCount = sa.getInt(
+ com.android.internal.R.styleable.TvInputService_tunerCount, 1);
+ }
+ sa.recycle();
+ } catch (IOException | XmlPullParserException e) {
+ throw new IllegalStateException("Failed reading meta-data for " + si.packageName, e);
+ } catch (NameNotFoundException e) {
+ throw new IllegalStateException("No resources found for " + si.packageName, e);
+ }
+ }
+ }
+
+ /**
+ * Utility class for putting and getting settings for TV input.
+ *
+ * @hide
+ */
+ @SystemApi
+ public static final class TvInputSettings {
+ private static final String TV_INPUT_SEPARATOR = ":";
+ private static final String CUSTOM_NAME_SEPARATOR = ",";
+
+ private TvInputSettings() { }
+
+ private static boolean isHidden(Context context, String inputId, int userId) {
+ return getHiddenTvInputIds(context, userId).contains(inputId);
+ }
+
+ private static String getCustomLabel(Context context, String inputId, int userId) {
+ return getCustomLabels(context, userId).get(inputId);
+ }
+
+ /**
+ * Returns a set of TV input IDs which are marked as hidden by user in the settings.
+ *
+ * @param context The application context
+ * @param userId The user ID for the stored hidden input set
+ * @hide
+ */
+ @SystemApi
+ public static Set<String> getHiddenTvInputIds(Context context, int userId) {
+ String hiddenIdsString = Settings.Secure.getStringForUser(
+ context.getContentResolver(), Settings.Secure.TV_INPUT_HIDDEN_INPUTS, userId);
+ Set<String> set = new HashSet<>();
+ if (TextUtils.isEmpty(hiddenIdsString)) {
+ return set;
+ }
+ String[] ids = hiddenIdsString.split(TV_INPUT_SEPARATOR);
+ for (String id : ids) {
+ set.add(Uri.decode(id));
+ }
+ return set;
+ }
+
+ /**
+ * Returns a map of TV input ID/custom label pairs set by the user in the settings.
+ *
+ * @param context The application context
+ * @param userId The user ID for the stored hidden input map
+ * @hide
+ */
+ @SystemApi
+ public static Map<String, String> getCustomLabels(Context context, int userId) {
+ String labelsString = Settings.Secure.getStringForUser(
+ context.getContentResolver(), Settings.Secure.TV_INPUT_CUSTOM_LABELS, userId);
+ Map<String, String> map = new HashMap<>();
+ if (TextUtils.isEmpty(labelsString)) {
+ return map;
+ }
+ String[] pairs = labelsString.split(TV_INPUT_SEPARATOR);
+ for (String pairString : pairs) {
+ String[] pair = pairString.split(CUSTOM_NAME_SEPARATOR);
+ map.put(Uri.decode(pair[0]), Uri.decode(pair[1]));
+ }
+ return map;
+ }
+
+ /**
+ * Stores a set of TV input IDs which are marked as hidden by user. This is expected to
+ * be called from the settings app.
+ *
+ * @param context The application context
+ * @param hiddenInputIds A set including all the hidden TV input IDs
+ * @param userId The user ID for the stored hidden input set
+ * @hide
+ */
+ @SystemApi
+ public static void putHiddenTvInputs(Context context, Set<String> hiddenInputIds,
+ int userId) {
+ StringBuilder builder = new StringBuilder();
+ boolean firstItem = true;
+ for (String inputId : hiddenInputIds) {
+ ensureValidField(inputId);
+ if (firstItem) {
+ firstItem = false;
+ } else {
+ builder.append(TV_INPUT_SEPARATOR);
+ }
+ builder.append(Uri.encode(inputId));
+ }
+ Settings.Secure.putStringForUser(context.getContentResolver(),
+ Settings.Secure.TV_INPUT_HIDDEN_INPUTS, builder.toString(), userId);
+
+ // Notify of the TvInputInfo changes.
+ TvInputManager tm = (TvInputManager) context.getSystemService(Context.TV_INPUT_SERVICE);
+ for (String inputId : hiddenInputIds) {
+ TvInputInfo info = tm.getTvInputInfo(inputId);
+ if (info != null) {
+ tm.updateTvInputInfo(info);
+ }
+ }
+ }
+
+ /**
+ * Stores a map of TV input ID/custom label set by user. This is expected to be
+ * called from the settings app.
+ *
+ * @param context The application context.
+ * @param customLabels A map of TV input ID/custom label pairs
+ * @param userId The user ID for the stored hidden input map
+ * @hide
+ */
+ @SystemApi
+ public static void putCustomLabels(Context context,
+ Map<String, String> customLabels, int userId) {
+ StringBuilder builder = new StringBuilder();
+ boolean firstItem = true;
+ for (Map.Entry<String, String> entry: customLabels.entrySet()) {
+ ensureValidField(entry.getKey());
+ ensureValidField(entry.getValue());
+ if (firstItem) {
+ firstItem = false;
+ } else {
+ builder.append(TV_INPUT_SEPARATOR);
+ }
+ builder.append(Uri.encode(entry.getKey()));
+ builder.append(CUSTOM_NAME_SEPARATOR);
+ builder.append(Uri.encode(entry.getValue()));
+ }
+ Settings.Secure.putStringForUser(context.getContentResolver(),
+ Settings.Secure.TV_INPUT_CUSTOM_LABELS, builder.toString(), userId);
+
+ // Notify of the TvInputInfo changes.
+ TvInputManager tm = (TvInputManager) context.getSystemService(Context.TV_INPUT_SERVICE);
+ for (String inputId : customLabels.keySet()) {
+ TvInputInfo info = tm.getTvInputInfo(inputId);
+ if (info != null) {
+ tm.updateTvInputInfo(info);
+ }
+ }
+ }
+
+ private static void ensureValidField(String value) {
+ if (TextUtils.isEmpty(value)) {
+ throw new IllegalArgumentException(value + " should not empty ");
+ }
+ }
+ }
+}
diff --git a/android/media/tv/TvInputManager.java b/android/media/tv/TvInputManager.java
new file mode 100644
index 00000000..d7a9edef
--- /dev/null
+++ b/android/media/tv/TvInputManager.java
@@ -0,0 +1,2611 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv;
+
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.annotation.RequiresPermission;
+import android.annotation.SystemApi;
+import android.annotation.SystemService;
+import android.content.Context;
+import android.content.Intent;
+import android.graphics.Rect;
+import android.media.PlaybackParams;
+import android.net.Uri;
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.IBinder;
+import android.os.Looper;
+import android.os.Message;
+import android.os.ParcelFileDescriptor;
+import android.os.RemoteException;
+import android.text.TextUtils;
+import android.util.ArrayMap;
+import android.util.Log;
+import android.util.Pools.Pool;
+import android.util.Pools.SimplePool;
+import android.util.SparseArray;
+import android.view.InputChannel;
+import android.view.InputEvent;
+import android.view.InputEventSender;
+import android.view.KeyEvent;
+import android.view.Surface;
+import android.view.View;
+
+import com.android.internal.util.Preconditions;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Central system API to the overall TV input framework (TIF) architecture, which arbitrates
+ * interaction between applications and the selected TV inputs.
+ *
+ * <p>There are three primary parties involved in the TV input framework (TIF) architecture:
+ *
+ * <ul>
+ * <li>The <strong>TV input manager</strong> as expressed by this class is the central point of the
+ * system that manages interaction between all other parts. It is expressed as the client-side API
+ * here which exists in each application context and communicates with a global system service that
+ * manages the interaction across all processes.
+ * <li>A <strong>TV input</strong> implemented by {@link TvInputService} represents an input source
+ * of TV, which can be a pass-through input such as HDMI, or a tuner input which provides broadcast
+ * TV programs. The system binds to the TV input per application's request. See
+ * {@link TvInputService} for more information on implementing TV inputs.
+ * <li><strong>Applications</strong> talk to the TV input manager to list TV inputs and check their
+ * status. Once an application finds the input to use, it uses {@link TvView} or
+ * {@link TvRecordingClient} for further interaction such as watching and recording broadcast TV
+ * programs.
+ * </ul>
+ */
+@SystemService(Context.TV_INPUT_SERVICE)
+public final class TvInputManager {
+ private static final String TAG = "TvInputManager";
+
+ static final int DVB_DEVICE_START = 0;
+ static final int DVB_DEVICE_END = 2;
+
+ /**
+ * A demux device of DVB API for controlling the filters of DVB hardware/software.
+ * @hide
+ */
+ public static final int DVB_DEVICE_DEMUX = DVB_DEVICE_START;
+ /**
+ * A DVR device of DVB API for reading transport streams.
+ * @hide
+ */
+ public static final int DVB_DEVICE_DVR = 1;
+ /**
+ * A frontend device of DVB API for controlling the tuner and DVB demodulator hardware.
+ * @hide
+ */
+ public static final int DVB_DEVICE_FRONTEND = DVB_DEVICE_END;
+
+ /** @hide */
+ @Retention(RetentionPolicy.SOURCE)
+ @IntDef({VIDEO_UNAVAILABLE_REASON_UNKNOWN, VIDEO_UNAVAILABLE_REASON_TUNING,
+ VIDEO_UNAVAILABLE_REASON_WEAK_SIGNAL, VIDEO_UNAVAILABLE_REASON_BUFFERING,
+ VIDEO_UNAVAILABLE_REASON_AUDIO_ONLY})
+ public @interface VideoUnavailableReason {}
+
+ static final int VIDEO_UNAVAILABLE_REASON_START = 0;
+ static final int VIDEO_UNAVAILABLE_REASON_END = 4;
+
+ /**
+ * Reason for {@link TvInputService.Session#notifyVideoUnavailable(int)} and
+ * {@link TvView.TvInputCallback#onVideoUnavailable(String, int)}: Video is unavailable due to
+ * an unspecified error.
+ */
+ public static final int VIDEO_UNAVAILABLE_REASON_UNKNOWN = VIDEO_UNAVAILABLE_REASON_START;
+ /**
+ * Reason for {@link TvInputService.Session#notifyVideoUnavailable(int)} and
+ * {@link TvView.TvInputCallback#onVideoUnavailable(String, int)}: Video is unavailable because
+ * the corresponding TV input is in the middle of tuning to a new channel.
+ */
+ public static final int VIDEO_UNAVAILABLE_REASON_TUNING = 1;
+ /**
+ * Reason for {@link TvInputService.Session#notifyVideoUnavailable(int)} and
+ * {@link TvView.TvInputCallback#onVideoUnavailable(String, int)}: Video is unavailable due to
+ * weak TV signal.
+ */
+ public static final int VIDEO_UNAVAILABLE_REASON_WEAK_SIGNAL = 2;
+ /**
+ * Reason for {@link TvInputService.Session#notifyVideoUnavailable(int)} and
+ * {@link TvView.TvInputCallback#onVideoUnavailable(String, int)}: Video is unavailable because
+ * the corresponding TV input has stopped playback temporarily to buffer more data.
+ */
+ public static final int VIDEO_UNAVAILABLE_REASON_BUFFERING = 3;
+ /**
+ * Reason for {@link TvInputService.Session#notifyVideoUnavailable(int)} and
+ * {@link TvView.TvInputCallback#onVideoUnavailable(String, int)}: Video is unavailable because
+ * the current TV program is audio-only.
+ */
+ public static final int VIDEO_UNAVAILABLE_REASON_AUDIO_ONLY = VIDEO_UNAVAILABLE_REASON_END;
+
+ /** @hide */
+ @Retention(RetentionPolicy.SOURCE)
+ @IntDef({TIME_SHIFT_STATUS_UNKNOWN, TIME_SHIFT_STATUS_UNSUPPORTED,
+ TIME_SHIFT_STATUS_UNAVAILABLE, TIME_SHIFT_STATUS_AVAILABLE})
+ public @interface TimeShiftStatus {}
+
+ /**
+ * Status for {@link TvInputService.Session#notifyTimeShiftStatusChanged(int)} and
+ * {@link TvView.TvInputCallback#onTimeShiftStatusChanged(String, int)}: Unknown status. Also
+ * the status prior to calling {@code notifyTimeShiftStatusChanged}.
+ */
+ public static final int TIME_SHIFT_STATUS_UNKNOWN = 0;
+
+ /**
+ * Status for {@link TvInputService.Session#notifyTimeShiftStatusChanged(int)} and
+ * {@link TvView.TvInputCallback#onTimeShiftStatusChanged(String, int)}: The current TV input
+ * does not support time shifting.
+ */
+ public static final int TIME_SHIFT_STATUS_UNSUPPORTED = 1;
+
+ /**
+ * Status for {@link TvInputService.Session#notifyTimeShiftStatusChanged(int)} and
+ * {@link TvView.TvInputCallback#onTimeShiftStatusChanged(String, int)}: Time shifting is
+ * currently unavailable but might work again later.
+ */
+ public static final int TIME_SHIFT_STATUS_UNAVAILABLE = 2;
+
+ /**
+ * Status for {@link TvInputService.Session#notifyTimeShiftStatusChanged(int)} and
+ * {@link TvView.TvInputCallback#onTimeShiftStatusChanged(String, int)}: Time shifting is
+ * currently available. In this status, the application assumes it can pause/resume playback,
+ * seek to a specified time position and set playback rate and audio mode.
+ */
+ public static final int TIME_SHIFT_STATUS_AVAILABLE = 3;
+
+ /**
+ * Value returned by {@link TvInputService.Session#onTimeShiftGetCurrentPosition()} and
+ * {@link TvInputService.Session#onTimeShiftGetStartPosition()} when time shifting has not
+ * yet started.
+ */
+ public static final long TIME_SHIFT_INVALID_TIME = Long.MIN_VALUE;
+
+ /** @hide */
+ @Retention(RetentionPolicy.SOURCE)
+ @IntDef({RECORDING_ERROR_UNKNOWN, RECORDING_ERROR_INSUFFICIENT_SPACE,
+ RECORDING_ERROR_RESOURCE_BUSY})
+ public @interface RecordingError {}
+
+ static final int RECORDING_ERROR_START = 0;
+ static final int RECORDING_ERROR_END = 2;
+
+ /**
+ * Error for {@link TvInputService.RecordingSession#notifyError(int)} and
+ * {@link TvRecordingClient.RecordingCallback#onError(int)}: The requested operation cannot be
+ * completed due to a problem that does not fit under any other error codes, or the error code
+ * for the problem is defined in a higher version than the application's
+ * <code>android:targetSdkVersion</code>.
+ */
+ public static final int RECORDING_ERROR_UNKNOWN = RECORDING_ERROR_START;
+
+ /**
+ * Error for {@link TvInputService.RecordingSession#notifyError(int)} and
+ * {@link TvRecordingClient.RecordingCallback#onError(int)}: Recording cannot proceed due to
+ * insufficient storage space.
+ */
+ public static final int RECORDING_ERROR_INSUFFICIENT_SPACE = 1;
+
+ /**
+ * Error for {@link TvInputService.RecordingSession#notifyError(int)} and
+ * {@link TvRecordingClient.RecordingCallback#onError(int)}: Recording cannot proceed because
+ * a required recording resource was not able to be allocated.
+ */
+ public static final int RECORDING_ERROR_RESOURCE_BUSY = RECORDING_ERROR_END;
+
+ /** @hide */
+ @Retention(RetentionPolicy.SOURCE)
+ @IntDef({INPUT_STATE_CONNECTED, INPUT_STATE_CONNECTED_STANDBY, INPUT_STATE_DISCONNECTED})
+ public @interface InputState {}
+
+ /**
+ * State for {@link #getInputState(String)} and
+ * {@link TvInputCallback#onInputStateChanged(String, int)}: The input source is connected.
+ *
+ * <p>This state indicates that a source device is connected to the input port and is in the
+ * normal operation mode. It is mostly relevant to hardware inputs such as HDMI input.
+ * Non-hardware inputs are considered connected all the time.
+ */
+ public static final int INPUT_STATE_CONNECTED = 0;
+
+ /**
+ * State for {@link #getInputState(String)} and
+ * {@link TvInputCallback#onInputStateChanged(String, int)}: The input source is connected but
+ * in standby mode.
+ *
+ * <p>This state indicates that a source device is connected to the input port but is in standby
+ * or low power mode. It is mostly relevant to hardware inputs such as HDMI input and Component
+ * inputs.
+ */
+ public static final int INPUT_STATE_CONNECTED_STANDBY = 1;
+
+ /**
+ * State for {@link #getInputState(String)} and
+ * {@link TvInputCallback#onInputStateChanged(String, int)}: The input source is disconnected.
+ *
+ * <p>This state indicates that a source device is disconnected from the input port. It is
+ * mostly relevant to hardware inputs such as HDMI input.
+ *
+ */
+ public static final int INPUT_STATE_DISCONNECTED = 2;
+
+ /**
+ * Broadcast intent action when the user blocked content ratings change. For use with the
+ * {@link #isRatingBlocked}.
+ */
+ public static final String ACTION_BLOCKED_RATINGS_CHANGED =
+ "android.media.tv.action.BLOCKED_RATINGS_CHANGED";
+
+ /**
+ * Broadcast intent action when the parental controls enabled state changes. For use with the
+ * {@link #isParentalControlsEnabled}.
+ */
+ public static final String ACTION_PARENTAL_CONTROLS_ENABLED_CHANGED =
+ "android.media.tv.action.PARENTAL_CONTROLS_ENABLED_CHANGED";
+
+ /**
+ * Broadcast intent action used to query available content rating systems.
+ *
+ * <p>The TV input manager service locates available content rating systems by querying
+ * broadcast receivers that are registered for this action. An application can offer additional
+ * content rating systems to the user by declaring a suitable broadcast receiver in its
+ * manifest.
+ *
+ * <p>Here is an example broadcast receiver declaration that an application might include in its
+ * AndroidManifest.xml to advertise custom content rating systems. The meta-data specifies a
+ * resource that contains a description of each content rating system that is provided by the
+ * application.
+ *
+ * <p><pre class="prettyprint">
+ * {@literal
+ * <receiver android:name=".TvInputReceiver">
+ * <intent-filter>
+ * <action android:name=
+ * "android.media.tv.action.QUERY_CONTENT_RATING_SYSTEMS" />
+ * </intent-filter>
+ * <meta-data
+ * android:name="android.media.tv.metadata.CONTENT_RATING_SYSTEMS"
+ * android:resource="@xml/tv_content_rating_systems" />
+ * </receiver>}</pre>
+ *
+ * <p>In the above example, the <code>@xml/tv_content_rating_systems</code> resource refers to an
+ * XML resource whose root element is <code>&lt;rating-system-definitions&gt;</code> that
+ * contains zero or more <code>&lt;rating-system-definition&gt;</code> elements. Each <code>
+ * &lt;rating-system-definition&gt;</code> element specifies the ratings, sub-ratings and rating
+ * orders of a particular content rating system.
+ *
+ * @see TvContentRating
+ */
+ public static final String ACTION_QUERY_CONTENT_RATING_SYSTEMS =
+ "android.media.tv.action.QUERY_CONTENT_RATING_SYSTEMS";
+
+ /**
+ * Content rating systems metadata associated with {@link #ACTION_QUERY_CONTENT_RATING_SYSTEMS}.
+ *
+ * <p>Specifies the resource ID of an XML resource that describes the content rating systems
+ * that are provided by the application.
+ */
+ public static final String META_DATA_CONTENT_RATING_SYSTEMS =
+ "android.media.tv.metadata.CONTENT_RATING_SYSTEMS";
+
+ /**
+ * Activity action to set up channel sources i.e.&nbsp;TV inputs of type
+ * {@link TvInputInfo#TYPE_TUNER}. When invoked, the system will display an appropriate UI for
+ * the user to initiate the individual setup flow provided by
+ * {@link android.R.attr#setupActivity} of each TV input service.
+ */
+ public static final String ACTION_SETUP_INPUTS = "android.media.tv.action.SETUP_INPUTS";
+
+ /**
+ * Activity action to display the recording schedules. When invoked, the system will display an
+ * appropriate UI to browse the schedules.
+ */
+ public static final String ACTION_VIEW_RECORDING_SCHEDULES =
+ "android.media.tv.action.VIEW_RECORDING_SCHEDULES";
+
+ // Remote interface to the TV input manager system service.
+ private final ITvInputManager mService;
+
+ // Guards mCallbackRecords and mStateMap (see @GuardedBy notes below).
+ private final Object mLock = new Object();
+
+ // @GuardedBy("mLock")
+ private final List<TvInputCallbackRecord> mCallbackRecords = new LinkedList<>();
+
+ // A mapping from TV input ID to the state of corresponding input.
+ // @GuardedBy("mLock")
+ private final Map<String, Integer> mStateMap = new ArrayMap<>();
+
+ // A mapping from the sequence number of a session to its SessionCallbackRecord.
+ private final SparseArray<SessionCallbackRecord> mSessionCallbackRecordMap =
+ new SparseArray<>();
+
+ // A sequence number for the next session to be created. Should be protected by a lock
+ // {@code mSessionCallbackRecordMap}.
+ private int mNextSeq;
+
+ // Client-side callback interface registered with the service; presumably how the service
+ // delivers session/input events back to this process — TODO confirm against service code.
+ private final ITvInputClient mClient;
+
+ // The user this manager instance acts on behalf of.
+ private final int mUserId;
+
+ /**
+ * Interface used to receive the created session.
+ * @hide
+ */
+ public abstract static class SessionCallback {
+ /**
+ * This is called after {@link TvInputManager#createSession} has been processed.
+ *
+ * @param session A {@link TvInputManager.Session} instance created. This can be
+ * {@code null} if the creation request failed.
+ */
+ public void onSessionCreated(@Nullable Session session) {
+ }
+
+ /**
+ * This is called when {@link TvInputManager.Session} is released.
+ * This typically happens when the process hosting the session has crashed or been killed.
+ *
+ * @param session A {@link TvInputManager.Session} instance released.
+ */
+ public void onSessionReleased(Session session) {
+ }
+
+ /**
+ * This is called when the channel of this session is changed by the underlying TV input
+ * without any {@link TvInputManager.Session#tune(Uri)} request.
+ *
+ * @param session A {@link TvInputManager.Session} associated with this callback.
+ * @param channelUri The URI of a channel.
+ */
+ public void onChannelRetuned(Session session, Uri channelUri) {
+ }
+
+ /**
+ * This is called when the track information of the session has been changed.
+ *
+ * @param session A {@link TvInputManager.Session} associated with this callback.
+ * @param tracks A list which includes track information.
+ */
+ public void onTracksChanged(Session session, List<TvTrackInfo> tracks) {
+ }
+
+ /**
+ * This is called when a track for a given type is selected.
+ *
+ * @param session A {@link TvInputManager.Session} associated with this callback.
+ * @param type The type of the selected track. The type can be
+ * {@link TvTrackInfo#TYPE_AUDIO}, {@link TvTrackInfo#TYPE_VIDEO} or
+ * {@link TvTrackInfo#TYPE_SUBTITLE}.
+ * @param trackId The ID of the selected track. When {@code null} the currently selected
+ * track for a given type should be unselected.
+ */
+ public void onTrackSelected(Session session, int type, @Nullable String trackId) {
+ }
+
+ /**
+ * This is invoked when the video size has been changed. It is also called when the first
+ * time video size information becomes available after the session is tuned to a specific
+ * channel.
+ *
+ * @param session A {@link TvInputManager.Session} associated with this callback.
+ * @param width The width of the video.
+ * @param height The height of the video.
+ */
+ public void onVideoSizeChanged(Session session, int width, int height) {
+ }
+
+ /**
+ * This is called when the video is available, so the TV input starts the playback.
+ *
+ * @param session A {@link TvInputManager.Session} associated with this callback.
+ */
+ public void onVideoAvailable(Session session) {
+ }
+
+ /**
+ * This is called when the video is not available, so the TV input stops the playback.
+ *
+ * @param session A {@link TvInputManager.Session} associated with this callback.
+ * @param reason The reason why the TV input stopped the playback:
+ * <ul>
+ * <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_UNKNOWN}
+ * <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_TUNING}
+ * <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_WEAK_SIGNAL}
+ * <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_BUFFERING}
+ * <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_AUDIO_ONLY}
+ * </ul>
+ */
+ public void onVideoUnavailable(Session session, int reason) {
+ }
+
+ /**
+ * This is called when the current program content turns out to be allowed to watch since
+ * its content rating is not blocked by parental controls.
+ *
+ * @param session A {@link TvInputManager.Session} associated with this callback.
+ */
+ public void onContentAllowed(Session session) {
+ }
+
+ /**
+ * This is called when the current program content turns out to be not allowed to watch
+ * since its content rating is blocked by parental controls.
+ *
+ * @param session A {@link TvInputManager.Session} associated with this callback.
+ * @param rating The content rating of the blocked program.
+ */
+ public void onContentBlocked(Session session, TvContentRating rating) {
+ }
+
+ /**
+ * This is called when {@link TvInputService.Session#layoutSurface} is called to change the
+ * layout of surface.
+ *
+ * @param session A {@link TvInputManager.Session} associated with this callback.
+ * @param left Left position.
+ * @param top Top position.
+ * @param right Right position.
+ * @param bottom Bottom position.
+ */
+ public void onLayoutSurface(Session session, int left, int top, int right, int bottom) {
+ }
+
+ /**
+ * This is called when a custom event has been sent from this session.
+ *
+ * @param session A {@link TvInputManager.Session} associated with this callback
+ * @param eventType The type of the event.
+ * @param eventArgs Optional arguments of the event.
+ */
+ public void onSessionEvent(Session session, String eventType, Bundle eventArgs) {
+ }
+
+ /**
+ * This is called when the time shift status is changed.
+ *
+ * @param session A {@link TvInputManager.Session} associated with this callback.
+ * @param status The current time shift status. Should be one of the following.
+ * <ul>
+ * <li>{@link TvInputManager#TIME_SHIFT_STATUS_UNSUPPORTED}
+ * <li>{@link TvInputManager#TIME_SHIFT_STATUS_UNAVAILABLE}
+ * <li>{@link TvInputManager#TIME_SHIFT_STATUS_AVAILABLE}
+ * </ul>
+ */
+ public void onTimeShiftStatusChanged(Session session, int status) {
+ }
+
+ /**
+ * This is called when the start position for time shifting has changed.
+ *
+ * @param session A {@link TvInputManager.Session} associated with this callback.
+ * @param timeMs The start position for time shifting, in milliseconds since the epoch.
+ */
+ public void onTimeShiftStartPositionChanged(Session session, long timeMs) {
+ }
+
+ /**
+ * This is called when the current position for time shifting is changed.
+ *
+ * @param session A {@link TvInputManager.Session} associated with this callback.
+ * @param timeMs The current position for time shifting, in milliseconds since the epoch.
+ */
+ public void onTimeShiftCurrentPositionChanged(Session session, long timeMs) {
+ }
+
+ // For the recording session only
+ /**
+ * This is called when the recording session has been tuned to the given channel and is
+ * ready to start recording.
+ *
+ * @param session A {@link TvInputManager.Session} associated with this callback.
+ * @param channelUri The URI of a channel.
+ */
+ void onTuned(Session session, Uri channelUri) {
+ }
+
+ // For the recording session only
+ /**
+ * This is called when the current recording session has stopped recording and created a
+ * new data entry in the {@link TvContract.RecordedPrograms} table that describes the newly
+ * recorded program.
+ *
+ * @param session A {@link TvInputManager.Session} associated with this callback.
+ * @param recordedProgramUri The URI for the newly recorded program.
+ */
+ void onRecordingStopped(Session session, Uri recordedProgramUri) {
+ }
+
+ // For the recording session only
+ /**
+ * This is called when an issue has occurred. It may be called at any time after the current
+ * recording session is created until it is released.
+ *
+ * @param session A {@link TvInputManager.Session} associated with this callback.
+ * @param error The error code.
+ */
+ void onError(Session session, @TvInputManager.RecordingError int error) {
+ }
+ }
+
+ private static final class SessionCallbackRecord {
+ private final SessionCallback mSessionCallback;
+ private final Handler mHandler;
+ private Session mSession;
+
+ SessionCallbackRecord(SessionCallback sessionCallback,
+ Handler handler) {
+ mSessionCallback = sessionCallback;
+ mHandler = handler;
+ }
+
+ void postSessionCreated(final Session session) {
+ mSession = session;
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mSessionCallback.onSessionCreated(session);
+ }
+ });
+ }
+
+ void postSessionReleased() {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mSessionCallback.onSessionReleased(mSession);
+ }
+ });
+ }
+
+ void postChannelRetuned(final Uri channelUri) {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mSessionCallback.onChannelRetuned(mSession, channelUri);
+ }
+ });
+ }
+
+ void postTracksChanged(final List<TvTrackInfo> tracks) {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mSessionCallback.onTracksChanged(mSession, tracks);
+ }
+ });
+ }
+
+ void postTrackSelected(final int type, final String trackId) {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mSessionCallback.onTrackSelected(mSession, type, trackId);
+ }
+ });
+ }
+
+ void postVideoSizeChanged(final int width, final int height) {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mSessionCallback.onVideoSizeChanged(mSession, width, height);
+ }
+ });
+ }
+
+ void postVideoAvailable() {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mSessionCallback.onVideoAvailable(mSession);
+ }
+ });
+ }
+
+ void postVideoUnavailable(final int reason) {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mSessionCallback.onVideoUnavailable(mSession, reason);
+ }
+ });
+ }
+
+ void postContentAllowed() {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mSessionCallback.onContentAllowed(mSession);
+ }
+ });
+ }
+
+ void postContentBlocked(final TvContentRating rating) {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mSessionCallback.onContentBlocked(mSession, rating);
+ }
+ });
+ }
+
+ void postLayoutSurface(final int left, final int top, final int right,
+ final int bottom) {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mSessionCallback.onLayoutSurface(mSession, left, top, right, bottom);
+ }
+ });
+ }
+
+ void postSessionEvent(final String eventType, final Bundle eventArgs) {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mSessionCallback.onSessionEvent(mSession, eventType, eventArgs);
+ }
+ });
+ }
+
+ void postTimeShiftStatusChanged(final int status) {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mSessionCallback.onTimeShiftStatusChanged(mSession, status);
+ }
+ });
+ }
+
+ void postTimeShiftStartPositionChanged(final long timeMs) {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mSessionCallback.onTimeShiftStartPositionChanged(mSession, timeMs);
+ }
+ });
+ }
+
+ void postTimeShiftCurrentPositionChanged(final long timeMs) {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mSessionCallback.onTimeShiftCurrentPositionChanged(mSession, timeMs);
+ }
+ });
+ }
+
+ // For the recording session only
+ void postTuned(final Uri channelUri) {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mSessionCallback.onTuned(mSession, channelUri);
+ }
+ });
+ }
+
+ // For the recording session only
+ void postRecordingStopped(final Uri recordedProgramUri) {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mSessionCallback.onRecordingStopped(mSession, recordedProgramUri);
+ }
+ });
+ }
+
+ // For the recording session only
+ void postError(final int error) {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mSessionCallback.onError(mSession, error);
+ }
+ });
+ }
+ }
+
+ /**
+ * Callback used to monitor status of the TV inputs.
+ *
+ * <p>NOTE(review): callbacks appear to be posted via a {@code TvInputCallbackRecord} onto a
+ * caller-supplied {@code Handler} — confirm the registration API's threading contract.
+ */
+ public abstract static class TvInputCallback {
+ /**
+ * This is called when the state of a given TV input is changed.
+ *
+ * @param inputId The ID of the TV input.
+ * @param state State of the TV input. The value is one of the following:
+ * <ul>
+ * <li>{@link TvInputManager#INPUT_STATE_CONNECTED}
+ * <li>{@link TvInputManager#INPUT_STATE_CONNECTED_STANDBY}
+ * <li>{@link TvInputManager#INPUT_STATE_DISCONNECTED}
+ * </ul>
+ */
+ public void onInputStateChanged(String inputId, @InputState int state) {
+ }
+
+ /**
+ * This is called when a TV input is added to the system.
+ *
+ * <p>Normally it happens when the user installs a new TV input package that implements
+ * {@link TvInputService} interface.
+ *
+ * @param inputId The ID of the TV input.
+ */
+ public void onInputAdded(String inputId) {
+ }
+
+ /**
+ * This is called when a TV input is removed from the system.
+ *
+ * <p>Normally it happens when the user uninstalls the previously installed TV input
+ * package.
+ *
+ * @param inputId The ID of the TV input.
+ */
+ public void onInputRemoved(String inputId) {
+ }
+
+ /**
+ * This is called when a TV input is updated on the system.
+ *
+ * <p>Normally it happens when a previously installed TV input package is re-installed or
+ * the media on which a newer version of the package exists becomes available/unavailable.
+ *
+ * @param inputId The ID of the TV input.
+ */
+ public void onInputUpdated(String inputId) {
+ }
+
+ /**
+ * This is called when the information about an existing TV input has been updated.
+ *
+ * <p>Because the system automatically creates a <code>TvInputInfo</code> object for each TV
+ * input based on the information collected from the <code>AndroidManifest.xml</code>, this
+ * method is only called back when such information has changed dynamically.
+ *
+ * @param inputInfo The <code>TvInputInfo</code> object that contains new information.
+ */
+ public void onTvInputInfoUpdated(TvInputInfo inputInfo) {
+ }
+ }
+
+ /** Pairs a registered {@link TvInputCallback} with the {@link Handler} its events run on. */
+ private static final class TvInputCallbackRecord {
+ private final TvInputCallback mCallback;
+ private final Handler mHandler;
+
+ public TvInputCallbackRecord(TvInputCallback callback, Handler handler) {
+ mCallback = callback;
+ mHandler = handler;
+ }
+
+ public TvInputCallback getCallback() {
+ return mCallback;
+ }
+
+ public void postInputAdded(final String inputId) {
+ mHandler.post(() -> mCallback.onInputAdded(inputId));
+ }
+
+ public void postInputRemoved(final String inputId) {
+ mHandler.post(() -> mCallback.onInputRemoved(inputId));
+ }
+
+ public void postInputUpdated(final String inputId) {
+ mHandler.post(() -> mCallback.onInputUpdated(inputId));
+ }
+
+ public void postInputStateChanged(final String inputId, final int state) {
+ mHandler.post(() -> mCallback.onInputStateChanged(inputId, state));
+ }
+
+ public void postTvInputInfoUpdated(final TvInputInfo inputInfo) {
+ mHandler.post(() -> mCallback.onTvInputInfoUpdated(inputInfo));
+ }
+ }
+
+ /**
+ * Interface used to receive events from Hardware objects.
+ *
+ * <p>An instance is passed to {@link #acquireTvInputHardware} and receives events for the
+ * acquired device.
+ *
+ * @hide
+ */
+ @SystemApi
+ public abstract static class HardwareCallback {
+ /**
+ * This is called when {@link Hardware} is no longer available for the client.
+ */
+ public abstract void onReleased();
+
+ /**
+ * This is called when the underlying {@link TvStreamConfig} has been changed.
+ *
+ * @param configs The new {@link TvStreamConfig}s.
+ */
+ public abstract void onStreamConfigChanged(TvStreamConfig[] configs);
+ }
+
+ /**
+ * @hide
+ */
+ public TvInputManager(ITvInputManager service, int userId) {
+ mService = service;
+ mUserId = userId;
+ // Binder stub that routes per-session events from the service back to the
+ // SessionCallbackRecord registered under the matching sequence number (seq).
+ mClient = new ITvInputClient.Stub() {
+ @Override
+ public void onSessionCreated(String inputId, IBinder token, InputChannel channel,
+ int seq) {
+ synchronized (mSessionCallbackRecordMap) {
+ SessionCallbackRecord record = mSessionCallbackRecordMap.get(seq);
+ if (record == null) {
+ // NOTE(review): unlike the other handlers this logs the token, not the seq.
+ Log.e(TAG, "Callback not found for " + token);
+ return;
+ }
+ Session session = null;
+ // A null token means session creation failed; the callback still fires
+ // with a null session so the client can observe the failure.
+ if (token != null) {
+ session = new Session(token, channel, mService, mUserId, seq,
+ mSessionCallbackRecordMap);
+ }
+ record.postSessionCreated(session);
+ }
+ }
+
+ @Override
+ public void onSessionReleased(int seq) {
+ synchronized (mSessionCallbackRecordMap) {
+ SessionCallbackRecord record = mSessionCallbackRecordMap.get(seq);
+ // The record is removed from the map regardless of lookup success.
+ mSessionCallbackRecordMap.delete(seq);
+ if (record == null) {
+ Log.e(TAG, "Callback not found for seq:" + seq);
+ return;
+ }
+ record.mSession.releaseInternal();
+ record.postSessionReleased();
+ }
+ }
+
+ @Override
+ public void onChannelRetuned(Uri channelUri, int seq) {
+ synchronized (mSessionCallbackRecordMap) {
+ SessionCallbackRecord record = mSessionCallbackRecordMap.get(seq);
+ if (record == null) {
+ Log.e(TAG, "Callback not found for seq " + seq);
+ return;
+ }
+ record.postChannelRetuned(channelUri);
+ }
+ }
+
+ @Override
+ public void onTracksChanged(List<TvTrackInfo> tracks, int seq) {
+ synchronized (mSessionCallbackRecordMap) {
+ SessionCallbackRecord record = mSessionCallbackRecordMap.get(seq);
+ if (record == null) {
+ Log.e(TAG, "Callback not found for seq " + seq);
+ return;
+ }
+ // Only notify when the track list actually changed the session's state.
+ if (record.mSession.updateTracks(tracks)) {
+ record.postTracksChanged(tracks);
+ postVideoSizeChangedIfNeededLocked(record);
+ }
+ }
+ }
+
+ @Override
+ public void onTrackSelected(int type, String trackId, int seq) {
+ synchronized (mSessionCallbackRecordMap) {
+ SessionCallbackRecord record = mSessionCallbackRecordMap.get(seq);
+ if (record == null) {
+ Log.e(TAG, "Callback not found for seq " + seq);
+ return;
+ }
+ if (record.mSession.updateTrackSelection(type, trackId)) {
+ record.postTrackSelected(type, trackId);
+ postVideoSizeChangedIfNeededLocked(record);
+ }
+ }
+ }
+
+ // Must be called while holding mSessionCallbackRecordMap's lock (hence "Locked").
+ private void postVideoSizeChangedIfNeededLocked(SessionCallbackRecord record) {
+ TvTrackInfo track = record.mSession.getVideoTrackToNotify();
+ if (track != null) {
+ record.postVideoSizeChanged(track.getVideoWidth(), track.getVideoHeight());
+ }
+ }
+
+ @Override
+ public void onVideoAvailable(int seq) {
+ synchronized (mSessionCallbackRecordMap) {
+ SessionCallbackRecord record = mSessionCallbackRecordMap.get(seq);
+ if (record == null) {
+ Log.e(TAG, "Callback not found for seq " + seq);
+ return;
+ }
+ record.postVideoAvailable();
+ }
+ }
+
+ @Override
+ public void onVideoUnavailable(int reason, int seq) {
+ synchronized (mSessionCallbackRecordMap) {
+ SessionCallbackRecord record = mSessionCallbackRecordMap.get(seq);
+ if (record == null) {
+ Log.e(TAG, "Callback not found for seq " + seq);
+ return;
+ }
+ record.postVideoUnavailable(reason);
+ }
+ }
+
+ @Override
+ public void onContentAllowed(int seq) {
+ synchronized (mSessionCallbackRecordMap) {
+ SessionCallbackRecord record = mSessionCallbackRecordMap.get(seq);
+ if (record == null) {
+ Log.e(TAG, "Callback not found for seq " + seq);
+ return;
+ }
+ record.postContentAllowed();
+ }
+ }
+
+ @Override
+ public void onContentBlocked(String rating, int seq) {
+ synchronized (mSessionCallbackRecordMap) {
+ SessionCallbackRecord record = mSessionCallbackRecordMap.get(seq);
+ if (record == null) {
+ Log.e(TAG, "Callback not found for seq " + seq);
+ return;
+ }
+ // The rating crosses the binder as a flattened string; rebuild it here.
+ record.postContentBlocked(TvContentRating.unflattenFromString(rating));
+ }
+ }
+
+ @Override
+ public void onLayoutSurface(int left, int top, int right, int bottom, int seq) {
+ synchronized (mSessionCallbackRecordMap) {
+ SessionCallbackRecord record = mSessionCallbackRecordMap.get(seq);
+ if (record == null) {
+ Log.e(TAG, "Callback not found for seq " + seq);
+ return;
+ }
+ record.postLayoutSurface(left, top, right, bottom);
+ }
+ }
+
+ @Override
+ public void onSessionEvent(String eventType, Bundle eventArgs, int seq) {
+ synchronized (mSessionCallbackRecordMap) {
+ SessionCallbackRecord record = mSessionCallbackRecordMap.get(seq);
+ if (record == null) {
+ Log.e(TAG, "Callback not found for seq " + seq);
+ return;
+ }
+ record.postSessionEvent(eventType, eventArgs);
+ }
+ }
+
+ @Override
+ public void onTimeShiftStatusChanged(int status, int seq) {
+ synchronized (mSessionCallbackRecordMap) {
+ SessionCallbackRecord record = mSessionCallbackRecordMap.get(seq);
+ if (record == null) {
+ Log.e(TAG, "Callback not found for seq " + seq);
+ return;
+ }
+ record.postTimeShiftStatusChanged(status);
+ }
+ }
+
+ @Override
+ public void onTimeShiftStartPositionChanged(long timeMs, int seq) {
+ synchronized (mSessionCallbackRecordMap) {
+ SessionCallbackRecord record = mSessionCallbackRecordMap.get(seq);
+ if (record == null) {
+ Log.e(TAG, "Callback not found for seq " + seq);
+ return;
+ }
+ record.postTimeShiftStartPositionChanged(timeMs);
+ }
+ }
+
+ @Override
+ public void onTimeShiftCurrentPositionChanged(long timeMs, int seq) {
+ synchronized (mSessionCallbackRecordMap) {
+ SessionCallbackRecord record = mSessionCallbackRecordMap.get(seq);
+ if (record == null) {
+ Log.e(TAG, "Callback not found for seq " + seq);
+ return;
+ }
+ record.postTimeShiftCurrentPositionChanged(timeMs);
+ }
+ }
+
+ @Override
+ public void onTuned(int seq, Uri channelUri) {
+ synchronized (mSessionCallbackRecordMap) {
+ SessionCallbackRecord record = mSessionCallbackRecordMap.get(seq);
+ if (record == null) {
+ Log.e(TAG, "Callback not found for seq " + seq);
+ return;
+ }
+ record.postTuned(channelUri);
+ }
+ }
+
+ @Override
+ public void onRecordingStopped(Uri recordedProgramUri, int seq) {
+ synchronized (mSessionCallbackRecordMap) {
+ SessionCallbackRecord record = mSessionCallbackRecordMap.get(seq);
+ if (record == null) {
+ Log.e(TAG, "Callback not found for seq " + seq);
+ return;
+ }
+ record.postRecordingStopped(recordedProgramUri);
+ }
+ }
+
+ @Override
+ public void onError(int error, int seq) {
+ synchronized (mSessionCallbackRecordMap) {
+ SessionCallbackRecord record = mSessionCallbackRecordMap.get(seq);
+ if (record == null) {
+ Log.e(TAG, "Callback not found for seq " + seq);
+ return;
+ }
+ record.postError(error);
+ }
+ }
+ };
+ // Input lifecycle events fan out to every registered TvInputCallbackRecord and
+ // keep the local input-state cache (mStateMap) in sync.
+ ITvInputManagerCallback managerCallback = new ITvInputManagerCallback.Stub() {
+ @Override
+ public void onInputAdded(String inputId) {
+ synchronized (mLock) {
+ mStateMap.put(inputId, INPUT_STATE_CONNECTED);
+ for (TvInputCallbackRecord record : mCallbackRecords) {
+ record.postInputAdded(inputId);
+ }
+ }
+ }
+
+ @Override
+ public void onInputRemoved(String inputId) {
+ synchronized (mLock) {
+ mStateMap.remove(inputId);
+ for (TvInputCallbackRecord record : mCallbackRecords) {
+ record.postInputRemoved(inputId);
+ }
+ }
+ }
+
+ @Override
+ public void onInputUpdated(String inputId) {
+ synchronized (mLock) {
+ for (TvInputCallbackRecord record : mCallbackRecords) {
+ record.postInputUpdated(inputId);
+ }
+ }
+ }
+
+ @Override
+ public void onInputStateChanged(String inputId, int state) {
+ synchronized (mLock) {
+ mStateMap.put(inputId, state);
+ for (TvInputCallbackRecord record : mCallbackRecords) {
+ record.postInputStateChanged(inputId, state);
+ }
+ }
+ }
+
+ @Override
+ public void onTvInputInfoUpdated(TvInputInfo inputInfo) {
+ synchronized (mLock) {
+ for (TvInputCallbackRecord record : mCallbackRecords) {
+ record.postTvInputInfoUpdated(inputInfo);
+ }
+ }
+ }
+ };
+ try {
+ if (mService != null) {
+ mService.registerCallback(managerCallback, mUserId);
+ // Seed the input-state cache so getInputState() answers before any
+ // onInputStateChanged callback has arrived.
+ List<TvInputInfo> infos = mService.getTvInputList(mUserId);
+ synchronized (mLock) {
+ for (TvInputInfo info : infos) {
+ String inputId = info.getId();
+ mStateMap.put(inputId, mService.getTvInputState(inputId, mUserId));
+ }
+ }
+ }
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Returns the complete list of TV inputs on the system.
+ *
+ * @return List of {@link TvInputInfo} for each TV input that describes its meta information.
+ */
+ public List<TvInputInfo> getTvInputList() {
+ try {
+ return mService.getTvInputList(mUserId);
+ } catch (RemoteException e) {
+ // Binder death is surfaced to the caller as a runtime exception.
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Returns the {@link TvInputInfo} for a given TV input.
+ *
+ * @param inputId The ID of the TV input.
+ * @return the {@link TvInputInfo} for a given TV input. {@code null} if not found.
+ * @throws NullPointerException if {@code inputId} is {@code null}.
+ */
+ @Nullable
+ public TvInputInfo getTvInputInfo(@NonNull String inputId) {
+ Preconditions.checkNotNull(inputId);
+ try {
+ return mService.getTvInputInfo(inputId, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Updates the <code>TvInputInfo</code> for an existing TV input. A TV input service
+ * implementation may call this method to pass the application and system an up-to-date
+ * <code>TvInputInfo</code> object that describes itself.
+ *
+ * <p>The system automatically creates a <code>TvInputInfo</code> object for each TV input,
+ * based on the information collected from the <code>AndroidManifest.xml</code>, thus it is not
+ * necessary to call this method unless such information has changed dynamically.
+ * Use {@link TvInputInfo.Builder} to build a new <code>TvInputInfo</code> object.
+ *
+ * <p>Attempting to change information about a TV input that the calling package does not own
+ * does nothing.
+ *
+ * @param inputInfo The <code>TvInputInfo</code> object that contains new information.
+ * @throws NullPointerException if the argument is {@code null}.
+ * @see TvInputCallback#onTvInputInfoUpdated(TvInputInfo)
+ */
+ public void updateTvInputInfo(@NonNull TvInputInfo inputInfo) {
+ // Preconditions.checkNotNull throws NullPointerException, not IllegalArgumentException.
+ Preconditions.checkNotNull(inputInfo);
+ try {
+ mService.updateTvInputInfo(inputInfo, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Returns the state of a given TV input.
+ *
+ * <p>The state is one of the following:
+ * <ul>
+ * <li>{@link #INPUT_STATE_CONNECTED}
+ * <li>{@link #INPUT_STATE_CONNECTED_STANDBY}
+ * <li>{@link #INPUT_STATE_DISCONNECTED}
+ * </ul>
+ *
+ * @param inputId The ID of the TV input.
+ * @return The cached state for {@code inputId}; {@link #INPUT_STATE_DISCONNECTED} if the
+ * input is unknown.
+ * @throws NullPointerException if the argument is {@code null}.
+ */
+ @InputState
+ public int getInputState(@NonNull String inputId) {
+ Preconditions.checkNotNull(inputId);
+ synchronized (mLock) {
+ // Answered from the local cache populated by the constructor and
+ // kept current by onInputStateChanged; no binder call is made here.
+ Integer state = mStateMap.get(inputId);
+ if (state == null) {
+ Log.w(TAG, "Unrecognized input ID: " + inputId);
+ return INPUT_STATE_DISCONNECTED;
+ }
+ return state;
+ }
+ }
+
+ /**
+ * Registers a {@link TvInputCallback}.
+ *
+ * @param callback A callback used to monitor status of the TV inputs.
+ * @param handler A {@link Handler} that the status change will be delivered to.
+ */
+ public void registerCallback(@NonNull TvInputCallback callback, @NonNull Handler handler) {
+ Preconditions.checkNotNull(callback);
+ Preconditions.checkNotNull(handler);
+ synchronized (mLock) {
+ // Duplicate registrations are not filtered; each call adds a new record.
+ mCallbackRecords.add(new TvInputCallbackRecord(callback, handler));
+ }
+ }
+
+ /**
+ * Unregisters the existing {@link TvInputCallback}.
+ *
+ * @param callback The existing callback to remove.
+ */
+ public void unregisterCallback(@NonNull final TvInputCallback callback) {
+ Preconditions.checkNotNull(callback);
+ synchronized (mLock) {
+ // Identity comparison; only the first matching record is removed.
+ for (Iterator<TvInputCallbackRecord> it = mCallbackRecords.iterator();
+ it.hasNext(); ) {
+ TvInputCallbackRecord record = it.next();
+ if (record.getCallback() == callback) {
+ it.remove();
+ break;
+ }
+ }
+ }
+ }
+
+ /**
+ * Returns the user's parental controls enabled state.
+ *
+ * @return {@code true} if the user enabled the parental controls, {@code false} otherwise.
+ */
+ public boolean isParentalControlsEnabled() {
+ try {
+ // Queried from the service each time; not cached locally.
+ return mService.isParentalControlsEnabled(mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Sets the user's parental controls enabled state.
+ *
+ * @param enabled The user's parental controls enabled state. {@code true} if the user enabled
+ * the parental controls, {@code false} otherwise.
+ * @see #isParentalControlsEnabled
+ * @hide
+ */
+ @SystemApi
+ @RequiresPermission(android.Manifest.permission.MODIFY_PARENTAL_CONTROLS)
+ public void setParentalControlsEnabled(boolean enabled) {
+ try {
+ // Permission enforcement happens service-side; this is a thin binder wrapper.
+ mService.setParentalControlsEnabled(enabled, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Checks whether a given TV content rating is blocked by the user.
+ *
+ * @param rating The TV content rating to check. Can be {@link TvContentRating#UNRATED}.
+ * @return {@code true} if the given TV content rating is blocked, {@code false} otherwise.
+ */
+ public boolean isRatingBlocked(@NonNull TvContentRating rating) {
+ Preconditions.checkNotNull(rating);
+ try {
+ // Ratings cross the binder boundary in flattened-string form.
+ return mService.isRatingBlocked(rating.flattenToString(), mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Returns the list of blocked content ratings.
+ *
+ * @return the list of content ratings blocked by the user.
+ */
+ public List<TvContentRating> getBlockedRatings() {
+ try {
+ // The service returns flattened strings; rebuild TvContentRating objects here.
+ List<TvContentRating> ratings = new ArrayList<>();
+ for (String rating : mService.getBlockedRatings(mUserId)) {
+ ratings.add(TvContentRating.unflattenFromString(rating));
+ }
+ return ratings;
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Adds a user blocked content rating.
+ *
+ * @param rating The content rating to block.
+ * @see #isRatingBlocked
+ * @see #removeBlockedRating
+ * @hide
+ */
+ @SystemApi
+ @RequiresPermission(android.Manifest.permission.MODIFY_PARENTAL_CONTROLS)
+ public void addBlockedRating(@NonNull TvContentRating rating) {
+ Preconditions.checkNotNull(rating);
+ try {
+ // Stored service-side in flattened-string form.
+ mService.addBlockedRating(rating.flattenToString(), mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Removes a user blocked content rating.
+ *
+ * @param rating The content rating to unblock.
+ * @see #isRatingBlocked
+ * @see #addBlockedRating
+ * @hide
+ */
+ @SystemApi
+ @RequiresPermission(android.Manifest.permission.MODIFY_PARENTAL_CONTROLS)
+ public void removeBlockedRating(@NonNull TvContentRating rating) {
+ Preconditions.checkNotNull(rating);
+ try {
+ // Mirrors addBlockedRating; the flattened string is the service-side key.
+ mService.removeBlockedRating(rating.flattenToString(), mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Returns the list of all TV content rating systems defined.
+ *
+ * @return the defined rating systems as {@link TvContentRatingSystemInfo} objects.
+ * @hide
+ */
+ @SystemApi
+ @RequiresPermission(android.Manifest.permission.READ_CONTENT_RATING_SYSTEMS)
+ public List<TvContentRatingSystemInfo> getTvContentRatingSystemList() {
+ try {
+ return mService.getTvContentRatingSystemList(mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Notifies the TV input of the given preview program that the program's browsable state is
+ * disabled.
+ *
+ * @param packageName The package that owns the preview program; the intent is restricted
+ * to this package.
+ * @param programId The ID of the affected preview program.
+ * @hide
+ */
+ @SystemApi
+ @RequiresPermission(android.Manifest.permission.NOTIFY_TV_INPUTS)
+ public void notifyPreviewProgramBrowsableDisabled(String packageName, long programId) {
+ // Delivered as an explicit broadcast scoped to the target package via the service.
+ Intent intent = new Intent();
+ intent.setAction(TvContract.ACTION_PREVIEW_PROGRAM_BROWSABLE_DISABLED);
+ intent.putExtra(TvContract.EXTRA_PREVIEW_PROGRAM_ID, programId);
+ intent.setPackage(packageName);
+ try {
+ mService.sendTvInputNotifyIntent(intent, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Notifies the TV input of the given watch next program that the program's browsable state is
+ * disabled.
+ *
+ * @param packageName The package that owns the watch next program; the intent is restricted
+ * to this package.
+ * @param programId The ID of the affected watch next program.
+ * @hide
+ */
+ @SystemApi
+ @RequiresPermission(android.Manifest.permission.NOTIFY_TV_INPUTS)
+ public void notifyWatchNextProgramBrowsableDisabled(String packageName, long programId) {
+ // Same delivery path as notifyPreviewProgramBrowsableDisabled, different action/extra.
+ Intent intent = new Intent();
+ intent.setAction(TvContract.ACTION_WATCH_NEXT_PROGRAM_BROWSABLE_DISABLED);
+ intent.putExtra(TvContract.EXTRA_WATCH_NEXT_PROGRAM_ID, programId);
+ intent.setPackage(packageName);
+ try {
+ mService.sendTvInputNotifyIntent(intent, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Notifies the TV input of the given preview program that the program is added to watch next.
+ *
+ * @param packageName The package that owns the programs; the intent is restricted to it.
+ * @param previewProgramId The ID of the source preview program.
+ * @param watchNextProgramId The ID of the newly created watch next program.
+ * @hide
+ */
+ @SystemApi
+ @RequiresPermission(android.Manifest.permission.NOTIFY_TV_INPUTS)
+ public void notifyPreviewProgramAddedToWatchNext(String packageName, long previewProgramId,
+ long watchNextProgramId) {
+ Intent intent = new Intent();
+ intent.setAction(TvContract.ACTION_PREVIEW_PROGRAM_ADDED_TO_WATCH_NEXT);
+ intent.putExtra(TvContract.EXTRA_PREVIEW_PROGRAM_ID, previewProgramId);
+ intent.putExtra(TvContract.EXTRA_WATCH_NEXT_PROGRAM_ID, watchNextProgramId);
+ intent.setPackage(packageName);
+ try {
+ mService.sendTvInputNotifyIntent(intent, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Creates a {@link Session} for a given TV input.
+ *
+ * <p>The number of sessions that can be created at the same time is limited by the capability
+ * of the given TV input.
+ *
+ * @param inputId The ID of the TV input.
+ * @param callback A callback used to receive the created session.
+ * @param handler A {@link Handler} that the session creation will be delivered to.
+ * @hide
+ */
+ public void createSession(@NonNull String inputId, @NonNull final SessionCallback callback,
+ @NonNull Handler handler) {
+ // isRecordingSession = false: a regular playback session.
+ createSessionInternal(inputId, false, callback, handler);
+ }
+
+ /**
+ * Creates a recording {@link Session} for a given TV input.
+ *
+ * <p>The number of sessions that can be created at the same time is limited by the capability
+ * of the given TV input.
+ *
+ * @param inputId The ID of the TV input.
+ * @param callback A callback used to receive the created session.
+ * @param handler A {@link Handler} that the session creation will be delivered to.
+ * @hide
+ */
+ public void createRecordingSession(@NonNull String inputId,
+ @NonNull final SessionCallback callback, @NonNull Handler handler) {
+ // isRecordingSession = true: the session records instead of rendering.
+ createSessionInternal(inputId, true, callback, handler);
+ }
+
+ // Shared implementation for createSession/createRecordingSession: registers the
+ // callback record under a fresh sequence number, then asks the service to create the
+ // session asynchronously. The result arrives via mClient.onSessionCreated(seq).
+ private void createSessionInternal(String inputId, boolean isRecordingSession,
+ SessionCallback callback, Handler handler) {
+ Preconditions.checkNotNull(inputId);
+ Preconditions.checkNotNull(callback);
+ Preconditions.checkNotNull(handler);
+ SessionCallbackRecord record = new SessionCallbackRecord(callback, handler);
+ synchronized (mSessionCallbackRecordMap) {
+ // seq allocation and map insertion must happen under the same lock so the
+ // reply cannot race ahead of the record being registered.
+ int seq = mNextSeq++;
+ mSessionCallbackRecordMap.put(seq, record);
+ try {
+ mService.createSession(mClient, inputId, isRecordingSession, seq, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+ }
+
+ /**
+ * Returns the TvStreamConfig list of the given TV input.
+ *
+ * If you are using {@link Hardware} object from {@link
+ * #acquireTvInputHardware}, you should get the list of available streams
+ * from {@link HardwareCallback#onStreamConfigChanged} method, not from
+ * here. This method is designed to be used with {@link #captureFrame} in
+ * capture scenarios specifically and not suitable for any other use.
+ *
+ * @param inputId The ID of the TV input.
+ * @return List of {@link TvStreamConfig} which is available for capturing
+ * of the given TV input.
+ * @hide
+ */
+ @SystemApi
+ @RequiresPermission(android.Manifest.permission.CAPTURE_TV_INPUT)
+ public List<TvStreamConfig> getAvailableTvStreamConfigList(String inputId) {
+ try {
+ return mService.getAvailableTvStreamConfigList(inputId, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Take a snapshot of the given TV input into the provided Surface.
+ *
+ * @param inputId The ID of the TV input.
+ * @param surface the {@link Surface} to which the snapshot is captured.
+ * @param config the {@link TvStreamConfig} which is used for capturing.
+ * @return true when the {@link Surface} is ready to be captured.
+ * @hide
+ */
+ @SystemApi
+ @RequiresPermission(android.Manifest.permission.CAPTURE_TV_INPUT)
+ public boolean captureFrame(String inputId, Surface surface, TvStreamConfig config) {
+ try {
+ // config should come from getAvailableTvStreamConfigList() for the same input.
+ return mService.captureFrame(inputId, surface, config, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Returns true if there is only a single TV input session.
+ *
+ * @return {@code true} when exactly one session is active system-wide.
+ * @hide
+ */
+ @SystemApi
+ @RequiresPermission(android.Manifest.permission.CAPTURE_TV_INPUT)
+ public boolean isSingleSessionActive() {
+ try {
+ return mService.isSingleSessionActive(mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Returns a list of TvInputHardwareInfo objects representing available hardware.
+ *
+ * @return the available hardware devices; not scoped to a user (no userId argument).
+ * @hide
+ */
+ @SystemApi
+ @RequiresPermission(android.Manifest.permission.TV_INPUT_HARDWARE)
+ public List<TvInputHardwareInfo> getHardwareList() {
+ try {
+ return mService.getHardwareList();
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Acquires {@link Hardware} object for the given device ID.
+ *
+ * <p>A subsequent call to this method on the same {@code deviceId} will release the currently
+ * acquired Hardware.
+ *
+ * @param deviceId The device ID to acquire Hardware for.
+ * @param callback A callback to receive updates on Hardware.
+ * @param info The TV input which will use the acquired Hardware.
+ * @return Hardware on success, {@code null} otherwise.
+ *
+ * @removed
+ */
+ @RequiresPermission(android.Manifest.permission.TV_INPUT_HARDWARE)
+ public Hardware acquireTvInputHardware(int deviceId, final HardwareCallback callback,
+ TvInputInfo info) {
+ // Legacy parameter order; delegates to the (deviceId, info, callback) overload.
+ return acquireTvInputHardware(deviceId, info, callback);
+ }
+
+ /**
+ * Acquires {@link Hardware} object for the given device ID.
+ *
+ * <p>A subsequent call to this method on the same {@code deviceId} will release the currently
+ * acquired Hardware.
+ *
+ * @param deviceId The device ID to acquire Hardware for.
+ * @param callback A callback to receive updates on Hardware.
+ * @param info The TV input which will use the acquired Hardware.
+ * @return Hardware on success, {@code null} otherwise.
+ *
+ * @hide
+ */
+ @SystemApi
+ @RequiresPermission(android.Manifest.permission.TV_INPUT_HARDWARE)
+ public Hardware acquireTvInputHardware(int deviceId, TvInputInfo info,
+ final HardwareCallback callback) {
+ // NOTE(review): callback is not null-checked here; a null callback would only
+ // fail later when the stub below forwards an event — confirm callers always pass one.
+ try {
+ // The binder stub bridges service-side hardware events to the client callback.
+ return new Hardware(
+ mService.acquireTvInputHardware(deviceId, new ITvInputHardwareCallback.Stub() {
+ @Override
+ public void onReleased() {
+ callback.onReleased();
+ }
+
+ @Override
+ public void onStreamConfigChanged(TvStreamConfig[] configs) {
+ callback.onStreamConfigChanged(configs);
+ }
+ }, info, mUserId));
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Releases previously acquired hardware object.
+ *
+ * @param deviceId The device ID this Hardware was acquired for
+ * @param hardware Hardware to release.
+ *
+ * @hide
+ */
+ @SystemApi
+ @RequiresPermission(android.Manifest.permission.TV_INPUT_HARDWARE)
+ public void releaseTvInputHardware(int deviceId, Hardware hardware) {
+ try {
+ // The service identifies the acquisition by its underlying binder interface.
+ mService.releaseTvInputHardware(deviceId, hardware.getInterface(), mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Returns the list of currently available DVB devices on the system.
+ *
+ * @return the list of {@link DvbDeviceInfo} objects representing available DVB devices.
+ * @hide
+ */
+ public List<DvbDeviceInfo> getDvbDeviceList() {
+ try {
+ // Not user-scoped; the service call takes no userId.
+ return mService.getDvbDeviceList();
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Returns a {@link ParcelFileDescriptor} of a specified DVB device for a given
+ * {@link DvbDeviceInfo}
+ *
+ * @param info A {@link DvbDeviceInfo} to open a DVB device.
+ * @param device A DVB device. The DVB device can be {@link #DVB_DEVICE_DEMUX},
+ * {@link #DVB_DEVICE_DVR} or {@link #DVB_DEVICE_FRONTEND}.
+ * @return a {@link ParcelFileDescriptor} of a specified DVB device for a given
+ * {@link DvbDeviceInfo}, or {@code null} if the given {@link DvbDeviceInfo} was invalid
+ * or the specified DVB device was busy with a previous request.
+ * @throws IllegalArgumentException if {@code device} is outside
+ * [{@link #DVB_DEVICE_START}, {@link #DVB_DEVICE_END}].
+ * @hide
+ */
+ public ParcelFileDescriptor openDvbDevice(DvbDeviceInfo info, int device) {
+ // Validate the device type up front; the try block below exists only to
+ // translate binder failures. (Previously the check sat inside the try,
+ // obscuring that IllegalArgumentException is a caller error, not a binder one.)
+ if (device < DVB_DEVICE_START || device > DVB_DEVICE_END) {
+ throw new IllegalArgumentException("Invalid DVB device: " + device);
+ }
+ try {
+ return mService.openDvbDevice(info, device);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Requests to make a channel browsable.
+ *
+ * <p>Once called, the system will review the request and make the channel browsable based on
+ * its policy. The first request from a package is guaranteed to be approved.
+ *
+ * @param channelUri The URI for the channel to be browsable.
+ * @hide
+ */
+ public void requestChannelBrowsable(Uri channelUri) {
+ try {
+ // Fire-and-forget: approval is decided asynchronously by system policy.
+ mService.requestChannelBrowsable(channelUri, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * The Session provides the per-session functionality of TV inputs.
+ * @hide
+ */
+ public static final class Session {
+ // Input-event dispatch results (see TvView's event forwarding).
+ static final int DISPATCH_IN_PROGRESS = -1;
+ static final int DISPATCH_NOT_HANDLED = 0;
+ static final int DISPATCH_HANDLED = 1;
+
+ // Presumably milliseconds — TODO confirm against the input-event dispatch code.
+ private static final long INPUT_SESSION_NOT_RESPONDING_TIMEOUT = 2500;
+
+ private final ITvInputManager mService;
+ private final int mUserId;
+ private final int mSeq;
+
+ // For scheduling input event handling on the main thread. This also serves as a lock to
+ // protect pending input events and the input channel.
+ private final InputEventHandler mHandler = new InputEventHandler(Looper.getMainLooper());
+
+ private final Pool<PendingEvent> mPendingEventPool = new SimplePool<>(20);
+ private final SparseArray<PendingEvent> mPendingEvents = new SparseArray<>(20);
+ private final SparseArray<SessionCallbackRecord> mSessionCallbackRecordMap;
+
+ // Cleared (set to null) by releaseInternal(); a null token marks a released session.
+ private IBinder mToken;
+ private TvInputEventSender mSender;
+ private InputChannel mChannel;
+
+ private final Object mMetadataLock = new Object();
+ // @GuardedBy("mMetadataLock")
+ private final List<TvTrackInfo> mAudioTracks = new ArrayList<>();
+ // @GuardedBy("mMetadataLock")
+ private final List<TvTrackInfo> mVideoTracks = new ArrayList<>();
+ // @GuardedBy("mMetadataLock")
+ private final List<TvTrackInfo> mSubtitleTracks = new ArrayList<>();
+ // @GuardedBy("mMetadataLock")
+ private String mSelectedAudioTrackId;
+ // @GuardedBy("mMetadataLock")
+ private String mSelectedVideoTrackId;
+ // @GuardedBy("mMetadataLock")
+ private String mSelectedSubtitleTrackId;
+ // @GuardedBy("mMetadataLock")
+ private int mVideoWidth;
+ // @GuardedBy("mMetadataLock")
+ private int mVideoHeight;
+
+ // Instantiated only by ITvInputClient.onSessionCreated once the service-side
+ // session exists; clients never construct a Session directly.
+ private Session(IBinder token, InputChannel channel, ITvInputManager service, int userId,
+ int seq, SparseArray<SessionCallbackRecord> sessionCallbackRecordMap) {
+ mToken = token;
+ mChannel = channel;
+ mService = service;
+ mUserId = userId;
+ mSeq = seq;
+ mSessionCallbackRecordMap = sessionCallbackRecordMap;
+ }
+
+ /**
+ * Releases this session.
+ */
+ public void release() {
+ // A null token means release() already ran; the call is a no-op.
+ if (mToken == null) {
+ Log.w(TAG, "The session has been already released");
+ return;
+ }
+ try {
+ mService.releaseSession(mToken, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+
+ // Local teardown happens even though the service was already told to release.
+ releaseInternal();
+ }
+
+ /**
+ * Sets this as the main session. The main session is a session whose corresponding TV
+ * input determines the HDMI-CEC active source device.
+ *
+ * @see TvView#setMain
+ */
+ void setMain() {
+ if (mToken == null) {
+ Log.w(TAG, "The session has been already released");
+ return;
+ }
+ try {
+ mService.setMainSession(mToken, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Sets the {@link android.view.Surface} for this session.
+ *
+ * @param surface A {@link android.view.Surface} used to render video. May be {@code null}
+ * to remove the current surface.
+ */
+ public void setSurface(Surface surface) {
+ if (mToken == null) {
+ Log.w(TAG, "The session has been already released");
+ return;
+ }
+ // surface can be null.
+ try {
+ mService.setSurface(mToken, surface, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Notifies of any structural changes (format or size) of the surface passed in
+ * {@link #setSurface}.
+ *
+ * @param format The new PixelFormat of the surface.
+ * @param width The new width of the surface.
+ * @param height The new height of the surface.
+ */
+ public void dispatchSurfaceChanged(int format, int width, int height) {
+ if (mToken == null) {
+ Log.w(TAG, "The session has been already released");
+ return;
+ }
+ try {
+ mService.dispatchSurfaceChanged(mToken, format, width, height, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Sets the relative stream volume of this session to handle a change of audio focus.
+ *
+ * @param volume A volume value between 0.0f to 1.0f.
+ * @throws IllegalArgumentException if the volume value is out of range.
+ */
+ public void setStreamVolume(float volume) {
+ if (mToken == null) {
+ Log.w(TAG, "The session has been already released");
+ return;
+ }
+ // Reject out-of-range values before calling the service. (Previously this check
+ // sat inside the RemoteException try block, obscuring that the exception is a
+ // caller error; the released-session guard above still runs first, as before.)
+ if (volume < 0.0f || volume > 1.0f) {
+ throw new IllegalArgumentException("volume should be between 0.0f and 1.0f");
+ }
+ try {
+ mService.setVolume(mToken, volume, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Tunes to a given channel.
+ *
+ * @param channelUri The URI of a channel.
+ */
+ public void tune(Uri channelUri) {
+ tune(channelUri, null);
+ }
+
+ /**
+ * Tunes to a given channel.
+ *
+ * @param channelUri The URI of a channel.
+ * @param params A set of extra parameters which might be handled with this tune event.
+ */
+ public void tune(@NonNull Uri channelUri, Bundle params) {
+ Preconditions.checkNotNull(channelUri);
+ if (mToken == null) {
+ Log.w(TAG, "The session has been already released");
+ return;
+ }
+ synchronized (mMetadataLock) {
+ mAudioTracks.clear();
+ mVideoTracks.clear();
+ mSubtitleTracks.clear();
+ mSelectedAudioTrackId = null;
+ mSelectedVideoTrackId = null;
+ mSelectedSubtitleTrackId = null;
+ mVideoWidth = 0;
+ mVideoHeight = 0;
+ }
+ try {
+ mService.tune(mToken, channelUri, params, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Enables or disables the caption for this session.
+ *
+ * @param enabled {@code true} to enable, {@code false} to disable.
+ */
+ public void setCaptionEnabled(boolean enabled) {
+ if (mToken == null) {
+ Log.w(TAG, "The session has been already released");
+ return;
+ }
+ try {
+ mService.setCaptionEnabled(mToken, enabled, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Selects a track.
+ *
+ * @param type The type of the track to select. The type can be
+ * {@link TvTrackInfo#TYPE_AUDIO}, {@link TvTrackInfo#TYPE_VIDEO} or
+ * {@link TvTrackInfo#TYPE_SUBTITLE}.
+ * @param trackId The ID of the track to select. When {@code null}, the currently selected
+ * track of the given type will be unselected.
+ * @see #getTracks
+ */
+ public void selectTrack(int type, @Nullable String trackId) {
+ synchronized (mMetadataLock) {
+ if (type == TvTrackInfo.TYPE_AUDIO) {
+ if (trackId != null && !containsTrack(mAudioTracks, trackId)) {
+ Log.w(TAG, "Invalid audio trackId: " + trackId);
+ return;
+ }
+ } else if (type == TvTrackInfo.TYPE_VIDEO) {
+ if (trackId != null && !containsTrack(mVideoTracks, trackId)) {
+ Log.w(TAG, "Invalid video trackId: " + trackId);
+ return;
+ }
+ } else if (type == TvTrackInfo.TYPE_SUBTITLE) {
+ if (trackId != null && !containsTrack(mSubtitleTracks, trackId)) {
+ Log.w(TAG, "Invalid subtitle trackId: " + trackId);
+ return;
+ }
+ } else {
+ throw new IllegalArgumentException("invalid type: " + type);
+ }
+ }
+ if (mToken == null) {
+ Log.w(TAG, "The session has been already released");
+ return;
+ }
+ try {
+ mService.selectTrack(mToken, type, trackId, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ private boolean containsTrack(List<TvTrackInfo> tracks, String trackId) {
+ for (TvTrackInfo track : tracks) {
+ if (track.getId().equals(trackId)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /**
+ * Returns the list of tracks for a given type. Returns {@code null} if the information is
+ * not available.
+ *
+ * @param type The type of the tracks. The type can be {@link TvTrackInfo#TYPE_AUDIO},
+ * {@link TvTrackInfo#TYPE_VIDEO} or {@link TvTrackInfo#TYPE_SUBTITLE}.
+ * @return the list of tracks for the given type.
+ */
+ @Nullable
+ public List<TvTrackInfo> getTracks(int type) {
+ synchronized (mMetadataLock) {
+ if (type == TvTrackInfo.TYPE_AUDIO) {
+ if (mAudioTracks == null) {
+ return null;
+ }
+ return new ArrayList<>(mAudioTracks);
+ } else if (type == TvTrackInfo.TYPE_VIDEO) {
+ if (mVideoTracks == null) {
+ return null;
+ }
+ return new ArrayList<>(mVideoTracks);
+ } else if (type == TvTrackInfo.TYPE_SUBTITLE) {
+ if (mSubtitleTracks == null) {
+ return null;
+ }
+ return new ArrayList<>(mSubtitleTracks);
+ }
+ }
+ throw new IllegalArgumentException("invalid type: " + type);
+ }
+
+ /**
+ * Returns the selected track for a given type. Returns {@code null} if the information is
+ * not available or any of the tracks for the given type is not selected.
+ *
+ * @return The ID of the selected track.
+ * @see #selectTrack
+ */
+ @Nullable
+ public String getSelectedTrack(int type) {
+ synchronized (mMetadataLock) {
+ if (type == TvTrackInfo.TYPE_AUDIO) {
+ return mSelectedAudioTrackId;
+ } else if (type == TvTrackInfo.TYPE_VIDEO) {
+ return mSelectedVideoTrackId;
+ } else if (type == TvTrackInfo.TYPE_SUBTITLE) {
+ return mSelectedSubtitleTrackId;
+ }
+ }
+ throw new IllegalArgumentException("invalid type: " + type);
+ }
+
+ /**
+ * Responds to onTracksChanged() and updates the internal track information. Returns true if
+ * there is an update.
+ */
+ boolean updateTracks(List<TvTrackInfo> tracks) {
+ synchronized (mMetadataLock) {
+ mAudioTracks.clear();
+ mVideoTracks.clear();
+ mSubtitleTracks.clear();
+ for (TvTrackInfo track : tracks) {
+ if (track.getType() == TvTrackInfo.TYPE_AUDIO) {
+ mAudioTracks.add(track);
+ } else if (track.getType() == TvTrackInfo.TYPE_VIDEO) {
+ mVideoTracks.add(track);
+ } else if (track.getType() == TvTrackInfo.TYPE_SUBTITLE) {
+ mSubtitleTracks.add(track);
+ }
+ }
+ return !mAudioTracks.isEmpty() || !mVideoTracks.isEmpty()
+ || !mSubtitleTracks.isEmpty();
+ }
+ }
+
+ /**
+ * Responds to onTrackSelected() and updates the internal track selection information.
+ * Returns true if there is an update.
+ */
+ boolean updateTrackSelection(int type, String trackId) {
+ synchronized (mMetadataLock) {
+ if (type == TvTrackInfo.TYPE_AUDIO
+ && !TextUtils.equals(trackId, mSelectedAudioTrackId)) {
+ mSelectedAudioTrackId = trackId;
+ return true;
+ } else if (type == TvTrackInfo.TYPE_VIDEO
+ && !TextUtils.equals(trackId, mSelectedVideoTrackId)) {
+ mSelectedVideoTrackId = trackId;
+ return true;
+ } else if (type == TvTrackInfo.TYPE_SUBTITLE
+ && !TextUtils.equals(trackId, mSelectedSubtitleTrackId)) {
+ mSelectedSubtitleTrackId = trackId;
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /**
+ * Returns the new/updated video track that contains new video size information. Returns
+         * null if there is no video track to notify. Subsequent calls of this method result in a
+ * non-null video track returned only by the first call and null returned by following
+ * calls. The caller should immediately notify of the video size change upon receiving the
+ * track.
+ */
+ TvTrackInfo getVideoTrackToNotify() {
+ synchronized (mMetadataLock) {
+ if (!mVideoTracks.isEmpty() && mSelectedVideoTrackId != null) {
+ for (TvTrackInfo track : mVideoTracks) {
+ if (track.getId().equals(mSelectedVideoTrackId)) {
+ int videoWidth = track.getVideoWidth();
+ int videoHeight = track.getVideoHeight();
+ if (mVideoWidth != videoWidth || mVideoHeight != videoHeight) {
+ mVideoWidth = videoWidth;
+ mVideoHeight = videoHeight;
+ return track;
+ }
+ }
+ }
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Plays a given recorded TV program.
+ */
+ void timeShiftPlay(Uri recordedProgramUri) {
+ if (mToken == null) {
+ Log.w(TAG, "The session has been already released");
+ return;
+ }
+ try {
+ mService.timeShiftPlay(mToken, recordedProgramUri, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Pauses the playback. Call {@link #timeShiftResume()} to restart the playback.
+ */
+ void timeShiftPause() {
+ if (mToken == null) {
+ Log.w(TAG, "The session has been already released");
+ return;
+ }
+ try {
+ mService.timeShiftPause(mToken, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Resumes the playback. No-op if it is already playing the channel.
+ */
+ void timeShiftResume() {
+ if (mToken == null) {
+ Log.w(TAG, "The session has been already released");
+ return;
+ }
+ try {
+ mService.timeShiftResume(mToken, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Seeks to a specified time position.
+ *
+ * <p>Normally, the position is given within range between the start and the current time,
+ * inclusively.
+ *
+ * @param timeMs The time position to seek to, in milliseconds since the epoch.
+ * @see TvView.TimeShiftPositionCallback#onTimeShiftStartPositionChanged
+ */
+ void timeShiftSeekTo(long timeMs) {
+ if (mToken == null) {
+ Log.w(TAG, "The session has been already released");
+ return;
+ }
+ try {
+ mService.timeShiftSeekTo(mToken, timeMs, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Sets playback rate using {@link android.media.PlaybackParams}.
+ *
+ * @param params The playback params.
+ */
+ void timeShiftSetPlaybackParams(PlaybackParams params) {
+ if (mToken == null) {
+ Log.w(TAG, "The session has been already released");
+ return;
+ }
+ try {
+ mService.timeShiftSetPlaybackParams(mToken, params, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Enable/disable position tracking.
+ *
+ * @param enable {@code true} to enable tracking, {@code false} otherwise.
+ */
+ void timeShiftEnablePositionTracking(boolean enable) {
+ if (mToken == null) {
+ Log.w(TAG, "The session has been already released");
+ return;
+ }
+ try {
+ mService.timeShiftEnablePositionTracking(mToken, enable, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Starts TV program recording in the current recording session.
+ *
+ * @param programUri The URI for the TV program to record as a hint, built by
+ * {@link TvContract#buildProgramUri(long)}. Can be {@code null}.
+ */
+ void startRecording(@Nullable Uri programUri) {
+ if (mToken == null) {
+ Log.w(TAG, "The session has been already released");
+ return;
+ }
+ try {
+ mService.startRecording(mToken, programUri, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Stops TV program recording in the current recording session.
+ */
+ void stopRecording() {
+ if (mToken == null) {
+ Log.w(TAG, "The session has been already released");
+ return;
+ }
+ try {
+ mService.stopRecording(mToken, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Calls {@link TvInputService.Session#appPrivateCommand(String, Bundle)
+ * TvInputService.Session.appPrivateCommand()} on the current TvView.
+ *
+ * @param action Name of the command to be performed. This <em>must</em> be a scoped name,
+ * i.e. prefixed with a package name you own, so that different developers will
+ * not create conflicting commands.
+ * @param data Any data to include with the command.
+ */
+ public void sendAppPrivateCommand(String action, Bundle data) {
+ if (mToken == null) {
+ Log.w(TAG, "The session has been already released");
+ return;
+ }
+ try {
+ mService.sendAppPrivateCommand(mToken, action, data, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Creates an overlay view. Once the overlay view is created, {@link #relayoutOverlayView}
+ * should be called whenever the layout of its containing view is changed.
+ * {@link #removeOverlayView()} should be called to remove the overlay view.
+ * Since a session can have only one overlay view, this method should be called only once
+ * or it can be called again after calling {@link #removeOverlayView()}.
+ *
+ * @param view A view playing TV.
+ * @param frame A position of the overlay view.
+ * @throws IllegalStateException if {@code view} is not attached to a window.
+ */
+ void createOverlayView(@NonNull View view, @NonNull Rect frame) {
+ Preconditions.checkNotNull(view);
+ Preconditions.checkNotNull(frame);
+ if (view.getWindowToken() == null) {
+ throw new IllegalStateException("view must be attached to a window");
+ }
+ if (mToken == null) {
+ Log.w(TAG, "The session has been already released");
+ return;
+ }
+ try {
+ mService.createOverlayView(mToken, view.getWindowToken(), frame, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Relayouts the current overlay view.
+ *
+ * @param frame A new position of the overlay view.
+ */
+ void relayoutOverlayView(@NonNull Rect frame) {
+ Preconditions.checkNotNull(frame);
+ if (mToken == null) {
+ Log.w(TAG, "The session has been already released");
+ return;
+ }
+ try {
+ mService.relayoutOverlayView(mToken, frame, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Removes the current overlay view.
+ */
+ void removeOverlayView() {
+ if (mToken == null) {
+ Log.w(TAG, "The session has been already released");
+ return;
+ }
+ try {
+ mService.removeOverlayView(mToken, mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Requests to unblock content blocked by parental controls.
+ */
+ void unblockContent(@NonNull TvContentRating unblockedRating) {
+ Preconditions.checkNotNull(unblockedRating);
+ if (mToken == null) {
+ Log.w(TAG, "The session has been already released");
+ return;
+ }
+ try {
+ mService.unblockContent(mToken, unblockedRating.flattenToString(), mUserId);
+ } catch (RemoteException e) {
+ throw e.rethrowFromSystemServer();
+ }
+ }
+
+ /**
+ * Dispatches an input event to this session.
+ *
+ * @param event An {@link InputEvent} to dispatch. Cannot be {@code null}.
+ * @param token A token used to identify the input event later in the callback.
+ * @param callback A callback used to receive the dispatch result. Cannot be {@code null}.
+ * @param handler A {@link Handler} that the dispatch result will be delivered to. Cannot be
+ * {@code null}.
+ * @return Returns {@link #DISPATCH_HANDLED} if the event was handled. Returns
+ * {@link #DISPATCH_NOT_HANDLED} if the event was not handled. Returns
+ * {@link #DISPATCH_IN_PROGRESS} if the event is in progress and the callback will
+ * be invoked later.
+ * @hide
+ */
+ public int dispatchInputEvent(@NonNull InputEvent event, Object token,
+ @NonNull FinishedInputEventCallback callback, @NonNull Handler handler) {
+ Preconditions.checkNotNull(event);
+ Preconditions.checkNotNull(callback);
+ Preconditions.checkNotNull(handler);
+ synchronized (mHandler) {
+ if (mChannel == null) {
+ return DISPATCH_NOT_HANDLED;
+ }
+ PendingEvent p = obtainPendingEventLocked(event, token, callback, handler);
+ if (Looper.myLooper() == Looper.getMainLooper()) {
+ // Already running on the main thread so we can send the event immediately.
+ return sendInputEventOnMainLooperLocked(p);
+ }
+
+ // Post the event to the main thread.
+ Message msg = mHandler.obtainMessage(InputEventHandler.MSG_SEND_INPUT_EVENT, p);
+ msg.setAsynchronous(true);
+ mHandler.sendMessage(msg);
+ return DISPATCH_IN_PROGRESS;
+ }
+ }
+
+ /**
+ * Callback that is invoked when an input event that was dispatched to this session has been
+ * finished.
+ *
+ * @hide
+ */
+ public interface FinishedInputEventCallback {
+ /**
+ * Called when the dispatched input event is finished.
+ *
+ * @param token A token passed to {@link #dispatchInputEvent}.
+ * @param handled {@code true} if the dispatched input event was handled properly.
+ * {@code false} otherwise.
+ */
+ void onFinishedInputEvent(Object token, boolean handled);
+ }
+
+ // Must be called on the main looper
+ private void sendInputEventAndReportResultOnMainLooper(PendingEvent p) {
+ synchronized (mHandler) {
+ int result = sendInputEventOnMainLooperLocked(p);
+ if (result == DISPATCH_IN_PROGRESS) {
+ return;
+ }
+ }
+
+ invokeFinishedInputEventCallback(p, false);
+ }
+
+ private int sendInputEventOnMainLooperLocked(PendingEvent p) {
+ if (mChannel != null) {
+ if (mSender == null) {
+ mSender = new TvInputEventSender(mChannel, mHandler.getLooper());
+ }
+
+ final InputEvent event = p.mEvent;
+ final int seq = event.getSequenceNumber();
+ if (mSender.sendInputEvent(seq, event)) {
+ mPendingEvents.put(seq, p);
+ Message msg = mHandler.obtainMessage(InputEventHandler.MSG_TIMEOUT_INPUT_EVENT, p);
+ msg.setAsynchronous(true);
+ mHandler.sendMessageDelayed(msg, INPUT_SESSION_NOT_RESPONDING_TIMEOUT);
+ return DISPATCH_IN_PROGRESS;
+ }
+
+ Log.w(TAG, "Unable to send input event to session: " + mToken + " dropping:"
+ + event);
+ }
+ return DISPATCH_NOT_HANDLED;
+ }
+
+ void finishedInputEvent(int seq, boolean handled, boolean timeout) {
+ final PendingEvent p;
+ synchronized (mHandler) {
+ int index = mPendingEvents.indexOfKey(seq);
+ if (index < 0) {
+ return; // spurious, event already finished or timed out
+ }
+
+ p = mPendingEvents.valueAt(index);
+ mPendingEvents.removeAt(index);
+
+ if (timeout) {
+ Log.w(TAG, "Timeout waiting for session to handle input event after "
+ + INPUT_SESSION_NOT_RESPONDING_TIMEOUT + " ms: " + mToken);
+ } else {
+ mHandler.removeMessages(InputEventHandler.MSG_TIMEOUT_INPUT_EVENT, p);
+ }
+ }
+
+ invokeFinishedInputEventCallback(p, handled);
+ }
+
+ // Assumes the event has already been removed from the queue.
+ void invokeFinishedInputEventCallback(PendingEvent p, boolean handled) {
+ p.mHandled = handled;
+ if (p.mEventHandler.getLooper().isCurrentThread()) {
+ // Already running on the callback handler thread so we can send the callback
+ // immediately.
+ p.run();
+ } else {
+ // Post the event to the callback handler thread.
+ // In this case, the callback will be responsible for recycling the event.
+ Message msg = Message.obtain(p.mEventHandler, p);
+ msg.setAsynchronous(true);
+ msg.sendToTarget();
+ }
+ }
+
+ private void flushPendingEventsLocked() {
+ mHandler.removeMessages(InputEventHandler.MSG_FLUSH_INPUT_EVENT);
+
+ final int count = mPendingEvents.size();
+ for (int i = 0; i < count; i++) {
+ int seq = mPendingEvents.keyAt(i);
+ Message msg = mHandler.obtainMessage(InputEventHandler.MSG_FLUSH_INPUT_EVENT, seq, 0);
+ msg.setAsynchronous(true);
+ msg.sendToTarget();
+ }
+ }
+
+ private PendingEvent obtainPendingEventLocked(InputEvent event, Object token,
+ FinishedInputEventCallback callback, Handler handler) {
+ PendingEvent p = mPendingEventPool.acquire();
+ if (p == null) {
+ p = new PendingEvent();
+ }
+ p.mEvent = event;
+ p.mEventToken = token;
+ p.mCallback = callback;
+ p.mEventHandler = handler;
+ return p;
+ }
+
+ private void recyclePendingEventLocked(PendingEvent p) {
+ p.recycle();
+ mPendingEventPool.release(p);
+ }
+
+ IBinder getToken() {
+ return mToken;
+ }
+
+ private void releaseInternal() {
+ mToken = null;
+ synchronized (mHandler) {
+ if (mChannel != null) {
+ if (mSender != null) {
+ flushPendingEventsLocked();
+ mSender.dispose();
+ mSender = null;
+ }
+ mChannel.dispose();
+ mChannel = null;
+ }
+ }
+ synchronized (mSessionCallbackRecordMap) {
+ mSessionCallbackRecordMap.remove(mSeq);
+ }
+ }
+
+ private final class InputEventHandler extends Handler {
+ public static final int MSG_SEND_INPUT_EVENT = 1;
+ public static final int MSG_TIMEOUT_INPUT_EVENT = 2;
+ public static final int MSG_FLUSH_INPUT_EVENT = 3;
+
+ InputEventHandler(Looper looper) {
+ super(looper, null, true);
+ }
+
+ @Override
+ public void handleMessage(Message msg) {
+ switch (msg.what) {
+ case MSG_SEND_INPUT_EVENT: {
+ sendInputEventAndReportResultOnMainLooper((PendingEvent) msg.obj);
+ return;
+ }
+ case MSG_TIMEOUT_INPUT_EVENT: {
+ finishedInputEvent(msg.arg1, false, true);
+ return;
+ }
+ case MSG_FLUSH_INPUT_EVENT: {
+ finishedInputEvent(msg.arg1, false, false);
+ return;
+ }
+ }
+ }
+ }
+
+ private final class TvInputEventSender extends InputEventSender {
+ public TvInputEventSender(InputChannel inputChannel, Looper looper) {
+ super(inputChannel, looper);
+ }
+
+ @Override
+ public void onInputEventFinished(int seq, boolean handled) {
+ finishedInputEvent(seq, handled, false);
+ }
+ }
+
+ private final class PendingEvent implements Runnable {
+ public InputEvent mEvent;
+ public Object mEventToken;
+ public FinishedInputEventCallback mCallback;
+ public Handler mEventHandler;
+ public boolean mHandled;
+
+ public void recycle() {
+ mEvent = null;
+ mEventToken = null;
+ mCallback = null;
+ mEventHandler = null;
+ mHandled = false;
+ }
+
+ @Override
+ public void run() {
+ mCallback.onFinishedInputEvent(mEventToken, mHandled);
+
+ synchronized (mEventHandler) {
+ recyclePendingEventLocked(this);
+ }
+ }
+ }
+ }
+
+ /**
+ * The Hardware provides the per-hardware functionality of TV hardware.
+ *
+ * <p>TV hardware is physical hardware attached to the Android device; for example, HDMI ports,
+ * Component/Composite ports, etc. Specifically, logical devices such as HDMI CEC logical
+ * devices don't fall into this category.
+ *
+ * @hide
+ */
+ @SystemApi
+ public final static class Hardware {
+ private final ITvInputHardware mInterface;
+
+ private Hardware(ITvInputHardware hardwareInterface) {
+ mInterface = hardwareInterface;
+ }
+
+ private ITvInputHardware getInterface() {
+ return mInterface;
+ }
+
+ public boolean setSurface(Surface surface, TvStreamConfig config) {
+ try {
+ return mInterface.setSurface(surface, config);
+ } catch (RemoteException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ public void setStreamVolume(float volume) {
+ try {
+ mInterface.setStreamVolume(volume);
+ } catch (RemoteException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ public boolean dispatchKeyEventToHdmi(KeyEvent event) {
+ try {
+ return mInterface.dispatchKeyEventToHdmi(event);
+ } catch (RemoteException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ public void overrideAudioSink(int audioType, String audioAddress, int samplingRate,
+ int channelMask, int format) {
+ try {
+ mInterface.overrideAudioSink(audioType, audioAddress, samplingRate, channelMask,
+ format);
+ } catch (RemoteException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ }
+}
diff --git a/android/media/tv/TvInputService.java b/android/media/tv/TvInputService.java
new file mode 100644
index 00000000..e24124db
--- /dev/null
+++ b/android/media/tv/TvInputService.java
@@ -0,0 +1,2141 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv;
+
+import android.annotation.FloatRange;
+import android.annotation.MainThread;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.annotation.SuppressLint;
+import android.annotation.SystemApi;
+import android.app.ActivityManager;
+import android.app.Service;
+import android.content.Context;
+import android.content.Intent;
+import android.graphics.PixelFormat;
+import android.graphics.Rect;
+import android.hardware.hdmi.HdmiDeviceInfo;
+import android.media.PlaybackParams;
+import android.net.Uri;
+import android.os.AsyncTask;
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.IBinder;
+import android.os.Message;
+import android.os.Process;
+import android.os.RemoteCallbackList;
+import android.os.RemoteException;
+import android.text.TextUtils;
+import android.util.Log;
+import android.view.Gravity;
+import android.view.InputChannel;
+import android.view.InputDevice;
+import android.view.InputEvent;
+import android.view.InputEventReceiver;
+import android.view.KeyEvent;
+import android.view.MotionEvent;
+import android.view.Surface;
+import android.view.View;
+import android.view.WindowManager;
+import android.view.accessibility.CaptioningManager;
+import android.widget.FrameLayout;
+
+import com.android.internal.os.SomeArgs;
+import com.android.internal.util.Preconditions;
+
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+/**
+ * The TvInputService class represents a TV input or source such as HDMI or built-in tuner which
+ * provides pass-through video or broadcast TV programs.
+ *
+ * <p>Applications will not normally use this service themselves, instead relying on the standard
+ * interaction provided by {@link TvView}. Those implementing TV input services should normally do
+ * so by deriving from this class and providing their own session implementation based on
+ * {@link TvInputService.Session}. All TV input services must require that clients hold the
+ * {@link android.Manifest.permission#BIND_TV_INPUT} in order to interact with the service; if this
+ * permission is not specified in the manifest, the system will refuse to bind to that TV input
+ * service.
+ */
+public abstract class TvInputService extends Service {
+ private static final boolean DEBUG = false;
+ private static final String TAG = "TvInputService";
+
+ private static final int DETACH_OVERLAY_VIEW_TIMEOUT_MS = 5000;
+
+ /**
+     * This is the interface name that a service implementing a TV input should say that it supports
+ * -- that is, this is the action it uses for its intent filter. To be supported, the service
+ * must also require the {@link android.Manifest.permission#BIND_TV_INPUT} permission so that
+ * other applications cannot abuse it.
+ */
+ public static final String SERVICE_INTERFACE = "android.media.tv.TvInputService";
+
+ /**
+ * Name under which a TvInputService component publishes information about itself.
+ * This meta-data must reference an XML resource containing an
+ * <code>&lt;{@link android.R.styleable#TvInputService tv-input}&gt;</code>
+ * tag.
+ */
+ public static final String SERVICE_META_DATA = "android.media.tv.input";
+
+ /**
+     * Handler instance to handle requests from TV Input Manager Service. Should be run in the main
+ * looper to be synchronously run with {@code Session.mHandler}.
+ */
+ private final Handler mServiceHandler = new ServiceHandler();
+ private final RemoteCallbackList<ITvInputServiceCallback> mCallbacks =
+ new RemoteCallbackList<>();
+
+ private TvInputManager mTvInputManager;
+
+ @Override
+ public final IBinder onBind(Intent intent) {
+ return new ITvInputService.Stub() {
+ @Override
+ public void registerCallback(ITvInputServiceCallback cb) {
+ if (cb != null) {
+ mCallbacks.register(cb);
+ }
+ }
+
+ @Override
+ public void unregisterCallback(ITvInputServiceCallback cb) {
+ if (cb != null) {
+ mCallbacks.unregister(cb);
+ }
+ }
+
+ @Override
+ public void createSession(InputChannel channel, ITvInputSessionCallback cb,
+ String inputId) {
+ if (channel == null) {
+ Log.w(TAG, "Creating session without input channel");
+ }
+ if (cb == null) {
+ return;
+ }
+ SomeArgs args = SomeArgs.obtain();
+ args.arg1 = channel;
+ args.arg2 = cb;
+ args.arg3 = inputId;
+ mServiceHandler.obtainMessage(ServiceHandler.DO_CREATE_SESSION, args).sendToTarget();
+ }
+
+ @Override
+ public void createRecordingSession(ITvInputSessionCallback cb, String inputId) {
+ if (cb == null) {
+ return;
+ }
+ SomeArgs args = SomeArgs.obtain();
+ args.arg1 = cb;
+ args.arg2 = inputId;
+ mServiceHandler.obtainMessage(ServiceHandler.DO_CREATE_RECORDING_SESSION, args)
+ .sendToTarget();
+ }
+
+ @Override
+ public void notifyHardwareAdded(TvInputHardwareInfo hardwareInfo) {
+ mServiceHandler.obtainMessage(ServiceHandler.DO_ADD_HARDWARE_INPUT,
+ hardwareInfo).sendToTarget();
+ }
+
+ @Override
+ public void notifyHardwareRemoved(TvInputHardwareInfo hardwareInfo) {
+ mServiceHandler.obtainMessage(ServiceHandler.DO_REMOVE_HARDWARE_INPUT,
+ hardwareInfo).sendToTarget();
+ }
+
+ @Override
+ public void notifyHdmiDeviceAdded(HdmiDeviceInfo deviceInfo) {
+ mServiceHandler.obtainMessage(ServiceHandler.DO_ADD_HDMI_INPUT,
+ deviceInfo).sendToTarget();
+ }
+
+ @Override
+ public void notifyHdmiDeviceRemoved(HdmiDeviceInfo deviceInfo) {
+ mServiceHandler.obtainMessage(ServiceHandler.DO_REMOVE_HDMI_INPUT,
+ deviceInfo).sendToTarget();
+ }
+ };
+ }
+
+ /**
+ * Returns a concrete implementation of {@link Session}.
+ *
+ * <p>May return {@code null} if this TV input service fails to create a session for some
+ * reason. If TV input represents an external device connected to a hardware TV input,
+ * {@link HardwareSession} should be returned.
+ *
+ * @param inputId The ID of the TV input associated with the session.
+ */
+ @Nullable
+ public abstract Session onCreateSession(String inputId);
+
+ /**
+ * Returns a concrete implementation of {@link RecordingSession}.
+ *
+ * <p>May return {@code null} if this TV input service fails to create a recording session for
+ * some reason.
+ *
+ * @param inputId The ID of the TV input associated with the recording session.
+ */
+ @Nullable
+ public RecordingSession onCreateRecordingSession(String inputId) {
+ return null;
+ }
+
+ /**
+ * Returns a new {@link TvInputInfo} object if this service is responsible for
+ * {@code hardwareInfo}; otherwise, return {@code null}. Override to modify default behavior of
+ * ignoring all hardware input.
+ *
+ * @param hardwareInfo {@link TvInputHardwareInfo} object just added.
+ * @hide
+ */
+ @Nullable
+ @SystemApi
+ public TvInputInfo onHardwareAdded(TvInputHardwareInfo hardwareInfo) {
+ return null;
+ }
+
+ /**
+ * Returns the input ID for {@code deviceId} if it is handled by this service;
+ * otherwise, return {@code null}. Override to modify default behavior of ignoring all hardware
+ * input.
+ *
+ * @param hardwareInfo {@link TvInputHardwareInfo} object just removed.
+ * @hide
+ */
+ @Nullable
+ @SystemApi
+ public String onHardwareRemoved(TvInputHardwareInfo hardwareInfo) {
+ return null;
+ }
+
+ /**
+ * Returns a new {@link TvInputInfo} object if this service is responsible for
+ * {@code deviceInfo}; otherwise, return {@code null}. Override to modify default behavior of
+ * ignoring all HDMI logical input device.
+ *
+ * @param deviceInfo {@link HdmiDeviceInfo} object just added.
+ * @hide
+ */
+ @Nullable
+ @SystemApi
+ public TvInputInfo onHdmiDeviceAdded(HdmiDeviceInfo deviceInfo) {
+ return null;
+ }
+
+ /**
+ * Returns the input ID for {@code deviceInfo} if it is handled by this service; otherwise,
+ * return {@code null}. Override to modify default behavior of ignoring all HDMI logical input
+ * device.
+ *
+ * @param deviceInfo {@link HdmiDeviceInfo} object just removed.
+ * @hide
+ */
+ @Nullable
+ @SystemApi
+ public String onHdmiDeviceRemoved(HdmiDeviceInfo deviceInfo) {
+ return null;
+ }
+
+ private boolean isPassthroughInput(String inputId) {
+ if (mTvInputManager == null) {
+ mTvInputManager = (TvInputManager) getSystemService(Context.TV_INPUT_SERVICE);
+ }
+ TvInputInfo info = mTvInputManager.getTvInputInfo(inputId);
+ return info != null && info.isPassthroughInput();
+ }
+
+ /**
+ * Base class for derived classes to implement to provide a TV input session.
+ */
+ public abstract static class Session implements KeyEvent.Callback {
        // Interval between periodic time shift position reports to the application.
        private static final int POSITION_UPDATE_INTERVAL_MS = 1000;

        private final KeyEvent.DispatcherState mDispatcherState = new KeyEvent.DispatcherState();
        private final WindowManager mWindowManager;
        // Main-thread handler; also drives mTimeShiftPositionTrackingRunnable.
        final Handler mHandler;
        // Layout params of the overlay window, once created.
        private WindowManager.LayoutParams mWindowParams;
        // Surface the input renders onto; released by this session when replaced or released.
        private Surface mSurface;
        private final Context mContext;
        // Overlay view and its container; used while the overlay view is attached.
        private FrameLayout mOverlayViewContainer;
        private View mOverlayView;
        private OverlayViewCleanUpTask mOverlayViewCleanUpTask;
        // Whether the implementation enabled the overlay view via setOverlayViewEnabled().
        private boolean mOverlayViewEnabled;
        // Window token and frame used when (re)creating the overlay view.
        private IBinder mWindowToken;
        private Rect mOverlayFrame;
        // Last reported time shift positions; TIME_SHIFT_INVALID_TIME when unknown.
        private long mStartPositionMs = TvInputManager.TIME_SHIFT_INVALID_TIME;
        private long mCurrentPositionMs = TvInputManager.TIME_SHIFT_INVALID_TIME;
        private final TimeShiftPositionTrackingRunnable
                mTimeShiftPositionTrackingRunnable = new TimeShiftPositionTrackingRunnable();

        private final Object mLock = new Object();
        // @GuardedBy("mLock")
        private ITvInputSessionCallback mSessionCallback;
        // @GuardedBy("mLock")
        private final List<Runnable> mPendingActions = new ArrayList<>();
+
        /**
         * Creates a new Session.
         *
         * @param context The context of the application
         */
        public Session(Context context) {
            mContext = context;
            mWindowManager = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
            // All session callbacks and notifications are dispatched on the main looper.
            mHandler = new Handler(context.getMainLooper());
        }
+
        /**
         * Enables or disables the overlay view.
         *
         * <p>By default, the overlay view is disabled. Must be called explicitly after the
         * session is created to enable the overlay view.
         *
         * <p>The TV input service can disable its overlay view when the size of the overlay view is
         * insufficient to display the whole information, such as when used in Picture-in-picture.
         * Override {@link #onOverlayViewSizeChanged} to get the size of the overlay view, which
         * then can be used to determine whether to enable/disable the overlay view.
         *
         * @param enable {@code true} if you want to enable the overlay view. {@code false}
         *            otherwise.
         */
        public void setOverlayViewEnabled(final boolean enable) {
            // Posted to the main thread so mOverlayViewEnabled and view creation/removal are
            // only touched there.
            mHandler.post(new Runnable() {
                @Override
                public void run() {
                    if (enable == mOverlayViewEnabled) {
                        // No state change; nothing to do.
                        return;
                    }
                    mOverlayViewEnabled = enable;
                    if (enable) {
                        // Only create the view if a window token has already been provided;
                        // otherwise creation is deferred until one arrives.
                        if (mWindowToken != null) {
                            createOverlayView(mWindowToken, mOverlayFrame);
                        }
                    } else {
                        removeOverlayView(false);
                    }
                }
            });
        }
+
+ /**
+ * Dispatches an event to the application using this session.
+ *
+ * @param eventType The type of the event.
+ * @param eventArgs Optional arguments of the event.
+ * @hide
+ */
+ @SystemApi
+ public void notifySessionEvent(@NonNull final String eventType, final Bundle eventArgs) {
+ Preconditions.checkNotNull(eventType);
+ executeOrPostRunnableOnMainThread(new Runnable() {
+ @Override
+ public void run() {
+ try {
+ if (DEBUG) Log.d(TAG, "notifySessionEvent(" + eventType + ")");
+ if (mSessionCallback != null) {
+ mSessionCallback.onSessionEvent(eventType, eventArgs);
+ }
+ } catch (RemoteException e) {
+ Log.w(TAG, "error in sending event (event=" + eventType + ")", e);
+ }
+ }
+ });
+ }
+
+ /**
+ * Informs the application that the current channel is re-tuned for some reason and the
+ * session now displays the content from a new channel. This is used to handle special cases
+ * such as when the current channel becomes unavailable, it is necessary to send the user to
+ * a certain channel or the user changes channel in some other way (e.g. by using a
+ * dedicated remote).
+ *
+ * @param channelUri The URI of the new channel.
+ */
+ public void notifyChannelRetuned(final Uri channelUri) {
+ executeOrPostRunnableOnMainThread(new Runnable() {
+ @MainThread
+ @Override
+ public void run() {
+ try {
+ if (DEBUG) Log.d(TAG, "notifyChannelRetuned");
+ if (mSessionCallback != null) {
+ mSessionCallback.onChannelRetuned(channelUri);
+ }
+ } catch (RemoteException e) {
+ Log.w(TAG, "error in notifyChannelRetuned", e);
+ }
+ }
+ });
+ }
+
+ /**
+ * Sends the list of all audio/video/subtitle tracks. The is used by the framework to
+ * maintain the track information for a given session, which in turn is used by
+ * {@link TvView#getTracks} for the application to retrieve metadata for a given track type.
+ * The TV input service must call this method as soon as the track information becomes
+ * available or is updated. Note that in a case where a part of the information for a
+ * certain track is updated, it is not necessary to create a new {@link TvTrackInfo} object
+ * with a different track ID.
+ *
+ * @param tracks A list which includes track information.
+ */
+ public void notifyTracksChanged(final List<TvTrackInfo> tracks) {
+ final List<TvTrackInfo> tracksCopy = new ArrayList<>(tracks);
+ executeOrPostRunnableOnMainThread(new Runnable() {
+ @MainThread
+ @Override
+ public void run() {
+ try {
+ if (DEBUG) Log.d(TAG, "notifyTracksChanged");
+ if (mSessionCallback != null) {
+ mSessionCallback.onTracksChanged(tracksCopy);
+ }
+ } catch (RemoteException e) {
+ Log.w(TAG, "error in notifyTracksChanged", e);
+ }
+ }
+ });
+ }
+
+ /**
+ * Sends the type and ID of a selected track. This is used to inform the application that a
+ * specific track is selected. The TV input service must call this method as soon as a track
+ * is selected either by default or in response to a call to {@link #onSelectTrack}. The
+ * selected track ID for a given type is maintained in the framework until the next call to
+ * this method even after the entire track list is updated (but is reset when the session is
+ * tuned to a new channel), so care must be taken not to result in an obsolete track ID.
+ *
+ * @param type The type of the selected track. The type can be
+ * {@link TvTrackInfo#TYPE_AUDIO}, {@link TvTrackInfo#TYPE_VIDEO} or
+ * {@link TvTrackInfo#TYPE_SUBTITLE}.
+ * @param trackId The ID of the selected track.
+ * @see #onSelectTrack
+ */
+ public void notifyTrackSelected(final int type, final String trackId) {
+ executeOrPostRunnableOnMainThread(new Runnable() {
+ @MainThread
+ @Override
+ public void run() {
+ try {
+ if (DEBUG) Log.d(TAG, "notifyTrackSelected");
+ if (mSessionCallback != null) {
+ mSessionCallback.onTrackSelected(type, trackId);
+ }
+ } catch (RemoteException e) {
+ Log.w(TAG, "error in notifyTrackSelected", e);
+ }
+ }
+ });
+ }
+
+ /**
+ * Informs the application that the video is now available for watching. Video is blocked
+ * until this method is called.
+ *
+ * <p>The TV input service must call this method as soon as the content rendered onto its
+ * surface is ready for viewing. This method must be called each time {@link #onTune}
+ * is called.
+ *
+ * @see #notifyVideoUnavailable
+ */
+ public void notifyVideoAvailable() {
+ executeOrPostRunnableOnMainThread(new Runnable() {
+ @MainThread
+ @Override
+ public void run() {
+ try {
+ if (DEBUG) Log.d(TAG, "notifyVideoAvailable");
+ if (mSessionCallback != null) {
+ mSessionCallback.onVideoAvailable();
+ }
+ } catch (RemoteException e) {
+ Log.w(TAG, "error in notifyVideoAvailable", e);
+ }
+ }
+ });
+ }
+
        /**
         * Informs the application that the video became unavailable for some reason. This is
         * primarily used to signal the application to block the screen not to show any intermittent
         * video artifacts.
         *
         * @param reason The reason why the video became unavailable:
         *            <ul>
         *            <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_UNKNOWN}
         *            <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_TUNING}
         *            <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_WEAK_SIGNAL}
         *            <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_BUFFERING}
         *            <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_AUDIO_ONLY}
         *            </ul>
         * @see #notifyVideoAvailable
         */
        public void notifyVideoUnavailable(
                @TvInputManager.VideoUnavailableReason final int reason) {
            // Out-of-range reasons are logged as errors but still forwarded to the application.
            if (reason < TvInputManager.VIDEO_UNAVAILABLE_REASON_START
                    || reason > TvInputManager.VIDEO_UNAVAILABLE_REASON_END) {
                Log.e(TAG, "notifyVideoUnavailable - unknown reason: " + reason);
            }
            executeOrPostRunnableOnMainThread(new Runnable() {
                @MainThread
                @Override
                public void run() {
                    try {
                        if (DEBUG) Log.d(TAG, "notifyVideoUnavailable");
                        if (mSessionCallback != null) {
                            mSessionCallback.onVideoUnavailable(reason);
                        }
                    } catch (RemoteException e) {
                        Log.w(TAG, "error in notifyVideoUnavailable", e);
                    }
                }
            });
        }
+
        /**
         * Informs the application that the user is allowed to watch the current program content.
         *
         * <p>Each TV input service is required to query the system whether the user is allowed to
         * watch the current program before showing it to the user if the parental controls is
         * enabled (i.e. {@link TvInputManager#isParentalControlsEnabled
         * TvInputManager.isParentalControlsEnabled()} returns {@code true}). Whether the TV input
         * service should block the content or not is determined by invoking
         * {@link TvInputManager#isRatingBlocked TvInputManager.isRatingBlocked(TvContentRating)}
         * with the content rating for the current program. Then the {@link TvInputManager} makes a
         * judgment based on the user blocked ratings stored in the secure settings and returns the
         * result. If the rating in question turns out to be allowed by the user, the TV input
         * service must call this method to notify the application that it is permitted to show the
         * content.
         *
         * <p>Each TV input service also needs to continuously listen to any changes made to the
         * parental controls settings by registering a broadcast receiver to receive
         * {@link TvInputManager#ACTION_BLOCKED_RATINGS_CHANGED} and
         * {@link TvInputManager#ACTION_PARENTAL_CONTROLS_ENABLED_CHANGED} and immediately
         * reevaluate the current program with the new parental controls settings.
         *
         * @see #notifyContentBlocked
         * @see TvInputManager
         */
        public void notifyContentAllowed() {
            executeOrPostRunnableOnMainThread(new Runnable() {
                @MainThread
                @Override
                public void run() {
                    try {
                        if (DEBUG) Log.d(TAG, "notifyContentAllowed");
                        if (mSessionCallback != null) {
                            mSessionCallback.onContentAllowed();
                        }
                    } catch (RemoteException e) {
                        Log.w(TAG, "error in notifyContentAllowed", e);
                    }
                }
            });
        }
+
        /**
         * Informs the application that the current program content is blocked by parental controls.
         *
         * <p>Each TV input service is required to query the system whether the user is allowed to
         * watch the current program before showing it to the user if the parental controls is
         * enabled (i.e. {@link TvInputManager#isParentalControlsEnabled
         * TvInputManager.isParentalControlsEnabled()} returns {@code true}). Whether the TV input
         * service should block the content or not is determined by invoking
         * {@link TvInputManager#isRatingBlocked TvInputManager.isRatingBlocked(TvContentRating)}
         * with the content rating for the current program or {@link TvContentRating#UNRATED} in
         * case the rating information is missing. Then the {@link TvInputManager} makes a judgment
         * based on the user blocked ratings stored in the secure settings and returns the result.
         * If the rating in question turns out to be blocked, the TV input service must immediately
         * block the content and call this method with the content rating of the current program to
         * prompt the PIN verification screen.
         *
         * <p>Each TV input service also needs to continuously listen to any changes made to the
         * parental controls settings by registering a broadcast receiver to receive
         * {@link TvInputManager#ACTION_BLOCKED_RATINGS_CHANGED} and
         * {@link TvInputManager#ACTION_PARENTAL_CONTROLS_ENABLED_CHANGED} and immediately
         * reevaluate the current program with the new parental controls settings.
         *
         * @param rating The content rating for the current TV program. Can be
         *            {@link TvContentRating#UNRATED}.
         * @see #notifyContentAllowed
         * @see TvInputManager
         */
        public void notifyContentBlocked(@NonNull final TvContentRating rating) {
            Preconditions.checkNotNull(rating);
            executeOrPostRunnableOnMainThread(new Runnable() {
                @MainThread
                @Override
                public void run() {
                    try {
                        if (DEBUG) Log.d(TAG, "notifyContentBlocked");
                        // The rating is marshalled as its flattened string form.
                        if (mSessionCallback != null) {
                            mSessionCallback.onContentBlocked(rating.flattenToString());
                        }
                    } catch (RemoteException e) {
                        Log.w(TAG, "error in notifyContentBlocked", e);
                    }
                }
            });
        }
+
        /**
         * Informs the application that the time shift status is changed.
         *
         * <p>Prior to calling this method, the application assumes the status
         * {@link TvInputManager#TIME_SHIFT_STATUS_UNKNOWN}. Right after the session is created, it
         * is important to invoke the method with the status
         * {@link TvInputManager#TIME_SHIFT_STATUS_AVAILABLE} if the implementation does support
         * time shifting, or {@link TvInputManager#TIME_SHIFT_STATUS_UNSUPPORTED} otherwise. Failure
         * to notify the current status change immediately might result in an undesirable
         * behavior in the application such as hiding the play controls.
         *
         * <p>If the status {@link TvInputManager#TIME_SHIFT_STATUS_AVAILABLE} is reported, the
         * application assumes it can pause/resume playback, seek to a specified time position and
         * set playback rate and audio mode. The implementation should override
         * {@link #onTimeShiftPause}, {@link #onTimeShiftResume}, {@link #onTimeShiftSeekTo},
         * {@link #onTimeShiftGetStartPosition}, {@link #onTimeShiftGetCurrentPosition} and
         * {@link #onTimeShiftSetPlaybackParams}.
         *
         * @param status The current time shift status. Should be one of the followings.
         * <ul>
         * <li>{@link TvInputManager#TIME_SHIFT_STATUS_UNSUPPORTED}
         * <li>{@link TvInputManager#TIME_SHIFT_STATUS_UNAVAILABLE}
         * <li>{@link TvInputManager#TIME_SHIFT_STATUS_AVAILABLE}
         * </ul>
         */
        public void notifyTimeShiftStatusChanged(@TvInputManager.TimeShiftStatus final int status) {
            executeOrPostRunnableOnMainThread(new Runnable() {
                @MainThread
                @Override
                public void run() {
                    // Start/stop periodic position tracking before telling the application
                    // about the new status.
                    timeShiftEnablePositionTracking(
                            status == TvInputManager.TIME_SHIFT_STATUS_AVAILABLE);
                    try {
                        if (DEBUG) Log.d(TAG, "notifyTimeShiftStatusChanged");
                        if (mSessionCallback != null) {
                            mSessionCallback.onTimeShiftStatusChanged(status);
                        }
                    } catch (RemoteException e) {
                        Log.w(TAG, "error in notifyTimeShiftStatusChanged", e);
                    }
                }
            });
        }
+
+ private void notifyTimeShiftStartPositionChanged(final long timeMs) {
+ executeOrPostRunnableOnMainThread(new Runnable() {
+ @MainThread
+ @Override
+ public void run() {
+ try {
+ if (DEBUG) Log.d(TAG, "notifyTimeShiftStartPositionChanged");
+ if (mSessionCallback != null) {
+ mSessionCallback.onTimeShiftStartPositionChanged(timeMs);
+ }
+ } catch (RemoteException e) {
+ Log.w(TAG, "error in notifyTimeShiftStartPositionChanged", e);
+ }
+ }
+ });
+ }
+
+ private void notifyTimeShiftCurrentPositionChanged(final long timeMs) {
+ executeOrPostRunnableOnMainThread(new Runnable() {
+ @MainThread
+ @Override
+ public void run() {
+ try {
+ if (DEBUG) Log.d(TAG, "notifyTimeShiftCurrentPositionChanged");
+ if (mSessionCallback != null) {
+ mSessionCallback.onTimeShiftCurrentPositionChanged(timeMs);
+ }
+ } catch (RemoteException e) {
+ Log.w(TAG, "error in notifyTimeShiftCurrentPositionChanged", e);
+ }
+ }
+ });
+ }
+
+ /**
+ * Assigns a size and position to the surface passed in {@link #onSetSurface}. The position
+ * is relative to the overlay view that sits on top of this surface.
+ *
+ * @param left Left position in pixels, relative to the overlay view.
+ * @param top Top position in pixels, relative to the overlay view.
+ * @param right Right position in pixels, relative to the overlay view.
+ * @param bottom Bottom position in pixels, relative to the overlay view.
+ * @see #onOverlayViewSizeChanged
+ */
+ public void layoutSurface(final int left, final int top, final int right,
+ final int bottom) {
+ if (left > right || top > bottom) {
+ throw new IllegalArgumentException("Invalid parameter");
+ }
+ executeOrPostRunnableOnMainThread(new Runnable() {
+ @MainThread
+ @Override
+ public void run() {
+ try {
+ if (DEBUG) Log.d(TAG, "layoutSurface (l=" + left + ", t=" + top + ", r="
+ + right + ", b=" + bottom + ",)");
+ if (mSessionCallback != null) {
+ mSessionCallback.onLayoutSurface(left, top, right, bottom);
+ }
+ } catch (RemoteException e) {
+ Log.w(TAG, "error in layoutSurface", e);
+ }
+ }
+ });
+ }
+
        /**
         * Called when the session is released.
         *
         * <p>This is the place to free any resources the implementation holds. It is invoked via
         * {@link #release} before the surface is released and the overlay view is removed.
         */
        public abstract void onRelease();
+
        /**
         * Sets the current session as the main session. The main session is a session whose
         * corresponding TV input determines the HDMI-CEC active source device.
         *
         * <p>TV input service that manages HDMI-CEC logical device should implement {@link
         * #onSetMain} to (1) select the corresponding HDMI logical device as the source device
         * when {@code isMain} is {@code true}, and to (2) select the internal device (= TV itself)
         * as the source device when {@code isMain} is {@code false} and the session is still main.
         * Also, if a surface is passed to a non-main session and active source is changed to
         * initiate the surface, the active source should be returned to the main session.
         *
         * <p>{@link TvView} guarantees that, when tuning involves a session transition, {@code
         * onSetMain(true)} for new session is called first, {@code onSetMain(false)} for old
         * session is called afterwards. This allows {@code onSetMain(false)} to be no-op when TV
         * input service knows that the next main session corresponds to another HDMI logical
         * device. Practically, this implies that one TV input service should handle all HDMI port
         * and HDMI-CEC logical devices for smooth active source transition.
         *
         * @param isMain If true, session should become main.
         * @see TvView#setMain
         * @hide
         */
        @SystemApi
        public void onSetMain(boolean isMain) {
            // No-op by default; HDMI-CEC-aware inputs override this.
        }
+
        /**
         * Called when the application sets the surface.
         *
         * <p>The TV input service should render video onto the given surface. When called with
         * {@code null}, the input service should immediately free any references to the
         * currently set surface and stop using it.
         *
         * @param surface The surface to be used for video rendering. Can be {@code null}.
         * @return {@code true} if the surface was set successfully, {@code false} otherwise.
         * @see #onSurfaceChanged
         */
        public abstract boolean onSetSurface(@Nullable Surface surface);
+
        /**
         * Called after any structural changes (format or size) have been made to the surface passed
         * in {@link #onSetSurface}. This method is always called at least once, after
         * {@link #onSetSurface} is called with non-null surface.
         *
         * @param format The new PixelFormat of the surface.
         * @param width The new width of the surface.
         * @param height The new height of the surface.
         */
        public void onSurfaceChanged(int format, int width, int height) {
            // No-op by default.
        }
+
        /**
         * Called when the size of the overlay view is changed by the application.
         *
         * <p>This is always called at least once when the session is created regardless of whether
         * the overlay view is enabled or not. The overlay view size is the same as the containing
         * {@link TvView}. Note that the size of the underlying surface can be different if the
         * surface was changed by calling {@link #layoutSurface}.
         *
         * @param width The width of the overlay view.
         * @param height The height of the overlay view.
         */
        public void onOverlayViewSizeChanged(int width, int height) {
            // No-op by default.
        }
+
        /**
         * Sets the relative stream volume of the current TV input session.
         *
         * <p>The implementation should honor this request in order to handle audio focus changes or
         * mute the current session when multiple sessions, possibly from different inputs are
         * active. If the method has not yet been called, the implementation should assume the
         * default value of {@code 1.0f}.
         *
         * @param volume A volume value between {@code 0.0f} and {@code 1.0f}.
         */
        public abstract void onSetStreamVolume(@FloatRange(from = 0.0, to = 1.0) float volume);
+
        /**
         * Tunes to a given channel.
         *
         * <p>No video will be displayed until {@link #notifyVideoAvailable()} is called.
         * Also, {@link #notifyVideoUnavailable(int)} should be called when the TV input cannot
         * continue playing the given channel.
         *
         * @param channelUri The URI of the channel.
         * @return {@code true} if the tuning was successful, {@code false} otherwise.
         * @see #onTune(Uri, Bundle)
         */
        public abstract boolean onTune(Uri channelUri);
+
        /**
         * Tunes to a given channel. Override this method in order to handle domain-specific
         * features that are only known between certain TV inputs and their clients.
         *
         * <p>The default implementation calls {@link #onTune(Uri)}.
         *
         * @param channelUri The URI of the channel.
         * @param params Domain-specific data for this tune request. Keys <em>must</em> be a scoped
         *            name, i.e. prefixed with a package name you own, so that different developers
         *            will not create conflicting keys.
         * @return {@code true} if the tuning was successful, {@code false} otherwise.
         */
        public boolean onTune(Uri channelUri, Bundle params) {
            // Domain-specific params are ignored unless a subclass overrides this method.
            return onTune(channelUri);
        }
+
        /**
         * Enables or disables the caption.
         *
         * <p>The locale for the user's preferred captioning language can be obtained by calling
         * {@link CaptioningManager#getLocale CaptioningManager.getLocale()}.
         *
         * @param enabled {@code true} to enable captions, {@code false} to disable them.
         * @see CaptioningManager
         */
        public abstract void onSetCaptionEnabled(boolean enabled);
+
        /**
         * Requests to unblock the content according to the given rating.
         *
         * <p>The implementation should unblock the content.
         * TV input service has responsibility to decide when/how the unblock expires
         * while it can keep previously unblocked ratings in order not to ask a user
         * to unblock whenever a content rating is changed.
         * Therefore an unblocked rating can be valid for a channel, a program,
         * or certain amount of time depending on the implementation.
         *
         * @param unblockedRating An unblocked content rating
         */
        public void onUnblockContent(TvContentRating unblockedRating) {
            // No-op by default.
        }
+
        /**
         * Selects a given track.
         *
         * <p>If this is done successfully, the implementation should call
         * {@link #notifyTrackSelected} to help applications maintain the up-to-date list of the
         * selected tracks.
         *
         * @param trackId The ID of the track to select. {@code null} means to unselect the current
         *            track for a given type.
         * @param type The type of the track to select. The type can be
         *            {@link TvTrackInfo#TYPE_AUDIO}, {@link TvTrackInfo#TYPE_VIDEO} or
         *            {@link TvTrackInfo#TYPE_SUBTITLE}.
         * @return {@code true} if the track selection was successful, {@code false} otherwise.
         * @see #notifyTrackSelected
         */
        public boolean onSelectTrack(int type, @Nullable String trackId) {
            // Track selection is unsupported unless a subclass overrides this method.
            return false;
        }
+
        /**
         * Processes a private command sent from the application to the TV input. This can be used
         * to provide domain-specific features that are only known between certain TV inputs and
         * their clients.
         *
         * @param action Name of the command to be performed. This <em>must</em> be a scoped name,
         *            i.e. prefixed with a package name you own, so that different developers will
         *            not create conflicting commands.
         * @param data Any data to include with the command.
         */
        public void onAppPrivateCommand(@NonNull String action, Bundle data) {
            // No-op by default; commands are silently ignored unless overridden.
        }
+
        /**
         * Called when the application requests to create an overlay view. Each session
         * implementation can override this method and return its own view.
         *
         * @return a view attached to the overlay window, or {@code null} for no overlay view
         */
        public View onCreateOverlayView() {
            // Default: no overlay view is provided.
            return null;
        }
+
        /**
         * Called when the application requests to play a given recorded TV program.
         *
         * @param recordedProgramUri The URI of a recorded TV program.
         * @see #onTimeShiftResume()
         * @see #onTimeShiftPause()
         * @see #onTimeShiftSeekTo(long)
         * @see #onTimeShiftSetPlaybackParams(PlaybackParams)
         * @see #onTimeShiftGetStartPosition()
         * @see #onTimeShiftGetCurrentPosition()
         */
        public void onTimeShiftPlay(Uri recordedProgramUri) {
            // No-op by default; inputs supporting recorded playback override this.
        }
+
        /**
         * Called when the application requests to pause playback.
         *
         * @see #onTimeShiftPlay(Uri)
         * @see #onTimeShiftResume()
         * @see #onTimeShiftSeekTo(long)
         * @see #onTimeShiftSetPlaybackParams(PlaybackParams)
         * @see #onTimeShiftGetStartPosition()
         * @see #onTimeShiftGetCurrentPosition()
         */
        public void onTimeShiftPause() {
            // No-op by default; inputs supporting time shifting override this.
        }
+
        /**
         * Called when the application requests to resume playback.
         *
         * @see #onTimeShiftPlay(Uri)
         * @see #onTimeShiftPause()
         * @see #onTimeShiftSeekTo(long)
         * @see #onTimeShiftSetPlaybackParams(PlaybackParams)
         * @see #onTimeShiftGetStartPosition()
         * @see #onTimeShiftGetCurrentPosition()
         */
        public void onTimeShiftResume() {
            // No-op by default; inputs supporting time shifting override this.
        }
+
        /**
         * Called when the application requests to seek to a specified time position. Normally, the
         * position is given within range between the start and the current time, inclusively. The
         * implementation is expected to seek to the nearest time position if the given position is
         * not in the range.
         *
         * @param timeMs The time position to seek to, in milliseconds since the epoch.
         * @see #onTimeShiftPlay(Uri)
         * @see #onTimeShiftResume()
         * @see #onTimeShiftPause()
         * @see #onTimeShiftSetPlaybackParams(PlaybackParams)
         * @see #onTimeShiftGetStartPosition()
         * @see #onTimeShiftGetCurrentPosition()
         */
        public void onTimeShiftSeekTo(long timeMs) {
            // No-op by default; inputs supporting time shifting override this.
        }
+
        /**
         * Called when the application sets playback parameters containing the speed and audio mode.
         *
         * <p>Once the playback parameters are set, the implementation should honor the current
         * settings until the next tune request. Pause/resume/seek request does not reset the
         * parameters previously set.
         *
         * @param params The playback params.
         * @see #onTimeShiftPlay(Uri)
         * @see #onTimeShiftResume()
         * @see #onTimeShiftPause()
         * @see #onTimeShiftSeekTo(long)
         * @see #onTimeShiftGetStartPosition()
         * @see #onTimeShiftGetCurrentPosition()
         */
        public void onTimeShiftSetPlaybackParams(PlaybackParams params) {
            // No-op by default; inputs supporting time shifting override this.
        }
+
        /**
         * Returns the start position for time shifting, in milliseconds since the epoch.
         * Returns {@link TvInputManager#TIME_SHIFT_INVALID_TIME} if the position is unknown at the
         * moment.
         *
         * <p>The start position for time shifting indicates the earliest possible time the user can
         * seek to. Initially this is equivalent to the time when the implementation starts
         * recording. Later it may be adjusted because there is insufficient space or the duration
         * of recording is limited by the implementation. The application does not allow the user to
         * seek to a position earlier than the start position.
         *
         * <p>For playback of a recorded program initiated by {@link #onTimeShiftPlay(Uri)}, the
         * start position should be 0 and does not change.
         *
         * @see #onTimeShiftPlay(Uri)
         * @see #onTimeShiftResume()
         * @see #onTimeShiftPause()
         * @see #onTimeShiftSeekTo(long)
         * @see #onTimeShiftSetPlaybackParams(PlaybackParams)
         * @see #onTimeShiftGetCurrentPosition()
         */
        public long onTimeShiftGetStartPosition() {
            // Unknown by default; inputs supporting time shifting override this.
            return TvInputManager.TIME_SHIFT_INVALID_TIME;
        }
+
        /**
         * Returns the current position for time shifting, in milliseconds since the epoch.
         * Returns {@link TvInputManager#TIME_SHIFT_INVALID_TIME} if the position is unknown at the
         * moment.
         *
         * <p>The current position for time shifting is the same as the current position of
         * playback. It should be equal to or greater than the start position reported by
         * {@link #onTimeShiftGetStartPosition()}. When playback is completed, the current position
         * should stay where the playback ends, in other words, the returned value of this method
         * should be equal to the start position plus the duration of the program.
         *
         * @see #onTimeShiftPlay(Uri)
         * @see #onTimeShiftResume()
         * @see #onTimeShiftPause()
         * @see #onTimeShiftSeekTo(long)
         * @see #onTimeShiftSetPlaybackParams(PlaybackParams)
         * @see #onTimeShiftGetStartPosition()
         */
        public long onTimeShiftGetCurrentPosition() {
            // Unknown by default; inputs supporting time shifting override this.
            return TvInputManager.TIME_SHIFT_INVALID_TIME;
        }
+
        /**
         * Default implementation of {@link android.view.KeyEvent.Callback#onKeyDown(int, KeyEvent)
         * KeyEvent.Callback.onKeyDown()}: always returns false (doesn't handle the event).
         *
         * <p>Override this to intercept key down events before they are processed by the
         * application. If you return true, the application will not process the event itself. If
         * you return false, the normal application processing will occur as if the TV input had not
         * seen the event at all.
         *
         * @param keyCode The value in event.getKeyCode().
         * @param event Description of the key event.
         * @return If you handled the event, return {@code true}. If you want to allow the event to
         *         be handled by the next receiver, return {@code false}.
         */
        @Override
        public boolean onKeyDown(int keyCode, KeyEvent event) {
            // Not handled; let the application process the event.
            return false;
        }
+
        /**
         * Default implementation of
         * {@link android.view.KeyEvent.Callback#onKeyLongPress(int, KeyEvent)
         * KeyEvent.Callback.onKeyLongPress()}: always returns false (doesn't handle the event).
         *
         * <p>Override this to intercept key long press events before they are processed by the
         * application. If you return true, the application will not process the event itself. If
         * you return false, the normal application processing will occur as if the TV input had not
         * seen the event at all.
         *
         * @param keyCode The value in event.getKeyCode().
         * @param event Description of the key event.
         * @return If you handled the event, return {@code true}. If you want to allow the event to
         *         be handled by the next receiver, return {@code false}.
         */
        @Override
        public boolean onKeyLongPress(int keyCode, KeyEvent event) {
            // Not handled; let the application process the event.
            return false;
        }
+
        /**
         * Default implementation of
         * {@link android.view.KeyEvent.Callback#onKeyMultiple(int, int, KeyEvent)
         * KeyEvent.Callback.onKeyMultiple()}: always returns false (doesn't handle the event).
         *
         * <p>Override this to intercept special key multiple events before they are processed by
         * the application. If you return true, the application will not itself process the event.
         * If you return false, the normal application processing will occur as if the TV input had
         * not seen the event at all.
         *
         * @param keyCode The value in event.getKeyCode().
         * @param count The number of times the action was made.
         * @param event Description of the key event.
         * @return If you handled the event, return {@code true}. If you want to allow the event to
         *         be handled by the next receiver, return {@code false}.
         */
        @Override
        public boolean onKeyMultiple(int keyCode, int count, KeyEvent event) {
            // Not handled; let the application process the event.
            return false;
        }
+
        /**
         * Default implementation of {@link android.view.KeyEvent.Callback#onKeyUp(int, KeyEvent)
         * KeyEvent.Callback.onKeyUp()}: always returns false (doesn't handle the event).
         *
         * <p>Override this to intercept key up events before they are processed by the application.
         * If you return true, the application will not itself process the event. If you return false,
         * the normal application processing will occur as if the TV input had not seen the event at
         * all.
         *
         * @param keyCode The value in event.getKeyCode().
         * @param event Description of the key event.
         * @return If you handled the event, return {@code true}. If you want to allow the event to
         *         be handled by the next receiver, return {@code false}.
         */
        @Override
        public boolean onKeyUp(int keyCode, KeyEvent event) {
            // Not handled; let the application process the event.
            return false;
        }
+
        /**
         * Implement this method to handle touch screen motion events on the current input session.
         *
         * @param event The motion event being received.
         * @return If you handled the event, return {@code true}. If you want to allow the event to
         *         be handled by the next receiver, return {@code false}.
         * @see View#onTouchEvent
         */
        public boolean onTouchEvent(MotionEvent event) {
            // Not handled; let the application process the event.
            return false;
        }
+
        /**
         * Implement this method to handle trackball events on the current input session.
         *
         * @param event The motion event being received.
         * @return If you handled the event, return {@code true}. If you want to allow the event to
         *         be handled by the next receiver, return {@code false}.
         * @see View#onTrackballEvent
         */
        public boolean onTrackballEvent(MotionEvent event) {
            // Not handled; let the application process the event.
            return false;
        }
+
        /**
         * Implement this method to handle generic motion events on the current input session.
         *
         * @param event The motion event being received.
         * @return If you handled the event, return {@code true}. If you want to allow the event to
         *         be handled by the next receiver, return {@code false}.
         * @see View#onGenericMotionEvent
         */
        public boolean onGenericMotionEvent(MotionEvent event) {
            // Not handled; let the application process the event.
            return false;
        }
+
        /**
         * This method is called when the application would like to stop using the current input
         * session.
         *
         * <p>Teardown order is deliberate: implementation cleanup first, then the surface, then
         * the callback/pending actions, then the overlay view, then position tracking.
         */
        void release() {
            onRelease();
            // Release the rendering surface after the implementation had a chance to clean up.
            if (mSurface != null) {
                mSurface.release();
                mSurface = null;
            }
            // Drop the callback and any queued notifications so nothing fires after release.
            synchronized(mLock) {
                mSessionCallback = null;
                mPendingActions.clear();
            }
            // Removes the overlay view lastly so that any hanging on the main thread can be handled
            // in {@link #scheduleOverlayViewCleanup}.
            removeOverlayView(true);
            // Stop periodic time shift position updates.
            mHandler.removeCallbacks(mTimeShiftPositionTrackingRunnable);
        }
+
+ /**
+ * Calls {@link #onSetMain}.
+ */
+ void setMain(boolean isMain) {
+ // Framework-internal entry point; forwards directly to the subclass hook.
+ onSetMain(isMain);
+ }
+
+ /**
+ * Calls {@link #onSetSurface}.
+ */
+ void setSurface(Surface surface) {
+ onSetSurface(surface);
+ // Releases the previously held surface before adopting the new one (which may be null).
+ if (mSurface != null) {
+ mSurface.release();
+ }
+ mSurface = surface;
+ // TODO: Handle failure.
+ }
+
+ /**
+ * Calls {@link #onSurfaceChanged}.
+ */
+ void dispatchSurfaceChanged(int format, int width, int height) {
+ // Debug-only trace of the new surface geometry.
+ if (DEBUG) {
+ Log.d(TAG, "dispatchSurfaceChanged(format=" + format + ", width=" + width
+ + ", height=" + height + ")");
+ }
+ onSurfaceChanged(format, width, height);
+ }
+
+ /**
+ * Calls {@link #onSetStreamVolume}.
+ */
+ void setStreamVolume(float volume) {
+ // Framework-internal entry point; forwards directly to the subclass hook.
+ onSetStreamVolume(volume);
+ }
+
+ /**
+ * Calls {@link #onTune(Uri, Bundle)}.
+ */
+ void tune(Uri channelUri, Bundle params) {
+ // Resets the time-shift position so a stale value from the previous channel is not
+ // reported for the new one.
+ mCurrentPositionMs = TvInputManager.TIME_SHIFT_INVALID_TIME;
+ onTune(channelUri, params);
+ // TODO: Handle failure.
+ }
+
+ /**
+ * Calls {@link #onSetCaptionEnabled}.
+ */
+ void setCaptionEnabled(boolean enabled) {
+ // Framework-internal entry point; forwards directly to the subclass hook.
+ onSetCaptionEnabled(enabled);
+ }
+
+ /**
+ * Calls {@link #onSelectTrack}.
+ */
+ void selectTrack(int type, String trackId) {
+ // Framework-internal entry point; forwards directly to the subclass hook.
+ onSelectTrack(type, trackId);
+ }
+
+ /**
+ * Calls {@link #onUnblockContent}.
+ */
+ void unblockContent(String unblockedRating) {
+ // The rating arrives flattened as a String over the binder; reconstruct it before
+ // handing it to the subclass hook.
+ onUnblockContent(TvContentRating.unflattenFromString(unblockedRating));
+ // TODO: Handle failure.
+ }
+
+ /**
+ * Calls {@link #onAppPrivateCommand}.
+ */
+ void appPrivateCommand(String action, Bundle data) {
+ // Framework-internal entry point; forwards directly to the subclass hook.
+ onAppPrivateCommand(action, data);
+ }
+
+ /**
+ * Creates an overlay view. This calls {@link #onCreateOverlayView} to get a view to attach
+ * to the overlay window.
+ *
+ * @param windowToken A window token of the application.
+ * @param frame A position of the overlay view.
+ */
+ void createOverlayView(IBinder windowToken, Rect frame) {
+ // Tears down any previous overlay first, but keeps the (old) window token until it is
+ // replaced below.
+ if (mOverlayViewContainer != null) {
+ removeOverlayView(false);
+ }
+ if (DEBUG) Log.d(TAG, "create overlay view(" + frame + ")");
+ mWindowToken = windowToken;
+ mOverlayFrame = frame;
+ // The size callback fires even when the overlay view itself is disabled.
+ onOverlayViewSizeChanged(frame.right - frame.left, frame.bottom - frame.top);
+ if (!mOverlayViewEnabled) {
+ return;
+ }
+ mOverlayView = onCreateOverlayView();
+ if (mOverlayView == null) {
+ return;
+ }
+ // A new overlay supersedes any pending watchdog from a previous removal.
+ if (mOverlayViewCleanUpTask != null) {
+ mOverlayViewCleanUpTask.cancel(true);
+ mOverlayViewCleanUpTask = null;
+ }
+ // Creates a container view to check hanging on the overlay view detaching.
+ // Adding/removing the overlay view to/from the container make the view attach/detach
+ // logic run on the main thread.
+ mOverlayViewContainer = new FrameLayout(mContext.getApplicationContext());
+ mOverlayViewContainer.addView(mOverlayView);
+ // TvView's window type is TYPE_APPLICATION_MEDIA and we want to create
+ // an overlay window above the media window but below the application window.
+ int type = WindowManager.LayoutParams.TYPE_APPLICATION_MEDIA_OVERLAY;
+ // We make the overlay view non-focusable and non-touchable so that
+ // the application that owns the window token can decide whether to consume or
+ // dispatch the input events.
+ int flags = WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE
+ | WindowManager.LayoutParams.FLAG_NOT_TOUCHABLE
+ | WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS;
+ if (ActivityManager.isHighEndGfx()) {
+ flags |= WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED;
+ }
+ mWindowParams = new WindowManager.LayoutParams(
+ frame.right - frame.left, frame.bottom - frame.top,
+ frame.left, frame.top, type, flags, PixelFormat.TRANSPARENT);
+ mWindowParams.privateFlags |=
+ WindowManager.LayoutParams.PRIVATE_FLAG_NO_MOVE_ANIMATION;
+ mWindowParams.gravity = Gravity.START | Gravity.TOP;
+ mWindowParams.token = windowToken;
+ mWindowManager.addView(mOverlayViewContainer, mWindowParams);
+ }
+
+ /**
+ * Relayouts the current overlay view.
+ *
+ * @param frame A new position of the overlay view.
+ */
+ void relayoutOverlayView(Rect frame) {
+ if (DEBUG) Log.d(TAG, "relayoutOverlayView(" + frame + ")");
+ // Only notify the subclass when the size actually changed; a pure move is silent.
+ if (mOverlayFrame == null || mOverlayFrame.width() != frame.width()
+ || mOverlayFrame.height() != frame.height()) {
+ // Note: relayoutOverlayView is called whenever TvView's layout is changed
+ // regardless of setOverlayViewEnabled.
+ onOverlayViewSizeChanged(frame.right - frame.left, frame.bottom - frame.top);
+ }
+ mOverlayFrame = frame;
+ // Nothing to relayout when the overlay is disabled or was never attached.
+ if (!mOverlayViewEnabled || mOverlayViewContainer == null) {
+ return;
+ }
+ mWindowParams.x = frame.left;
+ mWindowParams.y = frame.top;
+ mWindowParams.width = frame.right - frame.left;
+ mWindowParams.height = frame.bottom - frame.top;
+ mWindowManager.updateViewLayout(mOverlayViewContainer, mWindowParams);
+ }
+
+ /**
+ * Removes the current overlay view.
+ *
+ * @param clearWindowToken {@code true} to also forget the application's window token and
+ * overlay frame; {@code false} to keep them (used when the overlay is being
+ * recreated in place).
+ */
+ void removeOverlayView(boolean clearWindowToken) {
+ if (DEBUG) Log.d(TAG, "removeOverlayView(" + mOverlayViewContainer + ")");
+ if (clearWindowToken) {
+ mWindowToken = null;
+ mOverlayFrame = null;
+ }
+ if (mOverlayViewContainer != null) {
+ // Removes the overlay view from the view hierarchy in advance so that it can be
+ // cleaned up in the {@link OverlayViewCleanUpTask} if the remove process is
+ // hanging.
+ mOverlayViewContainer.removeView(mOverlayView);
+ mOverlayView = null;
+ mWindowManager.removeView(mOverlayViewContainer);
+ mOverlayViewContainer = null;
+ mWindowParams = null;
+ }
+ }
+
+ /**
+ * Calls {@link #onTimeShiftPlay(Uri)}.
+ */
+ void timeShiftPlay(Uri recordedProgramUri) {
+ // Recorded-program playback starts from the beginning, so position starts at 0.
+ mCurrentPositionMs = 0;
+ onTimeShiftPlay(recordedProgramUri);
+ }
+
+ /**
+ * Calls {@link #onTimeShiftPause}.
+ */
+ void timeShiftPause() {
+ // Framework-internal entry point; forwards directly to the subclass hook.
+ onTimeShiftPause();
+ }
+
+ /**
+ * Calls {@link #onTimeShiftResume}.
+ */
+ void timeShiftResume() {
+ // Framework-internal entry point; forwards directly to the subclass hook.
+ onTimeShiftResume();
+ }
+
+ /**
+ * Calls {@link #onTimeShiftSeekTo}.
+ */
+ void timeShiftSeekTo(long timeMs) {
+ // Framework-internal entry point; forwards directly to the subclass hook.
+ onTimeShiftSeekTo(timeMs);
+ }
+
+ /**
+ * Calls {@link #onTimeShiftSetPlaybackParams}.
+ */
+ void timeShiftSetPlaybackParams(PlaybackParams params) {
+ // Framework-internal entry point; forwards directly to the subclass hook.
+ onTimeShiftSetPlaybackParams(params);
+ }
+
+ /**
+ * Enable/disable position tracking.
+ *
+ * @param enable {@code true} to enable tracking, {@code false} otherwise.
+ */
+ void timeShiftEnablePositionTracking(boolean enable) {
+ if (enable) {
+ // Kicks off the self-rescheduling tracking runnable on the main thread.
+ mHandler.post(mTimeShiftPositionTrackingRunnable);
+ } else {
+ // Stops tracking and invalidates the cached positions so the next enable
+ // re-notifies from scratch.
+ mHandler.removeCallbacks(mTimeShiftPositionTrackingRunnable);
+ mStartPositionMs = TvInputManager.TIME_SHIFT_INVALID_TIME;
+ mCurrentPositionMs = TvInputManager.TIME_SHIFT_INVALID_TIME;
+ }
+ }
+
+ /**
+ * Schedules a task which checks whether the overlay view is detached and kills the process
+ * if it is not. Note that this method is expected to be called in a non-main thread.
+ */
+ void scheduleOverlayViewCleanup() {
+ // Local snapshot: the field may be cleared concurrently on the main thread.
+ View overlayViewParent = mOverlayViewContainer;
+ if (overlayViewParent != null) {
+ mOverlayViewCleanUpTask = new OverlayViewCleanUpTask();
+ mOverlayViewCleanUpTask.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR,
+ overlayViewParent);
+ }
+ }
+
+ /**
+ * Takes care of dispatching incoming input events and tells whether the event was handled.
+ *
+ * @param event The input event to dispatch.
+ * @param receiver The receiver to notify when dispatching continues asynchronously.
+ * @return One of {@link TvInputManager.Session#DISPATCH_HANDLED},
+ * {@link TvInputManager.Session#DISPATCH_NOT_HANDLED} or
+ * {@link TvInputManager.Session#DISPATCH_IN_PROGRESS}.
+ */
+ int dispatchInputEvent(InputEvent event, InputEventReceiver receiver) {
+ if (DEBUG) Log.d(TAG, "dispatchInputEvent(" + event + ")");
+ boolean isNavigationKey = false;
+ boolean skipDispatchToOverlayView = false;
+ if (event instanceof KeyEvent) {
+ KeyEvent keyEvent = (KeyEvent) event;
+ // Gives this session's KeyEvent.Callback (onKeyDown/Up/etc.) first crack at the
+ // key, with mDispatcherState tracking long-press/tracking state.
+ if (keyEvent.dispatch(this, mDispatcherState, this)) {
+ return TvInputManager.Session.DISPATCH_HANDLED;
+ }
+ isNavigationKey = isNavigationKey(keyEvent.getKeyCode());
+ // When media keys and KEYCODE_MEDIA_AUDIO_TRACK are dispatched to ViewRootImpl,
+ // ViewRootImpl always consumes the keys. In this case, the application loses
+ // a chance to handle media keys. Therefore, media keys are not dispatched to
+ // ViewRootImpl.
+ skipDispatchToOverlayView = KeyEvent.isMediaKey(keyEvent.getKeyCode())
+ || keyEvent.getKeyCode() == KeyEvent.KEYCODE_MEDIA_AUDIO_TRACK;
+ } else if (event instanceof MotionEvent) {
+ // Routes motion events to the matching session callback by source class.
+ MotionEvent motionEvent = (MotionEvent) event;
+ final int source = motionEvent.getSource();
+ if (motionEvent.isTouchEvent()) {
+ if (onTouchEvent(motionEvent)) {
+ return TvInputManager.Session.DISPATCH_HANDLED;
+ }
+ } else if ((source & InputDevice.SOURCE_CLASS_TRACKBALL) != 0) {
+ if (onTrackballEvent(motionEvent)) {
+ return TvInputManager.Session.DISPATCH_HANDLED;
+ }
+ } else {
+ if (onGenericMotionEvent(motionEvent)) {
+ return TvInputManager.Session.DISPATCH_HANDLED;
+ }
+ }
+ }
+ // Without an attached overlay (or for media keys), the event goes back to the app.
+ if (mOverlayViewContainer == null || !mOverlayViewContainer.isAttachedToWindow()
+ || skipDispatchToOverlayView) {
+ return TvInputManager.Session.DISPATCH_NOT_HANDLED;
+ }
+ if (!mOverlayViewContainer.hasWindowFocus()) {
+ mOverlayViewContainer.getViewRootImpl().windowFocusChanged(true, true);
+ }
+ if (isNavigationKey && mOverlayViewContainer.hasFocusable()) {
+ // If mOverlayView has focusable views, navigation key events should be always
+ // handled. If not, it can make the application UI navigation messed up.
+ // For example, in the case that the left-most view is focused, a left key event
+ // will not be handled in ViewRootImpl. Then, the left key event will be handled in
+ // the application during the UI navigation of the TV input.
+ mOverlayViewContainer.getViewRootImpl().dispatchInputEvent(event);
+ return TvInputManager.Session.DISPATCH_HANDLED;
+ } else {
+ // Asynchronous dispatch; the receiver is notified once the overlay finishes.
+ mOverlayViewContainer.getViewRootImpl().dispatchInputEvent(event, receiver);
+ return TvInputManager.Session.DISPATCH_IN_PROGRESS;
+ }
+ }
+
+ private void initialize(ITvInputSessionCallback callback) {
+ synchronized(mLock) {
+ mSessionCallback = callback;
+ // Flushes notifications that were queued before the callback was attached.
+ for (Runnable runnable : mPendingActions) {
+ runnable.run();
+ }
+ mPendingActions.clear();
+ }
+ }
+
+ private void executeOrPostRunnableOnMainThread(Runnable action) {
+ // Queues until initialize(), runs inline when already on the main thread, and posts
+ // to the main-thread handler otherwise.
+ synchronized(mLock) {
+ if (mSessionCallback == null) {
+ // The session is not initialized yet.
+ mPendingActions.add(action);
+ } else {
+ if (mHandler.getLooper().isCurrentThread()) {
+ action.run();
+ } else {
+ // Posts the runnable if this is not called from the main thread
+ mHandler.post(action);
+ }
+ }
+ }
+ }
+
+ /**
+ * Self-rescheduling task that polls the subclass for time-shift start/current positions
+ * and notifies the application only when a value changes.
+ */
+ private final class TimeShiftPositionTrackingRunnable implements Runnable {
+ @Override
+ public void run() {
+ long startPositionMs = onTimeShiftGetStartPosition();
+ if (mStartPositionMs == TvInputManager.TIME_SHIFT_INVALID_TIME
+ || mStartPositionMs != startPositionMs) {
+ mStartPositionMs = startPositionMs;
+ notifyTimeShiftStartPositionChanged(startPositionMs);
+ }
+ long currentPositionMs = onTimeShiftGetCurrentPosition();
+ // Clamps a current position that drifted before the start position.
+ if (currentPositionMs < mStartPositionMs) {
+ Log.w(TAG, "Current position (" + currentPositionMs + ") cannot be earlier than"
+ + " start position (" + mStartPositionMs + "). Reset to the start "
+ + "position.");
+ currentPositionMs = mStartPositionMs;
+ }
+ if (mCurrentPositionMs == TvInputManager.TIME_SHIFT_INVALID_TIME
+ || mCurrentPositionMs != currentPositionMs) {
+ mCurrentPositionMs = currentPositionMs;
+ notifyTimeShiftCurrentPositionChanged(currentPositionMs);
+ }
+ // Reschedules itself; removeCallbacks first guards against double-posting.
+ mHandler.removeCallbacks(mTimeShiftPositionTrackingRunnable);
+ mHandler.postDelayed(mTimeShiftPositionTrackingRunnable,
+ POSITION_UPDATE_INTERVAL_MS);
+ }
+ }
+ }
+
+ /**
+ * Watchdog task: waits {@code DETACH_OVERLAY_VIEW_TIMEOUT_MS} and then kills this process
+ * if the overlay view container is still attached to a window, on the assumption that the
+ * main thread is hung during overlay detach.
+ */
+ private static final class OverlayViewCleanUpTask extends AsyncTask<View, Void, Void> {
+ @Override
+ protected Void doInBackground(View... views) {
+ View overlayViewParent = views[0];
+ try {
+ Thread.sleep(DETACH_OVERLAY_VIEW_TIMEOUT_MS);
+ } catch (InterruptedException e) {
+ // Cancellation interrupts the sleep; nothing to clean up.
+ return null;
+ }
+ if (isCancelled()) {
+ return null;
+ }
+ if (overlayViewParent.isAttachedToWindow()) {
+ Log.e(TAG, "Time out on releasing overlay view. Killing "
+ + overlayViewParent.getContext().getPackageName());
+ Process.killProcess(Process.myPid());
+ }
+ return null;
+ }
+ }
+
+ /**
+ * Base class for derived classes to implement to provide a TV input recording session.
+ */
+ public abstract static class RecordingSession {
+ // Handler bound to the application's main looper; all callback delivery happens here.
+ final Handler mHandler;
+
+ private final Object mLock = new Object();
+ // @GuardedBy("mLock")
+ private ITvInputSessionCallback mSessionCallback;
+ // @GuardedBy("mLock")
+ // Actions queued before initialize() attaches the callback; replayed afterwards.
+ private final List<Runnable> mPendingActions = new ArrayList<>();
+
+ /**
+ * Creates a new RecordingSession.
+ *
+ * @param context The context of the application
+ */
+ public RecordingSession(Context context) {
+ mHandler = new Handler(context.getMainLooper());
+ }
+
+ /**
+ * Informs the application that this recording session has been tuned to the given channel
+ * and is ready to start recording.
+ *
+ * <p>Upon receiving a call to {@link #onTune(Uri)}, the session is expected to tune to the
+ * passed channel and call this method to indicate that it is now available for immediate
+ * recording. When {@link #onStartRecording(Uri)} is called, recording must start with
+ * minimal delay.
+ *
+ * @param channelUri The URI of a channel.
+ */
+ public void notifyTuned(Uri channelUri) {
+ executeOrPostRunnableOnMainThread(new Runnable() {
+ @MainThread
+ @Override
+ public void run() {
+ try {
+ if (DEBUG) Log.d(TAG, "notifyTuned");
+ // The callback may have been cleared if the session was released.
+ if (mSessionCallback != null) {
+ mSessionCallback.onTuned(channelUri);
+ }
+ } catch (RemoteException e) {
+ Log.w(TAG, "error in notifyTuned", e);
+ }
+ }
+ });
+ }
+
+ /**
+ * Informs the application that this recording session has stopped recording and created a
+ * new data entry in the {@link TvContract.RecordedPrograms} table that describes the newly
+ * recorded program.
+ *
+ * <p>The recording session must call this method in response to {@link #onStopRecording()}.
+ * The session may call it even before receiving a call to {@link #onStopRecording()} if a
+ * partially recorded program is available when there is an error.
+ *
+ * @param recordedProgramUri The URI of the newly recorded program.
+ */
+ public void notifyRecordingStopped(final Uri recordedProgramUri) {
+ executeOrPostRunnableOnMainThread(new Runnable() {
+ @MainThread
+ @Override
+ public void run() {
+ try {
+ if (DEBUG) Log.d(TAG, "notifyRecordingStopped");
+ if (mSessionCallback != null) {
+ mSessionCallback.onRecordingStopped(recordedProgramUri);
+ }
+ } catch (RemoteException e) {
+ Log.w(TAG, "error in notifyRecordingStopped", e);
+ }
+ }
+ });
+ }
+
+ /**
+ * Informs the application that there is an error and this recording session is no longer
+ * able to start or continue recording. It may be called at any time after the recording
+ * session is created until {@link #onRelease()} is called.
+ *
+ * <p>The application may release the current session upon receiving the error code through
+ * {@link TvRecordingClient.RecordingCallback#onError(int)}. The session may call
+ * {@link #notifyRecordingStopped(Uri)} if a partially recorded but still playable program
+ * is available, before calling this method.
+ *
+ * @param error The error code. Should be one of the followings.
+ * <ul>
+ * <li>{@link TvInputManager#RECORDING_ERROR_UNKNOWN}
+ * <li>{@link TvInputManager#RECORDING_ERROR_INSUFFICIENT_SPACE}
+ * <li>{@link TvInputManager#RECORDING_ERROR_RESOURCE_BUSY}
+ * </ul>
+ */
+ public void notifyError(@TvInputManager.RecordingError int error) {
+ // Out-of-range codes are clamped to RECORDING_ERROR_UNKNOWN rather than rejected,
+ // so the application always receives a valid error constant.
+ if (error < TvInputManager.RECORDING_ERROR_START
+ || error > TvInputManager.RECORDING_ERROR_END) {
+ Log.w(TAG, "notifyError - invalid error code (" + error
+ + ") is changed to RECORDING_ERROR_UNKNOWN.");
+ error = TvInputManager.RECORDING_ERROR_UNKNOWN;
+ }
+ // Effectively-final copy for capture by the anonymous Runnable.
+ final int validError = error;
+ executeOrPostRunnableOnMainThread(new Runnable() {
+ @MainThread
+ @Override
+ public void run() {
+ try {
+ if (DEBUG) Log.d(TAG, "notifyError");
+ if (mSessionCallback != null) {
+ mSessionCallback.onError(validError);
+ }
+ } catch (RemoteException e) {
+ Log.w(TAG, "error in notifyError", e);
+ }
+ }
+ });
+ }
+
+ /**
+ * Dispatches an event to the application using this recording session.
+ *
+ * @param eventType The type of the event.
+ * @param eventArgs Optional arguments of the event.
+ * @hide
+ */
+ @SystemApi
+ public void notifySessionEvent(@NonNull final String eventType, final Bundle eventArgs) {
+ Preconditions.checkNotNull(eventType);
+ executeOrPostRunnableOnMainThread(new Runnable() {
+ @MainThread
+ @Override
+ public void run() {
+ try {
+ if (DEBUG) Log.d(TAG, "notifySessionEvent(" + eventType + ")");
+ if (mSessionCallback != null) {
+ mSessionCallback.onSessionEvent(eventType, eventArgs);
+ }
+ } catch (RemoteException e) {
+ Log.w(TAG, "error in sending event (event=" + eventType + ")", e);
+ }
+ }
+ });
+ }
+
+ /**
+ * Called when the application requests to tune to a given channel for TV program recording.
+ *
+ * <p>The application may call this method before starting or after stopping recording, but
+ * not during recording.
+ *
+ * <p>The session must call {@link #notifyTuned(Uri)} if the tune request was fulfilled, or
+ * {@link #notifyError(int)} otherwise.
+ *
+ * @param channelUri The URI of a channel.
+ */
+ public abstract void onTune(Uri channelUri);
+
+ /**
+ * Called when the application requests to tune to a given channel for TV program recording.
+ * Override this method in order to handle domain-specific features that are only known
+ * between certain TV inputs and their clients.
+ *
+ * <p>The application may call this method before starting or after stopping recording, but
+ * not during recording. The default implementation calls {@link #onTune(Uri)}.
+ *
+ * <p>The session must call {@link #notifyTuned(Uri)} if the tune request was fulfilled, or
+ * {@link #notifyError(int)} otherwise.
+ *
+ * @param channelUri The URI of a channel.
+ * @param params Domain-specific data for this tune request. Keys <em>must</em> be a scoped
+ * name, i.e. prefixed with a package name you own, so that different developers
+ * will not create conflicting keys.
+ */
+ public void onTune(Uri channelUri, Bundle params) {
+ // Default implementation ignores the params and delegates to the simple overload.
+ onTune(channelUri);
+ }
+
+ /**
+ * Called when the application requests to start TV program recording. Recording must start
+ * immediately when this method is called.
+ *
+ * <p>The application may supply the URI for a TV program for filling in program specific
+ * data fields in the {@link android.media.tv.TvContract.RecordedPrograms} table.
+ * A non-null {@code programUri} implies the started recording should be of that specific
+ * program, whereas null {@code programUri} does not impose such a requirement and the
+ * recording can span across multiple TV programs. In either case, the application must call
+ * {@link TvRecordingClient#stopRecording()} to stop the recording.
+ *
+ * <p>The session must call {@link #notifyError(int)} if the start request cannot be
+ * fulfilled.
+ *
+ * @param programUri The URI for the TV program to record, built by
+ * {@link TvContract#buildProgramUri(long)}. Can be {@code null}.
+ */
+ public abstract void onStartRecording(@Nullable Uri programUri);
+
+ /**
+ * Called when the application requests to stop TV program recording. Recording must stop
+ * immediately when this method is called.
+ *
+ * <p>The session must create a new data entry in the
+ * {@link android.media.tv.TvContract.RecordedPrograms} table that describes the newly
+ * recorded program and call {@link #notifyRecordingStopped(Uri)} with the URI to that
+ * entry.
+ * If the stop request cannot be fulfilled, the session must call {@link #notifyError(int)}.
+ *
+ */
+ public abstract void onStopRecording();
+
+
+ /**
+ * Called when the application requests to release all the resources held by this recording
+ * session.
+ */
+ public abstract void onRelease();
+
+ /**
+ * Processes a private command sent from the application to the TV input. This can be used
+ * to provide domain-specific features that are only known between certain TV inputs and
+ * their clients.
+ *
+ * @param action Name of the command to be performed. This <em>must</em> be a scoped name,
+ * i.e. prefixed with a package name you own, so that different developers will
+ * not create conflicting commands.
+ * @param data Any data to include with the command.
+ */
+ public void onAppPrivateCommand(@NonNull String action, Bundle data) {
+ }
+
+ /**
+ * Calls {@link #onTune(Uri, Bundle)}.
+ *
+ */
+ void tune(Uri channelUri, Bundle params) {
+ // Framework-internal entry point; forwards directly to the subclass hook.
+ onTune(channelUri, params);
+ }
+
+ /**
+ * Calls {@link #onRelease()}.
+ *
+ */
+ void release() {
+ onRelease();
+ }
+
+ /**
+ * Calls {@link #onStartRecording(Uri)}.
+ *
+ */
+ void startRecording(@Nullable Uri programUri) {
+ onStartRecording(programUri);
+ }
+
+ /**
+ * Calls {@link #onStopRecording()}.
+ *
+ */
+ void stopRecording() {
+ onStopRecording();
+ }
+
+ /**
+ * Calls {@link #onAppPrivateCommand(String, Bundle)}.
+ */
+ void appPrivateCommand(String action, Bundle data) {
+ onAppPrivateCommand(action, data);
+ }
+
+ private void initialize(ITvInputSessionCallback callback) {
+ synchronized(mLock) {
+ mSessionCallback = callback;
+ // Flushes notifications that were queued before the callback was attached.
+ for (Runnable runnable : mPendingActions) {
+ runnable.run();
+ }
+ mPendingActions.clear();
+ }
+ }
+
+ private void executeOrPostRunnableOnMainThread(Runnable action) {
+ // Queues until initialize(), runs inline when already on the main thread, and
+ // posts to the main-thread handler otherwise.
+ synchronized(mLock) {
+ if (mSessionCallback == null) {
+ // The session is not initialized yet.
+ mPendingActions.add(action);
+ } else {
+ if (mHandler.getLooper().isCurrentThread()) {
+ action.run();
+ } else {
+ // Posts the runnable if this is not called from the main thread
+ mHandler.post(action);
+ }
+ }
+ }
+ }
+ }
+
+ /**
+ * Base class for a TV input session which represents an external device connected to a
+ * hardware TV input.
+ *
+ * <p>This class is for an input which provides channels for the external set-top box to the
+ * application. Once a TV input returns an implementation of this class on
+ * {@link #onCreateSession(String)}, the framework will create a separate session for
+ * a hardware TV Input (e.g. HDMI 1) and forward the application's surface to the session so
+ * that the user can see the screen of the hardware TV Input when she tunes to a channel from
+ * this TV input. The implementation of this class is expected to change the channel of the
+ * external set-top box via a proprietary protocol when {@link HardwareSession#onTune} is
+ * requested by the application.
+ *
+ * <p>Note that this class is not for inputs for internal hardware like built-in tuner and HDMI
+ * 1.
+ *
+ * @see #onCreateSession(String)
+ */
+ public abstract static class HardwareSession extends Session {
+
+ /**
+ * Creates a new HardwareSession.
+ *
+ * @param context The context of the application
+ */
+ public HardwareSession(Context context) {
+ super(context);
+ }
+
+ // The underlying session on the hardware input that this proxy session is bound to.
+ private TvInputManager.Session mHardwareSession;
+ // Stub and callback populated by ServiceHandler.DO_CREATE_SESSION before the hardware
+ // session is requested.
+ private ITvInputSession mProxySession;
+ private ITvInputSessionCallback mProxySessionCallback;
+ private Handler mServiceHandler;
+
+ /**
+ * Returns the hardware TV input ID the external device is connected to.
+ *
+ * <p>TV input is expected to provide {@link android.R.attr#setupActivity} so that
+ * the application can launch it before using this TV input. The setup activity may let
+ * the user select the hardware TV input to which the external device is connected. The ID
+ * of the selected one should be stored in the TV input so that it can be returned here.
+ */
+ public abstract String getHardwareInputId();
+
+ private final TvInputManager.SessionCallback mHardwareSessionCallback =
+ new TvInputManager.SessionCallback() {
+ @Override
+ public void onSessionCreated(TvInputManager.Session session) {
+ mHardwareSession = session;
+ SomeArgs args = SomeArgs.obtain();
+ if (session != null) {
+ // Success: tune the hardware session to its passthrough channel and
+ // hand the proxy stub/callback/token to DO_NOTIFY_SESSION_CREATED.
+ args.arg1 = HardwareSession.this;
+ args.arg2 = mProxySession;
+ args.arg3 = mProxySessionCallback;
+ args.arg4 = session.getToken();
+ session.tune(TvContract.buildChannelUriForPassthroughInput(
+ getHardwareInputId()));
+ } else {
+ // Failure: release this proxy session and report a null session to
+ // the application.
+ args.arg1 = null;
+ args.arg2 = null;
+ args.arg3 = mProxySessionCallback;
+ args.arg4 = null;
+ onRelease();
+ }
+ mServiceHandler.obtainMessage(ServiceHandler.DO_NOTIFY_SESSION_CREATED, args)
+ .sendToTarget();
+ }
+
+ @Override
+ public void onVideoAvailable(final TvInputManager.Session session) {
+ if (mHardwareSession == session) {
+ onHardwareVideoAvailable();
+ }
+ }
+
+ @Override
+ public void onVideoUnavailable(final TvInputManager.Session session,
+ final int reason) {
+ if (mHardwareSession == session) {
+ onHardwareVideoUnavailable(reason);
+ }
+ }
+ };
+
+ /**
+ * This method will not be called in {@link HardwareSession}. Framework will
+ * forward the application's surface to the hardware TV input.
+ */
+ @Override
+ public final boolean onSetSurface(Surface surface) {
+ Log.e(TAG, "onSetSurface() should not be called in HardwareProxySession.");
+ return false;
+ }
+
+ /**
+ * Called when the underlying hardware TV input session calls
+ * {@link TvInputService.Session#notifyVideoAvailable()}.
+ */
+ public void onHardwareVideoAvailable() { }
+
+ /**
+ * Called when the underlying hardware TV input session calls
+ * {@link TvInputService.Session#notifyVideoUnavailable(int)}.
+ *
+ * @param reason The reason that the hardware TV input stopped the playback:
+ * <ul>
+ * <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_UNKNOWN}
+ * <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_TUNING}
+ * <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_WEAK_SIGNAL}
+ * <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_BUFFERING}
+ * <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_AUDIO_ONLY}
+ * </ul>
+ */
+ public void onHardwareVideoUnavailable(int reason) { }
+
+ @Override
+ void release() {
+ // Releases the bound hardware session before the base Session cleanup runs.
+ if (mHardwareSession != null) {
+ mHardwareSession.release();
+ mHardwareSession = null;
+ }
+ super.release();
+ }
+ }
+
+ /** @hide */
+ // Returns true for key codes used for focus navigation (d-pad, paging, home/end,
+ // tab/space/enter); these get special handling in dispatchInputEvent().
+ public static boolean isNavigationKey(int keyCode) {
+ switch (keyCode) {
+ case KeyEvent.KEYCODE_DPAD_LEFT:
+ case KeyEvent.KEYCODE_DPAD_RIGHT:
+ case KeyEvent.KEYCODE_DPAD_UP:
+ case KeyEvent.KEYCODE_DPAD_DOWN:
+ case KeyEvent.KEYCODE_DPAD_CENTER:
+ case KeyEvent.KEYCODE_PAGE_UP:
+ case KeyEvent.KEYCODE_PAGE_DOWN:
+ case KeyEvent.KEYCODE_MOVE_HOME:
+ case KeyEvent.KEYCODE_MOVE_END:
+ case KeyEvent.KEYCODE_TAB:
+ case KeyEvent.KEYCODE_SPACE:
+ case KeyEvent.KEYCODE_ENTER:
+ return true;
+ }
+ return false;
+ }
+
+ @SuppressLint("HandlerLeak")
+ // Service-internal message loop that creates sessions and broadcasts hardware/HDMI input
+ // changes to registered callbacks. Non-static, so it holds a reference to the enclosing
+ // service — presumably acceptable because the handler's lifetime matches the service's;
+ // hence the HandlerLeak suppression (NOTE(review): confirm).
+ private final class ServiceHandler extends Handler {
+ private static final int DO_CREATE_SESSION = 1;
+ private static final int DO_NOTIFY_SESSION_CREATED = 2;
+ private static final int DO_CREATE_RECORDING_SESSION = 3;
+ private static final int DO_ADD_HARDWARE_INPUT = 4;
+ private static final int DO_REMOVE_HARDWARE_INPUT = 5;
+ private static final int DO_ADD_HDMI_INPUT = 6;
+ private static final int DO_REMOVE_HDMI_INPUT = 7;
+
+ // Notifies every registered callback of a newly added hardware input; per-receiver
+ // RemoteExceptions are logged and do not abort the broadcast.
+ private void broadcastAddHardwareInput(int deviceId, TvInputInfo inputInfo) {
+ int n = mCallbacks.beginBroadcast();
+ for (int i = 0; i < n; ++i) {
+ try {
+ mCallbacks.getBroadcastItem(i).addHardwareInput(deviceId, inputInfo);
+ } catch (RemoteException e) {
+ Log.e(TAG, "error in broadcastAddHardwareInput", e);
+ }
+ }
+ mCallbacks.finishBroadcast();
+ }
+
+ // Same broadcast pattern for a newly added HDMI input.
+ private void broadcastAddHdmiInput(int id, TvInputInfo inputInfo) {
+ int n = mCallbacks.beginBroadcast();
+ for (int i = 0; i < n; ++i) {
+ try {
+ mCallbacks.getBroadcastItem(i).addHdmiInput(id, inputInfo);
+ } catch (RemoteException e) {
+ Log.e(TAG, "error in broadcastAddHdmiInput", e);
+ }
+ }
+ mCallbacks.finishBroadcast();
+ }
+
+ // Removal broadcast; also reused for HDMI input removal (see DO_REMOVE_HDMI_INPUT).
+ private void broadcastRemoveHardwareInput(String inputId) {
+ int n = mCallbacks.beginBroadcast();
+ for (int i = 0; i < n; ++i) {
+ try {
+ mCallbacks.getBroadcastItem(i).removeHardwareInput(inputId);
+ } catch (RemoteException e) {
+ Log.e(TAG, "error in broadcastRemoveHardwareInput", e);
+ }
+ }
+ mCallbacks.finishBroadcast();
+ }
+
+ @Override
+ public final void handleMessage(Message msg) {
+ switch (msg.what) {
+ case DO_CREATE_SESSION: {
+ SomeArgs args = (SomeArgs) msg.obj;
+ InputChannel channel = (InputChannel) args.arg1;
+ ITvInputSessionCallback cb = (ITvInputSessionCallback) args.arg2;
+ String inputId = (String) args.arg3;
+ args.recycle();
+ Session sessionImpl = onCreateSession(inputId);
+ if (sessionImpl == null) {
+ try {
+ // Failed to create a session.
+ cb.onSessionCreated(null, null);
+ } catch (RemoteException e) {
+ Log.e(TAG, "error in onSessionCreated", e);
+ }
+ return;
+ }
+ ITvInputSession stub = new ITvInputSessionWrapper(TvInputService.this,
+ sessionImpl, channel);
+ if (sessionImpl instanceof HardwareSession) {
+ // Hardware proxy sessions are completed asynchronously: the reply to
+ // the application is deferred until the hardware session is created
+ // (see HardwareSession.mHardwareSessionCallback).
+ HardwareSession proxySession =
+ ((HardwareSession) sessionImpl);
+ String hardwareInputId = proxySession.getHardwareInputId();
+ if (TextUtils.isEmpty(hardwareInputId) ||
+ !isPassthroughInput(hardwareInputId)) {
+ if (TextUtils.isEmpty(hardwareInputId)) {
+ Log.w(TAG, "Hardware input id is not setup yet.");
+ } else {
+ Log.w(TAG, "Invalid hardware input id : " + hardwareInputId);
+ }
+ sessionImpl.onRelease();
+ try {
+ cb.onSessionCreated(null, null);
+ } catch (RemoteException e) {
+ Log.e(TAG, "error in onSessionCreated", e);
+ }
+ return;
+ }
+ proxySession.mProxySession = stub;
+ proxySession.mProxySessionCallback = cb;
+ proxySession.mServiceHandler = mServiceHandler;
+ TvInputManager manager = (TvInputManager) getSystemService(
+ Context.TV_INPUT_SERVICE);
+ manager.createSession(hardwareInputId,
+ proxySession.mHardwareSessionCallback, mServiceHandler);
+ } else {
+ // Regular sessions complete immediately via DO_NOTIFY_SESSION_CREATED.
+ SomeArgs someArgs = SomeArgs.obtain();
+ someArgs.arg1 = sessionImpl;
+ someArgs.arg2 = stub;
+ someArgs.arg3 = cb;
+ someArgs.arg4 = null;
+ mServiceHandler.obtainMessage(ServiceHandler.DO_NOTIFY_SESSION_CREATED,
+ someArgs).sendToTarget();
+ }
+ return;
+ }
+ case DO_NOTIFY_SESSION_CREATED: {
+ SomeArgs args = (SomeArgs) msg.obj;
+ Session sessionImpl = (Session) args.arg1;
+ ITvInputSession stub = (ITvInputSession) args.arg2;
+ ITvInputSessionCallback cb = (ITvInputSessionCallback) args.arg3;
+ IBinder hardwareSessionToken = (IBinder) args.arg4;
+ try {
+ cb.onSessionCreated(stub, hardwareSessionToken);
+ } catch (RemoteException e) {
+ Log.e(TAG, "error in onSessionCreated", e);
+ }
+ // Attaching the callback also flushes any notifications queued before
+ // initialization.
+ if (sessionImpl != null) {
+ sessionImpl.initialize(cb);
+ }
+ args.recycle();
+ return;
+ }
+ case DO_CREATE_RECORDING_SESSION: {
+ SomeArgs args = (SomeArgs) msg.obj;
+ ITvInputSessionCallback cb = (ITvInputSessionCallback) args.arg1;
+ String inputId = (String) args.arg2;
+ args.recycle();
+ RecordingSession recordingSessionImpl = onCreateRecordingSession(inputId);
+ if (recordingSessionImpl == null) {
+ try {
+ // Failed to create a recording session.
+ cb.onSessionCreated(null, null);
+ } catch (RemoteException e) {
+ Log.e(TAG, "error in onSessionCreated", e);
+ }
+ return;
+ }
+ ITvInputSession stub = new ITvInputSessionWrapper(TvInputService.this,
+ recordingSessionImpl);
+ try {
+ cb.onSessionCreated(stub, null);
+ } catch (RemoteException e) {
+ Log.e(TAG, "error in onSessionCreated", e);
+ }
+ recordingSessionImpl.initialize(cb);
+ return;
+ }
+ case DO_ADD_HARDWARE_INPUT: {
+ TvInputHardwareInfo hardwareInfo = (TvInputHardwareInfo) msg.obj;
+ TvInputInfo inputInfo = onHardwareAdded(hardwareInfo);
+ // A null TvInputInfo means the service declined to expose this device.
+ if (inputInfo != null) {
+ broadcastAddHardwareInput(hardwareInfo.getDeviceId(), inputInfo);
+ }
+ return;
+ }
+ case DO_REMOVE_HARDWARE_INPUT: {
+ TvInputHardwareInfo hardwareInfo = (TvInputHardwareInfo) msg.obj;
+ String inputId = onHardwareRemoved(hardwareInfo);
+ if (inputId != null) {
+ broadcastRemoveHardwareInput(inputId);
+ }
+ return;
+ }
+ case DO_ADD_HDMI_INPUT: {
+ HdmiDeviceInfo deviceInfo = (HdmiDeviceInfo) msg.obj;
+ TvInputInfo inputInfo = onHdmiDeviceAdded(deviceInfo);
+ if (inputInfo != null) {
+ broadcastAddHdmiInput(deviceInfo.getId(), inputInfo);
+ }
+ return;
+ }
+ case DO_REMOVE_HDMI_INPUT: {
+ HdmiDeviceInfo deviceInfo = (HdmiDeviceInfo) msg.obj;
+ String inputId = onHdmiDeviceRemoved(deviceInfo);
+ // HDMI removal reuses the hardware-input removal broadcast.
+ if (inputId != null) {
+ broadcastRemoveHardwareInput(inputId);
+ }
+ return;
+ }
+ default: {
+ Log.w(TAG, "Unhandled message code: " + msg.what);
+ return;
+ }
+ }
+ }
+ }
+}
diff --git a/android/media/tv/TvRecordingClient.java b/android/media/tv/TvRecordingClient.java
new file mode 100644
index 00000000..5aadeb6e
--- /dev/null
+++ b/android/media/tv/TvRecordingClient.java
@@ -0,0 +1,405 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv;
+
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.annotation.SystemApi;
+import android.content.Context;
+import android.media.tv.TvInputManager;
+import android.net.Uri;
+import android.os.Bundle;
+import android.os.Handler;
+import android.os.Looper;
+import android.text.TextUtils;
+import android.util.Log;
+import android.util.Pair;
+
+import java.util.ArrayDeque;
+import java.util.Queue;
+
+/**
+ * The public interface object used to interact with a specific TV input service for TV program
+ * recording.
+ */
+public class TvRecordingClient {
+ private static final String TAG = "TvRecordingClient";
+ private static final boolean DEBUG = false;
+
+ private final RecordingCallback mCallback;
+ private final Handler mHandler;
+
+ private final TvInputManager mTvInputManager;
+ private TvInputManager.Session mSession;
+ private MySessionCallback mSessionCallback;
+
+ private boolean mIsRecordingStarted;
+ private boolean mIsTuned;
+ private final Queue<Pair<String, Bundle>> mPendingAppPrivateCommands = new ArrayDeque<>();
+
+ /**
+ * Creates a new TvRecordingClient object.
+ *
+ * @param context The application context to create a TvRecordingClient with.
+ * @param tag A short name for debugging purposes.
+ * @param callback The callback to receive recording status changes.
+ * @param handler The handler to invoke the callback on.
+ */
+ public TvRecordingClient(Context context, String tag, @NonNull RecordingCallback callback,
+ Handler handler) {
+ mCallback = callback;
+ mHandler = handler == null ? new Handler(Looper.getMainLooper()) : handler;
+ mTvInputManager = (TvInputManager) context.getSystemService(Context.TV_INPUT_SERVICE);
+ }
+
+ /**
+ * Tunes to a given channel for TV program recording. The first tune request will create a new
+ * recording session for the corresponding TV input and establish a connection between the
+ * application and the session. If recording has already started in the current recording
+ * session, this method throws an exception.
+ *
+ * <p>The application may call this method before starting or after stopping recording, but not
+ * during recording.
+ *
+ * <p>The recording session will respond by calling
+ * {@link RecordingCallback#onTuned(Uri)} if the tune request was fulfilled, or
+ * {@link RecordingCallback#onError(int)} otherwise.
+ *
+ * @param inputId The ID of the TV input for the given channel.
+ * @param channelUri The URI of a channel.
+ * @throws IllegalStateException If recording is already started.
+ */
+ public void tune(String inputId, Uri channelUri) {
+ tune(inputId, channelUri, null);
+ }
+
+ /**
+ * Tunes to a given channel for TV program recording. The first tune request will create a new
+ * recording session for the corresponding TV input and establish a connection between the
+ * application and the session. If recording has already started in the current recording
+ * session, this method throws an exception. This can be used to provide domain-specific
+ * features that are only known between certain client and their TV inputs.
+ *
+ * <p>The application may call this method before starting or after stopping recording, but not
+ * during recording.
+ *
+ * <p>The recording session will respond by calling
+ * {@link RecordingCallback#onTuned(Uri)} if the tune request was fulfilled, or
+ * {@link RecordingCallback#onError(int)} otherwise.
+ *
+ * @param inputId The ID of the TV input for the given channel.
+ * @param channelUri The URI of a channel.
+ * @param params Domain-specific data for this tune request. Keys <em>must</em> be a scoped
+ * name, i.e. prefixed with a package name you own, so that different developers will
+ * not create conflicting keys.
+ * @throws IllegalStateException If recording is already started.
+ */
+ public void tune(String inputId, Uri channelUri, Bundle params) {
+ if (DEBUG) Log.d(TAG, "tune(" + channelUri + ")");
+ if (TextUtils.isEmpty(inputId)) {
+ throw new IllegalArgumentException("inputId cannot be null or an empty string");
+ }
+ if (mIsRecordingStarted) {
+ throw new IllegalStateException("tune failed - recording already started");
+ }
+ if (mSessionCallback != null && TextUtils.equals(mSessionCallback.mInputId, inputId)) {
+ if (mSession != null) {
+ mSession.tune(channelUri, params);
+ } else {
+ mSessionCallback.mChannelUri = channelUri;
+ mSessionCallback.mConnectionParams = params;
+ }
+ } else {
+ resetInternal();
+ mSessionCallback = new MySessionCallback(inputId, channelUri, params);
+ if (mTvInputManager != null) {
+ mTvInputManager.createRecordingSession(inputId, mSessionCallback, mHandler);
+ }
+ }
+ }
+
+ /**
+ * Releases the resources in the current recording session immediately. This may be called at
+ * any time, however if the session is already released, it does nothing.
+ */
+ public void release() {
+ if (DEBUG) Log.d(TAG, "release()");
+ resetInternal();
+ }
+
+ private void resetInternal() {
+ mSessionCallback = null;
+ mPendingAppPrivateCommands.clear();
+ if (mSession != null) {
+ mSession.release();
+ mSession = null;
+ }
+ }
+
+ /**
+ * Starts TV program recording in the current recording session. Recording is expected to start
+ * immediately when this method is called. If the current recording session has not yet tuned to
+ * any channel, this method throws an exception.
+ *
+ * <p>The application may supply the URI for a TV program for filling in program specific data
+ * fields in the {@link android.media.tv.TvContract.RecordedPrograms} table.
+ * A non-null {@code programUri} implies the started recording should be of that specific
+ * program, whereas null {@code programUri} does not impose such a requirement and the
+ * recording can span across multiple TV programs. In either case, the application must call
+ * {@link TvRecordingClient#stopRecording()} to stop the recording.
+ *
+ * <p>The recording session will respond by calling {@link RecordingCallback#onError(int)} if
+ * the start request cannot be fulfilled.
+ *
+ * @param programUri The URI for the TV program to record, built by
+ * {@link TvContract#buildProgramUri(long)}. Can be {@code null}.
+ * @throws IllegalStateException If {@link #tune} request hasn't been handled yet.
+ */
+ public void startRecording(@Nullable Uri programUri) {
+ if (!mIsTuned) {
+ throw new IllegalStateException("startRecording failed - not yet tuned");
+ }
+ if (mSession != null) {
+ mSession.startRecording(programUri);
+ mIsRecordingStarted = true;
+ }
+ }
+
+ /**
+ * Stops TV program recording in the current recording session. Recording is expected to stop
+ * immediately when this method is called. If recording has not yet started in the current
+ * recording session, this method does nothing.
+ *
+ * <p>The recording session is expected to create a new data entry in the
+ * {@link android.media.tv.TvContract.RecordedPrograms} table that describes the newly
+ * recorded program and pass the URI to that entry through to
+ * {@link RecordingCallback#onRecordingStopped(Uri)}.
+ * If the stop request cannot be fulfilled, the recording session will respond by calling
+ * {@link RecordingCallback#onError(int)}.
+ */
+ public void stopRecording() {
+ if (!mIsRecordingStarted) {
+ Log.w(TAG, "stopRecording failed - recording not yet started");
+ }
+ if (mSession != null) {
+ mSession.stopRecording();
+ }
+ }
+
+ /**
+ * Sends a private command to the underlying TV input. This can be used to provide
+ * domain-specific features that are only known between certain clients and their TV inputs.
+ *
+ * @param action The name of the private command to send. This <em>must</em> be a scoped name,
+ * i.e. prefixed with a package name you own, so that different developers will not
+ * create conflicting commands.
+ * @param data An optional bundle to send with the command.
+ */
+ public void sendAppPrivateCommand(@NonNull String action, Bundle data) {
+ if (TextUtils.isEmpty(action)) {
+ throw new IllegalArgumentException("action cannot be null or an empty string");
+ }
+ if (mSession != null) {
+ mSession.sendAppPrivateCommand(action, data);
+ } else {
+ Log.w(TAG, "sendAppPrivateCommand - session not yet created (action \"" + action
+ + "\" pending)");
+ mPendingAppPrivateCommands.add(Pair.create(action, data));
+ }
+ }
+
+ /**
+ * Callback used to receive various status updates on the
+ * {@link android.media.tv.TvInputService.RecordingSession}
+ */
+ public abstract static class RecordingCallback {
+ /**
+ * This is called when an error occurred while establishing a connection to the recording
+ * session for the corresponding TV input.
+ *
+ * @param inputId The ID of the TV input bound to the current TvRecordingClient.
+ */
+ public void onConnectionFailed(String inputId) {
+ }
+
+ /**
+ * This is called when the connection to the current recording session is lost.
+ *
+ * @param inputId The ID of the TV input bound to the current TvRecordingClient.
+ */
+ public void onDisconnected(String inputId) {
+ }
+
+ /**
+ * This is called when the recording session has been tuned to the given channel and is
+ * ready to start recording.
+ *
+ * @param channelUri The URI of a channel.
+ */
+ public void onTuned(Uri channelUri) {
+ }
+
+ /**
+ * This is called when the current recording session has stopped recording and created a
+ * new data entry in the {@link TvContract.RecordedPrograms} table that describes the newly
+ * recorded program.
+ *
+ * @param recordedProgramUri The URI for the newly recorded program.
+ */
+ public void onRecordingStopped(Uri recordedProgramUri) {
+ }
+
+ /**
+ * This is called when an issue has occurred. It may be called at any time after the current
+ * recording session is created until it is released.
+ *
+ * @param error The error code. Should be one of the followings.
+ * <ul>
+ * <li>{@link TvInputManager#RECORDING_ERROR_UNKNOWN}
+ * <li>{@link TvInputManager#RECORDING_ERROR_INSUFFICIENT_SPACE}
+ * <li>{@link TvInputManager#RECORDING_ERROR_RESOURCE_BUSY}
+ * </ul>
+ */
+ public void onError(@TvInputManager.RecordingError int error) {
+ }
+
+ /**
+ * This is invoked when a custom event from the bound TV input is sent to this client.
+ *
+ * @param inputId The ID of the TV input bound to this client.
+ * @param eventType The type of the event.
+ * @param eventArgs Optional arguments of the event.
+ * @hide
+ */
+ @SystemApi
+ public void onEvent(String inputId, String eventType, Bundle eventArgs) {
+ }
+ }
+
+ private class MySessionCallback extends TvInputManager.SessionCallback {
+ final String mInputId;
+ Uri mChannelUri;
+ Bundle mConnectionParams;
+
+ MySessionCallback(String inputId, Uri channelUri, Bundle connectionParams) {
+ mInputId = inputId;
+ mChannelUri = channelUri;
+ mConnectionParams = connectionParams;
+ }
+
+ @Override
+ public void onSessionCreated(TvInputManager.Session session) {
+ if (DEBUG) {
+ Log.d(TAG, "onSessionCreated()");
+ }
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onSessionCreated - session already created");
+ // This callback is obsolete.
+ if (session != null) {
+ session.release();
+ }
+ return;
+ }
+ mSession = session;
+ if (session != null) {
+ // Sends the pending app private commands.
+ for (Pair<String, Bundle> command : mPendingAppPrivateCommands) {
+ mSession.sendAppPrivateCommand(command.first, command.second);
+ }
+ mPendingAppPrivateCommands.clear();
+ mSession.tune(mChannelUri, mConnectionParams);
+ } else {
+ mSessionCallback = null;
+ if (mCallback != null) {
+ mCallback.onConnectionFailed(mInputId);
+ }
+ }
+ }
+
+ @Override
+ void onTuned(TvInputManager.Session session, Uri channelUri) {
+ if (DEBUG) {
+ Log.d(TAG, "onTuned()");
+ }
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onTuned - session not created");
+ return;
+ }
+ mIsTuned = true;
+ mCallback.onTuned(channelUri);
+ }
+
+ @Override
+ public void onSessionReleased(TvInputManager.Session session) {
+ if (DEBUG) {
+ Log.d(TAG, "onSessionReleased()");
+ }
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onSessionReleased - session not created");
+ return;
+ }
+ mIsTuned = false;
+ mIsRecordingStarted = false;
+ mSessionCallback = null;
+ mSession = null;
+ if (mCallback != null) {
+ mCallback.onDisconnected(mInputId);
+ }
+ }
+
+ @Override
+ public void onRecordingStopped(TvInputManager.Session session, Uri recordedProgramUri) {
+ if (DEBUG) {
+ Log.d(TAG, "onRecordingStopped(recordedProgramUri= " + recordedProgramUri + ")");
+ }
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onRecordingStopped - session not created");
+ return;
+ }
+ mIsRecordingStarted = false;
+ mCallback.onRecordingStopped(recordedProgramUri);
+ }
+
+ @Override
+ public void onError(TvInputManager.Session session, int error) {
+ if (DEBUG) {
+ Log.d(TAG, "onError(error=" + error + ")");
+ }
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onError - session not created");
+ return;
+ }
+ mCallback.onError(error);
+ }
+
+ @Override
+ public void onSessionEvent(TvInputManager.Session session, String eventType,
+ Bundle eventArgs) {
+ if (DEBUG) {
+ Log.d(TAG, "onSessionEvent(eventType=" + eventType + ", eventArgs=" + eventArgs
+ + ")");
+ }
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onSessionEvent - session not created");
+ return;
+ }
+ if (mCallback != null) {
+ mCallback.onEvent(mInputId, eventType, eventArgs);
+ }
+ }
+ }
+}
diff --git a/android/media/tv/TvStreamConfig.java b/android/media/tv/TvStreamConfig.java
new file mode 100644
index 00000000..0c2f3fec
--- /dev/null
+++ b/android/media/tv/TvStreamConfig.java
@@ -0,0 +1,177 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv;
+
+import android.annotation.SystemApi;
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.util.Log;
+
+/**
+ * @hide
+ */
+@SystemApi
+public class TvStreamConfig implements Parcelable {
+ static final String TAG = TvStreamConfig.class.getSimpleName();
+
+ public final static int STREAM_TYPE_INDEPENDENT_VIDEO_SOURCE = 1;
+ public final static int STREAM_TYPE_BUFFER_PRODUCER = 2;
+
+ private int mStreamId;
+ private int mType;
+ private int mMaxWidth;
+ private int mMaxHeight;
+ /**
+ * Generations are incremented once framework receives STREAM_CONFIGURATION_CHANGED event from
+ * HAL module. Framework should throw away outdated configurations and get new configurations
+ * via tv_input_device::get_stream_configurations().
+ */
+ private int mGeneration;
+
+ public static final Parcelable.Creator<TvStreamConfig> CREATOR =
+ new Parcelable.Creator<TvStreamConfig>() {
+ @Override
+ public TvStreamConfig createFromParcel(Parcel source) {
+ try {
+ return new Builder().
+ streamId(source.readInt()).
+ type(source.readInt()).
+ maxWidth(source.readInt()).
+ maxHeight(source.readInt()).
+ generation(source.readInt()).build();
+ } catch (Exception e) {
+ Log.e(TAG, "Exception creating TvStreamConfig from parcel", e);
+ return null;
+ }
+ }
+
+ @Override
+ public TvStreamConfig[] newArray(int size) {
+ return new TvStreamConfig[size];
+ }
+ };
+
+ private TvStreamConfig() {}
+
+ public int getStreamId() {
+ return mStreamId;
+ }
+
+ public int getType() {
+ return mType;
+ }
+
+ public int getMaxWidth() {
+ return mMaxWidth;
+ }
+
+ public int getMaxHeight() {
+ return mMaxHeight;
+ }
+
+ public int getGeneration() {
+ return mGeneration;
+ }
+
+ @Override
+ public String toString() {
+ return "TvStreamConfig {mStreamId=" + mStreamId + ";" + "mType=" + mType + ";mGeneration="
+ + mGeneration + "}";
+ }
+
+ // Parcelable
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ @Override
+ public void writeToParcel(Parcel dest, int flags) {
+ dest.writeInt(mStreamId);
+ dest.writeInt(mType);
+ dest.writeInt(mMaxWidth);
+ dest.writeInt(mMaxHeight);
+ dest.writeInt(mGeneration);
+ }
+
+ /**
+ * A helper class for creating a TvStreamConfig object.
+ */
+ public static final class Builder {
+ private Integer mStreamId;
+ private Integer mType;
+ private Integer mMaxWidth;
+ private Integer mMaxHeight;
+ private Integer mGeneration;
+
+ public Builder() {
+ }
+
+ public Builder streamId(int streamId) {
+ mStreamId = streamId;
+ return this;
+ }
+
+ public Builder type(int type) {
+ mType = type;
+ return this;
+ }
+
+ public Builder maxWidth(int maxWidth) {
+ mMaxWidth = maxWidth;
+ return this;
+ }
+
+ public Builder maxHeight(int maxHeight) {
+ mMaxHeight = maxHeight;
+ return this;
+ }
+
+ public Builder generation(int generation) {
+ mGeneration = generation;
+ return this;
+ }
+
+ public TvStreamConfig build() {
+ if (mStreamId == null || mType == null || mMaxWidth == null || mMaxHeight == null
+ || mGeneration == null) {
+ throw new UnsupportedOperationException();
+ }
+
+ TvStreamConfig config = new TvStreamConfig();
+ config.mStreamId = mStreamId;
+ config.mType = mType;
+ config.mMaxWidth = mMaxWidth;
+ config.mMaxHeight = mMaxHeight;
+ config.mGeneration = mGeneration;
+ return config;
+ }
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == null) return false;
+ if (!(obj instanceof TvStreamConfig)) return false;
+
+ TvStreamConfig config = (TvStreamConfig) obj;
+ return config.mGeneration == mGeneration
+ && config.mStreamId == mStreamId
+ && config.mType == mType
+ && config.mMaxWidth == mMaxWidth
+ && config.mMaxHeight == mMaxHeight;
+ }
+}
diff --git a/android/media/tv/TvTrackInfo.java b/android/media/tv/TvTrackInfo.java
new file mode 100644
index 00000000..c9c881c6
--- /dev/null
+++ b/android/media/tv/TvTrackInfo.java
@@ -0,0 +1,498 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv;
+
+import android.annotation.IntDef;
+import android.annotation.NonNull;
+import android.os.Bundle;
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.text.TextUtils;
+
+import com.android.internal.util.Preconditions;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.util.Objects;
+
+/**
+ * Encapsulates the format of tracks played in {@link TvInputService}.
+ */
+public final class TvTrackInfo implements Parcelable {
+
+ /** @hide */
+ @Retention(RetentionPolicy.SOURCE)
+ @IntDef({TYPE_AUDIO, TYPE_VIDEO, TYPE_SUBTITLE})
+ public @interface Type {}
+
+ /**
+ * The type value for audio tracks.
+ */
+ public static final int TYPE_AUDIO = 0;
+
+ /**
+ * The type value for video tracks.
+ */
+ public static final int TYPE_VIDEO = 1;
+
+ /**
+ * The type value for subtitle tracks.
+ */
+ public static final int TYPE_SUBTITLE = 2;
+
+ private final int mType;
+ private final String mId;
+ private final String mLanguage;
+ private final CharSequence mDescription;
+ private final int mAudioChannelCount;
+ private final int mAudioSampleRate;
+ private final int mVideoWidth;
+ private final int mVideoHeight;
+ private final float mVideoFrameRate;
+ private final float mVideoPixelAspectRatio;
+ private final byte mVideoActiveFormatDescription;
+
+ private final Bundle mExtra;
+
+ private TvTrackInfo(int type, String id, String language, CharSequence description,
+ int audioChannelCount, int audioSampleRate, int videoWidth, int videoHeight,
+ float videoFrameRate, float videoPixelAspectRatio, byte videoActiveFormatDescription,
+ Bundle extra) {
+ mType = type;
+ mId = id;
+ mLanguage = language;
+ mDescription = description;
+ mAudioChannelCount = audioChannelCount;
+ mAudioSampleRate = audioSampleRate;
+ mVideoWidth = videoWidth;
+ mVideoHeight = videoHeight;
+ mVideoFrameRate = videoFrameRate;
+ mVideoPixelAspectRatio = videoPixelAspectRatio;
+ mVideoActiveFormatDescription = videoActiveFormatDescription;
+ mExtra = extra;
+ }
+
+ private TvTrackInfo(Parcel in) {
+ mType = in.readInt();
+ mId = in.readString();
+ mLanguage = in.readString();
+ mDescription = in.readString();
+ mAudioChannelCount = in.readInt();
+ mAudioSampleRate = in.readInt();
+ mVideoWidth = in.readInt();
+ mVideoHeight = in.readInt();
+ mVideoFrameRate = in.readFloat();
+ mVideoPixelAspectRatio = in.readFloat();
+ mVideoActiveFormatDescription = in.readByte();
+ mExtra = in.readBundle();
+ }
+
+ /**
+ * Returns the type of the track. The type should be one of the followings:
+ * {@link #TYPE_AUDIO}, {@link #TYPE_VIDEO} and {@link #TYPE_SUBTITLE}.
+ */
+ @Type
+ public final int getType() {
+ return mType;
+ }
+
+ /**
+ * Returns the ID of the track.
+ */
+ public final String getId() {
+ return mId;
+ }
+
+ /**
+ * Returns the language information encoded by either ISO 639-1 or ISO 639-2/T. If the language
+ * is unknown or could not be determined, the corresponding value will be {@code null}.
+ */
+ public final String getLanguage() {
+ return mLanguage;
+ }
+
+ /**
+ * Returns a user readable description for the current track.
+ */
+ public final CharSequence getDescription() {
+ return mDescription;
+ }
+
+ /**
+ * Returns the audio channel count. Valid only for {@link #TYPE_AUDIO} tracks.
+ *
+ * @throws IllegalStateException if not called on an audio track
+ */
+ public final int getAudioChannelCount() {
+ if (mType != TYPE_AUDIO) {
+ throw new IllegalStateException("Not an audio track");
+ }
+ return mAudioChannelCount;
+ }
+
+ /**
+ * Returns the audio sample rate, in the unit of Hz. Valid only for {@link #TYPE_AUDIO} tracks.
+ *
+ * @throws IllegalStateException if not called on an audio track
+ */
+ public final int getAudioSampleRate() {
+ if (mType != TYPE_AUDIO) {
+ throw new IllegalStateException("Not an audio track");
+ }
+ return mAudioSampleRate;
+ }
+
+ /**
+ * Returns the width of the video, in the unit of pixels. Valid only for {@link #TYPE_VIDEO}
+ * tracks.
+ *
+ * @throws IllegalStateException if not called on a video track
+ */
+ public final int getVideoWidth() {
+ if (mType != TYPE_VIDEO) {
+ throw new IllegalStateException("Not a video track");
+ }
+ return mVideoWidth;
+ }
+
+ /**
+ * Returns the height of the video, in the unit of pixels. Valid only for {@link #TYPE_VIDEO}
+ * tracks.
+ *
+ * @throws IllegalStateException if not called on a video track
+ */
+ public final int getVideoHeight() {
+ if (mType != TYPE_VIDEO) {
+ throw new IllegalStateException("Not a video track");
+ }
+ return mVideoHeight;
+ }
+
+ /**
+ * Returns the frame rate of the video, in the unit of fps (frames per second). Valid only for
+ * {@link #TYPE_VIDEO} tracks.
+ *
+ * @throws IllegalStateException if not called on a video track
+ */
+ public final float getVideoFrameRate() {
+ if (mType != TYPE_VIDEO) {
+ throw new IllegalStateException("Not a video track");
+ }
+ return mVideoFrameRate;
+ }
+
+ /**
+ * Returns the pixel aspect ratio (the ratio of a pixel's width to its height) of the video.
+ * Valid only for {@link #TYPE_VIDEO} tracks.
+ *
+ * @throws IllegalStateException if not called on a video track
+ */
+ public final float getVideoPixelAspectRatio() {
+ if (mType != TYPE_VIDEO) {
+ throw new IllegalStateException("Not a video track");
+ }
+ return mVideoPixelAspectRatio;
+ }
+
+ /**
+ * Returns the Active Format Description (AFD) code of the video.
+ * Valid only for {@link #TYPE_VIDEO} tracks.
+ *
+ * <p>The complete list of values are defined in ETSI TS 101 154 V1.7.1 Annex B, ATSC A/53 Part
+ * 4 and SMPTE 2016-1-2007.
+ *
+ * @throws IllegalStateException if not called on a video track
+ */
+ public final byte getVideoActiveFormatDescription() {
+ if (mType != TYPE_VIDEO) {
+ throw new IllegalStateException("Not a video track");
+ }
+ return mVideoActiveFormatDescription;
+ }
+
+ /**
+ * Returns the extra information about the current track.
+ */
+ public final Bundle getExtra() {
+ return mExtra;
+ }
+
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ /**
+ * Used to package this object into a {@link Parcel}.
+ *
+ * @param dest The {@link Parcel} to be written.
+ * @param flags The flags used for parceling.
+ */
+ @Override
+ public void writeToParcel(Parcel dest, int flags) {
+ dest.writeInt(mType);
+ dest.writeString(mId);
+ dest.writeString(mLanguage);
+ dest.writeString(mDescription != null ? mDescription.toString() : null);
+ dest.writeInt(mAudioChannelCount);
+ dest.writeInt(mAudioSampleRate);
+ dest.writeInt(mVideoWidth);
+ dest.writeInt(mVideoHeight);
+ dest.writeFloat(mVideoFrameRate);
+ dest.writeFloat(mVideoPixelAspectRatio);
+ dest.writeByte(mVideoActiveFormatDescription);
+ dest.writeBundle(mExtra);
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+
+ if (!(o instanceof TvTrackInfo)) {
+ return false;
+ }
+
+ TvTrackInfo obj = (TvTrackInfo) o;
+ return TextUtils.equals(mId, obj.mId)
+ && mType == obj.mType
+ && TextUtils.equals(mLanguage, obj.mLanguage)
+ && TextUtils.equals(mDescription, obj.mDescription)
+ && Objects.equals(mExtra, obj.mExtra)
+ && (mType == TYPE_AUDIO
+ ? mAudioChannelCount == obj.mAudioChannelCount
+ && mAudioSampleRate == obj.mAudioSampleRate
+ : (mType == TYPE_VIDEO
+ ? mVideoWidth == obj.mVideoWidth
+ && mVideoHeight == obj.mVideoHeight
+ && mVideoFrameRate == obj.mVideoFrameRate
+ && mVideoPixelAspectRatio == obj.mVideoPixelAspectRatio : true));
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hashCode(mId);
+ }
+
+ public static final Parcelable.Creator<TvTrackInfo> CREATOR =
+ new Parcelable.Creator<TvTrackInfo>() {
+ @Override
+ public TvTrackInfo createFromParcel(Parcel in) {
+ return new TvTrackInfo(in);
+ }
+
+ @Override
+ public TvTrackInfo[] newArray(int size) {
+ return new TvTrackInfo[size];
+ }
+ };
+
+ /**
+ * A builder class for creating {@link TvTrackInfo} objects.
+ */
+ public static final class Builder {
+ private final String mId;
+ private final int mType;
+ private String mLanguage;
+ private CharSequence mDescription;
+ private int mAudioChannelCount;
+ private int mAudioSampleRate;
+ private int mVideoWidth;
+ private int mVideoHeight;
+ private float mVideoFrameRate;
+ private float mVideoPixelAspectRatio = 1.0f;
+ private byte mVideoActiveFormatDescription;
+ private Bundle mExtra;
+
+ /**
+ * Create a {@link Builder}. Any field that should be included in the {@link TvTrackInfo}
+ * must be added.
+ *
+ * @param type The type of the track.
+ * @param id The ID of the track that uniquely identifies the current track among all the
+ * other tracks in the same TV program.
+ * @throws IllegalArgumentException if the type is not any of {@link #TYPE_AUDIO},
+ * {@link #TYPE_VIDEO} and {@link #TYPE_SUBTITLE}
+ */
+ public Builder(@Type int type, @NonNull String id) {
+ if (type != TYPE_AUDIO
+ && type != TYPE_VIDEO
+ && type != TYPE_SUBTITLE) {
+ throw new IllegalArgumentException("Unknown type: " + type);
+ }
+ Preconditions.checkNotNull(id);
+ mType = type;
+ mId = id;
+ }
+
+ /**
+ * Sets the language information of the current track.
+ *
+ * @param language The language string encoded by either ISO 639-1 or ISO 639-2/T.
+ */
+ public final Builder setLanguage(String language) {
+ mLanguage = language;
+ return this;
+ }
+
+ /**
+ * Sets a user readable description for the current track.
+ *
+ * @param description The user readable description.
+ */
+ public final Builder setDescription(CharSequence description) {
+ mDescription = description;
+ return this;
+ }
+
+ /**
+ * Sets the audio channel count. Valid only for {@link #TYPE_AUDIO} tracks.
+ *
+ * @param audioChannelCount The audio channel count.
+ * @throws IllegalStateException if not called on an audio track
+ */
+ public final Builder setAudioChannelCount(int audioChannelCount) {
+ if (mType != TYPE_AUDIO) {
+ throw new IllegalStateException("Not an audio track");
+ }
+ mAudioChannelCount = audioChannelCount;
+ return this;
+ }
+
+ /**
+ * Sets the audio sample rate, in the unit of Hz. Valid only for {@link #TYPE_AUDIO}
+ * tracks.
+ *
+ * @param audioSampleRate The audio sample rate.
+ * @throws IllegalStateException if not called on an audio track
+ */
+ public final Builder setAudioSampleRate(int audioSampleRate) {
+ if (mType != TYPE_AUDIO) {
+ throw new IllegalStateException("Not an audio track");
+ }
+ mAudioSampleRate = audioSampleRate;
+ return this;
+ }
+
+ /**
+ * Sets the width of the video, in the unit of pixels. Valid only for {@link #TYPE_VIDEO}
+ * tracks.
+ *
+ * @param videoWidth The width of the video.
+ * @throws IllegalStateException if not called on a video track
+ */
+ public final Builder setVideoWidth(int videoWidth) {
+ if (mType != TYPE_VIDEO) {
+ throw new IllegalStateException("Not a video track");
+ }
+ mVideoWidth = videoWidth;
+ return this;
+ }
+
+ /**
+ * Sets the height of the video, in the unit of pixels. Valid only for {@link #TYPE_VIDEO}
+ * tracks.
+ *
+ * @param videoHeight The height of the video.
+ * @throws IllegalStateException if not called on a video track
+ */
+ public final Builder setVideoHeight(int videoHeight) {
+ if (mType != TYPE_VIDEO) {
+ throw new IllegalStateException("Not a video track");
+ }
+ mVideoHeight = videoHeight;
+ return this;
+ }
+
+ /**
+ * Sets the frame rate of the video, in the unit fps (frames per rate). Valid only for
+ * {@link #TYPE_VIDEO} tracks.
+ *
+ * @param videoFrameRate The frame rate of the video.
+ * @throws IllegalStateException if not called on a video track
+ */
+ public final Builder setVideoFrameRate(float videoFrameRate) {
+ if (mType != TYPE_VIDEO) {
+ throw new IllegalStateException("Not a video track");
+ }
+ mVideoFrameRate = videoFrameRate;
+ return this;
+ }
+
+ /**
+ * Sets the pixel aspect ratio (the ratio of a pixel's width to its height) of the video.
+ * Valid only for {@link #TYPE_VIDEO} tracks.
+ *
+ * <p>This is needed for applications to be able to scale the video properly for some video
+ * formats such as 720x576 4:3 and 720x576 16:9 where pixels are not square. By default,
+ * applications assume the value of 1.0 (square pixels), so it is not necessary to set the
+ * pixel aspect ratio for most video formats.
+ *
+ * @param videoPixelAspectRatio The pixel aspect ratio of the video.
+ * @throws IllegalStateException if not called on a video track
+ */
+ public final Builder setVideoPixelAspectRatio(float videoPixelAspectRatio) {
+ if (mType != TYPE_VIDEO) {
+ throw new IllegalStateException("Not a video track");
+ }
+ mVideoPixelAspectRatio = videoPixelAspectRatio;
+ return this;
+ }
+
+ /**
+ * Sets the Active Format Description (AFD) code of the video.
+ * Valid only for {@link #TYPE_VIDEO} tracks.
+ *
+ * <p>This is needed for applications to be able to scale the video properly based on the
+ * information about where in the coded picture the active video is.
+ * The complete list of values are defined in ETSI TS 101 154 V1.7.1 Annex B, ATSC A/53 Part
+ * 4 and SMPTE 2016-1-2007.
+ *
+ * @param videoActiveFormatDescription The AFD code of the video.
+ * @throws IllegalStateException if not called on a video track
+ */
+ public final Builder setVideoActiveFormatDescription(byte videoActiveFormatDescription) {
+ if (mType != TYPE_VIDEO) {
+ throw new IllegalStateException("Not a video track");
+ }
+ mVideoActiveFormatDescription = videoActiveFormatDescription;
+ return this;
+ }
+
+ /**
+ * Sets the extra information about the current track.
+ *
+ * @param extra The extra information.
+ */
+ public final Builder setExtra(Bundle extra) {
+ mExtra = new Bundle(extra);
+ return this;
+ }
+
+ /**
+ * Creates a {@link TvTrackInfo} instance with the specified fields.
+ *
+ * @return The new {@link TvTrackInfo} instance
+ */
+ public TvTrackInfo build() {
+ return new TvTrackInfo(mType, mId, mLanguage, mDescription, mAudioChannelCount,
+ mAudioSampleRate, mVideoWidth, mVideoHeight, mVideoFrameRate,
+ mVideoPixelAspectRatio, mVideoActiveFormatDescription, mExtra);
+ }
+ }
+}
diff --git a/android/media/tv/TvView.java b/android/media/tv/TvView.java
new file mode 100644
index 00000000..6b329f8e
--- /dev/null
+++ b/android/media/tv/TvView.java
@@ -0,0 +1,1330 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.tv;
+
+import android.annotation.FloatRange;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.annotation.RequiresPermission;
+import android.annotation.SystemApi;
+import android.content.Context;
+import android.content.pm.PackageManager;
+import android.graphics.Canvas;
+import android.graphics.PorterDuff;
+import android.graphics.Rect;
+import android.graphics.RectF;
+import android.graphics.Region;
+import android.media.PlaybackParams;
+import android.media.tv.TvInputManager.Session;
+import android.media.tv.TvInputManager.Session.FinishedInputEventCallback;
+import android.media.tv.TvInputManager.SessionCallback;
+import android.net.Uri;
+import android.os.Bundle;
+import android.os.Handler;
+import android.text.TextUtils;
+import android.util.AttributeSet;
+import android.util.Log;
+import android.util.Pair;
+import android.view.InputEvent;
+import android.view.KeyEvent;
+import android.view.MotionEvent;
+import android.view.Surface;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+import android.view.View;
+import android.view.ViewGroup;
+import android.view.ViewRootImpl;
+
+import java.lang.ref.WeakReference;
+import java.util.ArrayDeque;
+import java.util.List;
+import java.util.Queue;
+
+/**
+ * Displays TV contents. The TvView class provides a high level interface for applications to show
+ * TV programs from various TV sources that implement {@link TvInputService}. (Note that the list of
+ * TV inputs available on the system can be obtained by calling
+ * {@link TvInputManager#getTvInputList() TvInputManager.getTvInputList()}.)
+ *
+ * <p>Once the application supplies the URI for a specific TV channel to {@link #tune}
+ * method, it takes care of underlying service binding (and unbinding if the current TvView is
+ * already bound to a service) and automatically allocates/deallocates resources needed. In addition
+ * to a few essential methods to control how the contents are presented, it also provides a way to
+ * dispatch input events to the connected TvInputService in order to enable custom key actions for
+ * the TV input.
+ */
+public class TvView extends ViewGroup {
+    private static final String TAG = "TvView";
+    private static final boolean DEBUG = false;
+
+    // Z-order modes for the underlying SurfaceView; see setZOrderMediaOverlay/setZOrderOnTop.
+    private static final int ZORDER_MEDIA = 0;
+    private static final int ZORDER_MEDIA_OVERLAY = 1;
+    private static final int ZORDER_ON_TOP = 2;
+
+    // Sentinel so sMainTvView is never null; its get() returns null when no main view is set.
+    private static final WeakReference<TvView> NULL_TV_VIEW = new WeakReference<>(null);
+
+    // Guards sMainTvView, the view whose session drives HDMI-CEC active source selection.
+    private static final Object sMainTvViewLock = new Object();
+    private static WeakReference<TvView> sMainTvView = NULL_TV_VIEW;
+
+    private final Handler mHandler = new Handler();
+    // Session with the bound TV input; null until the connection is established.
+    private Session mSession;
+    private SurfaceView mSurfaceView;
+    private Surface mSurface;
+    private boolean mOverlayViewCreated;
+    private Rect mOverlayViewFrame;
+    private final TvInputManager mTvInputManager;
+    private MySessionCallback mSessionCallback;
+    private TvInputCallback mCallback;
+    private OnUnhandledInputEventListener mOnUnhandledInputEventListener;
+    // Boxed types so "never set" (null) can be told apart from an explicit value.
+    private Float mStreamVolume;
+    private Boolean mCaptionEnabled;
+    // Private commands sent before a session exists; presumably flushed when one is
+    // created — see the session-creation path in MySessionCallback (not shown here).
+    private final Queue<Pair<String, Bundle>> mPendingAppPrivateCommands = new ArrayDeque<>();
+
+    private boolean mSurfaceChanged;
+    private int mSurfaceFormat;
+    private int mSurfaceWidth;
+    private int mSurfaceHeight;
+    // Kept so resetSurfaceView() can recreate the SurfaceView with the original XML attributes.
+    private final AttributeSet mAttrs;
+    private final int mDefStyleAttr;
+    private int mWindowZOrder;
+    // When true, onLayout() honors the explicit mSurfaceView* bounds below.
+    private boolean mUseRequestedSurfaceLayout;
+    private int mSurfaceViewLeft;
+    private int mSurfaceViewRight;
+    private int mSurfaceViewTop;
+    private int mSurfaceViewBottom;
+    private TimeShiftPositionCallback mTimeShiftPositionCallback;
+
+    private final SurfaceHolder.Callback mSurfaceHolderCallback = new SurfaceHolder.Callback() {
+        @Override
+        public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
+            if (DEBUG) {
+                Log.d(TAG, "surfaceChanged(holder=" + holder + ", format=" + format + ", width="
+                        + width + ", height=" + height + ")");
+            }
+            // Cache the latest geometry so it can be replayed to a session that attaches later.
+            mSurfaceFormat = format;
+            mSurfaceWidth = width;
+            mSurfaceHeight = height;
+            mSurfaceChanged = true;
+            dispatchSurfaceChanged(mSurfaceFormat, mSurfaceWidth, mSurfaceHeight);
+        }
+
+        @Override
+        public void surfaceCreated(SurfaceHolder holder) {
+            mSurface = holder.getSurface();
+            setSessionSurface(mSurface);
+        }
+
+        @Override
+        public void surfaceDestroyed(SurfaceHolder holder) {
+            // Detach the surface from the session before it becomes invalid.
+            mSurface = null;
+            mSurfaceChanged = false;
+            setSessionSurface(null);
+        }
+    };
+
+    private final FinishedInputEventCallback mFinishedInputEventCallback =
+            new FinishedInputEventCallback() {
+        @Override
+        public void onFinishedInputEvent(Object token, boolean handled) {
+            if (DEBUG) {
+                Log.d(TAG, "onFinishedInputEvent(token=" + token + ", handled=" + handled + ")");
+            }
+            if (handled) {
+                return;
+            }
+            // TODO: Re-order unhandled events.
+            // The token is the copied InputEvent handed to Session.dispatchInputEvent().
+            InputEvent event = (InputEvent) token;
+            if (dispatchUnhandledInputEvent(event)) {
+                return;
+            }
+            // Nobody consumed it; fall back to the normal view-hierarchy dispatch.
+            ViewRootImpl viewRootImpl = getViewRootImpl();
+            if (viewRootImpl != null) {
+                viewRootImpl.dispatchUnhandledInputEvent(event);
+            }
+        }
+    };
+
+    /** Creates a new TvView with no XML attributes. */
+    public TvView(Context context) {
+        this(context, null, 0);
+    }
+
+    /** Creates a new TvView with the given XML attributes and no default style attribute. */
+    public TvView(Context context, AttributeSet attrs) {
+        this(context, attrs, 0);
+    }
+
+    /** Creates a new TvView with the given XML attributes and default style attribute. */
+    public TvView(Context context, AttributeSet attrs, int defStyleAttr) {
+        super(context, attrs, defStyleAttr);
+        // Keep the attributes so resetSurfaceView() can recreate the SurfaceView with them.
+        mAttrs = attrs;
+        mDefStyleAttr = defStyleAttr;
+        resetSurfaceView();
+        mTvInputManager = (TvInputManager) getContext().getSystemService(Context.TV_INPUT_SERVICE);
+    }
+
+ /**
+ * Sets the callback to be invoked when an event is dispatched to this TvView.
+ *
+ * @param callback The callback to receive events. A value of {@code null} removes the existing
+ * callback.
+ */
+    public void setCallback(@Nullable TvInputCallback callback) {
+        // Replaces any previously registered callback; null clears it.
+        mCallback = callback;
+    }
+
+ /**
+ * Sets this as the main {@link TvView}.
+ *
+ * <p>The main {@link TvView} is a {@link TvView} whose corresponding TV input determines the
+ * HDMI-CEC active source device. For an HDMI port input, one of source devices that is
+ * connected to that HDMI port becomes the active source. For an HDMI-CEC logical device input,
+ * the corresponding HDMI-CEC logical device becomes the active source. For any non-HDMI input
+ * (including the tuner, composite, S-Video, etc.), the internal device (= TV itself) becomes
+ * the active source.
+ *
+ * <p>First tuned {@link TvView} becomes main automatically, and keeps to be main until either
+ * {@link #reset} is called for the main {@link TvView} or {@code setMain()} is called for other
+ * {@link TvView}.
+ * @hide
+ */
+ @SystemApi
+ @RequiresPermission(android.Manifest.permission.CHANGE_HDMI_CEC_ACTIVE_SOURCE)
+    public void setMain() {
+        synchronized (sMainTvViewLock) {
+            sMainTvView = new WeakReference<>(this);
+            // Only notify the session while this view's window has focus;
+            // dispatchWindowFocusChanged() re-asserts main-ness when focus returns.
+            if (hasWindowFocus() && mSession != null) {
+                mSession.setMain();
+            }
+        }
+    }
+
+    /**
+     * Controls whether the TvView's surface is placed on top of another regular surface view in
+     * the window (but still behind the window itself). Typically used to place overlays on top
+     * of an underlying TvView.
+     *
+     * <p>Note that this must be set before the TvView's containing window is attached to the
+     * window manager. Calling this overrides any previous call to {@link #setZOrderOnTop}.
+     *
+     * @param isMediaOverlay {@code true} to be on top of another regular surface, {@code false}
+     *            otherwise.
+     */
+    public void setZOrderMediaOverlay(boolean isMediaOverlay) {
+        mWindowZOrder = isMediaOverlay ? ZORDER_MEDIA_OVERLAY : ZORDER_MEDIA;
+        if (isMediaOverlay) {
+            // The session overlay view is only supported at ZORDER_MEDIA.
+            removeSessionOverlayView();
+        } else {
+            createSessionOverlayView();
+        }
+        if (mSurfaceView != null) {
+            // ZOrderOnTop(false) removes WindowManager.LayoutParams.FLAG_ALT_FOCUSABLE_IM
+            // from WindowLayoutParam as well as changes window type.
+            mSurfaceView.setZOrderOnTop(false);
+            mSurfaceView.setZOrderMediaOverlay(isMediaOverlay);
+        }
+    }
+
+    /**
+     * Controls whether the TvView's surface is placed on top of its window. By default the
+     * surface sits behind the window so it can (for the most part) composite with the view
+     * hierarchy; setting this places it above the window, so none of the window's contents
+     * will be visible on top of its surface.
+     *
+     * <p>Note that this must be set before the TvView's containing window is attached to the
+     * window manager. Calling this overrides any previous call to
+     * {@link #setZOrderMediaOverlay}.
+     *
+     * @param onTop {@code true} to be on top of its window, {@code false} otherwise.
+     */
+    public void setZOrderOnTop(boolean onTop) {
+        mWindowZOrder = onTop ? ZORDER_ON_TOP : ZORDER_MEDIA;
+        if (onTop) {
+            // The session overlay view is only supported at ZORDER_MEDIA.
+            removeSessionOverlayView();
+        } else {
+            createSessionOverlayView();
+        }
+        if (mSurfaceView != null) {
+            mSurfaceView.setZOrderMediaOverlay(false);
+            mSurfaceView.setZOrderOnTop(onTop);
+        }
+    }
+
+    /**
+     * Sets the relative stream volume of this TvView.
+     *
+     * <p>Primarily used to handle audio focus changes or to mute a specific TvView when several
+     * views are displayed. If never called, the TvView assumes the default value of
+     * {@code 1.0f}.
+     *
+     * @param volume A volume value between {@code 0.0f} to {@code 1.0f}.
+     */
+    public void setStreamVolume(@FloatRange(from = 0.0, to = 1.0) float volume) {
+        if (DEBUG) Log.d(TAG, "setStreamVolume(" + volume + ")");
+        // Always remember the value; it is applied once a session connection exists.
+        mStreamVolume = volume;
+        if (mSession != null) {
+            mSession.setStreamVolume(volume);
+        }
+    }
+
+ /**
+ * Tunes to a given channel.
+ *
+ * @param inputId The ID of the TV input for the given channel.
+ * @param channelUri The URI of a channel.
+ */
+    public void tune(@NonNull String inputId, Uri channelUri) {
+        // Convenience overload with no domain-specific parameters.
+        tune(inputId, channelUri, null);
+    }
+
+ /**
+ * Tunes to a given channel. This can be used to provide domain-specific features that are only
+ * known between certain clients and their TV inputs.
+ *
+ * @param inputId The ID of TV input for the given channel.
+ * @param channelUri The URI of a channel.
+ * @param params Domain-specific data for this tune request. Keys <em>must</em> be a scoped
+ * name, i.e. prefixed with a package name you own, so that different developers will
+ * not create conflicting keys.
+ */
+    public void tune(String inputId, Uri channelUri, Bundle params) {
+        if (DEBUG) Log.d(TAG, "tune(" + channelUri + ")");
+        if (TextUtils.isEmpty(inputId)) {
+            throw new IllegalArgumentException("inputId cannot be null or an empty string");
+        }
+        // The first tuned TvView becomes the main view automatically (see setMain()).
+        synchronized (sMainTvViewLock) {
+            if (sMainTvView.get() == null) {
+                sMainTvView = new WeakReference<>(this);
+            }
+        }
+        if (mSessionCallback != null && TextUtils.equals(mSessionCallback.mInputId, inputId)) {
+            if (mSession != null) {
+                mSession.tune(channelUri, params);
+            } else {
+                // createSession() was called but the actual session for the given inputId has not
+                // yet been created. Just replace the existing tuning params in the callback with
+                // the new ones and tune later in onSessionCreated(). It is not necessary to create
+                // a new callback because this tuning request was made on the same inputId.
+                mSessionCallback.mChannelUri = channelUri;
+                mSessionCallback.mTuneParams = params;
+            }
+        } else {
+            resetInternal();
+            // In case createSession() is called multiple times across different inputId's before
+            // any session is created (e.g. when quickly tuning to a channel from input A and then
+            // to another channel from input B), only the callback for the last createSession()
+            // should be invoked. (The previous callbacks are simply ignored.) To do that, we create
+            // a new callback each time and keep mSessionCallback pointing to the last one. If
+            // MySessionCallback.this is different from mSessionCallback, we know that this callback
+            // is obsolete and should ignore it.
+            mSessionCallback = new MySessionCallback(inputId, channelUri, params);
+            if (mTvInputManager != null) {
+                mTvInputManager.createSession(inputId, mSessionCallback, mHandler);
+            }
+        }
+    }
+
+ /**
+ * Resets this TvView.
+ *
+ * <p>This method is primarily used to un-tune the current TvView.
+ */
+    public void reset() {
+        if (DEBUG) Log.d(TAG, "reset()");
+        // Relinquish main-ness before tearing down the session state.
+        synchronized (sMainTvViewLock) {
+            if (this == sMainTvView.get()) {
+                sMainTvView = NULL_TV_VIEW;
+            }
+        }
+        resetInternal();
+    }
+
+    private void resetInternal() {
+        // Drop any in-flight session callback and queued private commands first.
+        mSessionCallback = null;
+        mPendingAppPrivateCommands.clear();
+        if (mSession != null) {
+            // Detach surface and overlay before releasing the session.
+            setSessionSurface(null);
+            removeSessionOverlayView();
+            mUseRequestedSurfaceLayout = false;
+            mSession.release();
+            mSession = null;
+            // Recreate the SurfaceView so the next session starts from a clean surface.
+            resetSurfaceView();
+        }
+    }
+
+ /**
+ * Requests to unblock TV content according to the given rating.
+ *
+ * <p>This notifies TV input that blocked content is now OK to play.
+ *
+ * @param unblockedRating A TvContentRating to unblock.
+ * @see TvInputService.Session#notifyContentBlocked(TvContentRating)
+ * @removed
+ */
+    public void requestUnblockContent(TvContentRating unblockedRating) {
+        // Retained for backward compatibility (@removed); delegates to unblockContent().
+        unblockContent(unblockedRating);
+    }
+
+ /**
+ * Requests to unblock TV content according to the given rating.
+ *
+ * <p>This notifies TV input that blocked content is now OK to play.
+ *
+ * @param unblockedRating A TvContentRating to unblock.
+ * @see TvInputService.Session#notifyContentBlocked(TvContentRating)
+ * @hide
+ */
+ @SystemApi
+ @RequiresPermission(android.Manifest.permission.MODIFY_PARENTAL_CONTROLS)
+    public void unblockContent(TvContentRating unblockedRating) {
+        // Silently ignored when no session is connected.
+        if (mSession != null) {
+            mSession.unblockContent(unblockedRating);
+        }
+    }
+
+ /**
+ * Enables or disables the caption in this TvView.
+ *
+ * <p>Note that this method does not take any effect unless the current TvView is tuned.
+ *
+ * @param enabled {@code true} to enable, {@code false} to disable.
+ */
+    public void setCaptionEnabled(boolean enabled) {
+        if (DEBUG) Log.d(TAG, "setCaptionEnabled(" + enabled + ")");
+        // Cache the preference; presumably re-applied when a session connects — confirm in
+        // MySessionCallback (not shown here).
+        mCaptionEnabled = enabled;
+        if (mSession != null) {
+            mSession.setCaptionEnabled(enabled);
+        }
+    }
+
+ /**
+ * Selects a track.
+ *
+ * @param type The type of the track to select. The type can be {@link TvTrackInfo#TYPE_AUDIO},
+ * {@link TvTrackInfo#TYPE_VIDEO} or {@link TvTrackInfo#TYPE_SUBTITLE}.
+ * @param trackId The ID of the track to select. {@code null} means to unselect the current
+ * track for a given type.
+ * @see #getTracks
+ * @see #getSelectedTrack
+ */
+    public void selectTrack(int type, String trackId) {
+        // No-op when this view is not tuned to a session.
+        if (mSession != null) {
+            mSession.selectTrack(type, trackId);
+        }
+    }
+
+    /**
+     * Returns the list of tracks. Returns {@code null} if the information is not available.
+     *
+     * @param type The type of the tracks. The type can be {@link TvTrackInfo#TYPE_AUDIO},
+     *            {@link TvTrackInfo#TYPE_VIDEO} or {@link TvTrackInfo#TYPE_SUBTITLE}.
+     * @see #selectTrack
+     * @see #getSelectedTrack
+     */
+    public List<TvTrackInfo> getTracks(int type) {
+        // Track information is only available through a connected session.
+        return mSession == null ? null : mSession.getTracks(type);
+    }
+
+    /**
+     * Returns the ID of the selected track for a given type. Returns {@code null} if the
+     * information is not available or the track is not selected.
+     *
+     * @param type The type of the selected tracks. The type can be
+     *            {@link TvTrackInfo#TYPE_AUDIO}, {@link TvTrackInfo#TYPE_VIDEO} or
+     *            {@link TvTrackInfo#TYPE_SUBTITLE}.
+     * @see #selectTrack
+     * @see #getTracks
+     */
+    public String getSelectedTrack(int type) {
+        // Selection state is only available through a connected session.
+        return mSession == null ? null : mSession.getSelectedTrack(type);
+    }
+
+ /**
+ * Plays a given recorded TV program.
+ *
+ * @param inputId The ID of the TV input that created the given recorded program.
+ * @param recordedProgramUri The URI of a recorded program.
+ */
+    public void timeShiftPlay(String inputId, Uri recordedProgramUri) {
+        if (DEBUG) Log.d(TAG, "timeShiftPlay(" + recordedProgramUri + ")");
+        if (TextUtils.isEmpty(inputId)) {
+            throw new IllegalArgumentException("inputId cannot be null or an empty string");
+        }
+        // The first connected TvView becomes the main view if none is set (see setMain()).
+        synchronized (sMainTvViewLock) {
+            if (sMainTvView.get() == null) {
+                sMainTvView = new WeakReference<>(this);
+            }
+        }
+        if (mSessionCallback != null && TextUtils.equals(mSessionCallback.mInputId, inputId)) {
+            if (mSession != null) {
+                mSession.timeShiftPlay(recordedProgramUri);
+            } else {
+                // Session creation is still in flight; stash the URI so playback can start once
+                // the session is created.
+                mSessionCallback.mRecordedProgramUri = recordedProgramUri;
+            }
+        } else {
+            resetInternal();
+            // Same callback-replacement strategy as tune(): only the latest callback wins.
+            mSessionCallback = new MySessionCallback(inputId, recordedProgramUri);
+            if (mTvInputManager != null) {
+                mTvInputManager.createSession(inputId, mSessionCallback, mHandler);
+            }
+        }
+    }
+
+ /**
+ * Pauses playback. No-op if it is already paused. Call {@link #timeShiftResume} to resume.
+ */
+    public void timeShiftPause() {
+        // No-op when no session is connected.
+        if (mSession != null) {
+            mSession.timeShiftPause();
+        }
+    }
+
+ /**
+ * Resumes playback. No-op if it is already resumed. Call {@link #timeShiftPause} to pause.
+ */
+    public void timeShiftResume() {
+        // No-op when no session is connected.
+        if (mSession != null) {
+            mSession.timeShiftResume();
+        }
+    }
+
+ /**
+ * Seeks to a specified time position. {@code timeMs} must be equal to or greater than the start
+ * position returned by {@link TimeShiftPositionCallback#onTimeShiftStartPositionChanged} and
+ * equal to or less than the current time.
+ *
+ * @param timeMs The time position to seek to, in milliseconds since the epoch.
+ */
+    public void timeShiftSeekTo(long timeMs) {
+        // No-op when no session is connected.
+        if (mSession != null) {
+            mSession.timeShiftSeekTo(timeMs);
+        }
+    }
+
+ /**
+ * Sets playback rate using {@link android.media.PlaybackParams}.
+ *
+ * @param params The playback params.
+ */
+    public void timeShiftSetPlaybackParams(@NonNull PlaybackParams params) {
+        // No-op when no session is connected.
+        if (mSession != null) {
+            mSession.timeShiftSetPlaybackParams(params);
+        }
+    }
+
+ /**
+ * Sets the callback to be invoked when the time shift position is changed.
+ *
+ * @param callback The callback to receive time shift position changes. A value of {@code null}
+ * removes the existing callback.
+ */
+    public void setTimeShiftPositionCallback(@Nullable TimeShiftPositionCallback callback) {
+        mTimeShiftPositionCallback = callback;
+        // Enable/disable session position tracking to match whether a callback is set.
+        ensurePositionTracking();
+    }
+
+    /** Syncs the session's position tracking with whether a position callback is registered. */
+    private void ensurePositionTracking() {
+        if (mSession != null) {
+            mSession.timeShiftEnablePositionTracking(mTimeShiftPositionCallback != null);
+        }
+    }
+
+ /**
+ * Sends a private command to the underlying TV input. This can be used to provide
+ * domain-specific features that are only known between certain clients and their TV inputs.
+ *
+ * @param action The name of the private command to send. This <em>must</em> be a scoped name,
+ * i.e. prefixed with a package name you own, so that different developers will not
+ * create conflicting commands.
+ * @param data An optional bundle to send with the command.
+ */
+    public void sendAppPrivateCommand(@NonNull String action, Bundle data) {
+        if (TextUtils.isEmpty(action)) {
+            throw new IllegalArgumentException("action cannot be null or an empty string");
+        }
+        if (mSession != null) {
+            mSession.sendAppPrivateCommand(action, data);
+        } else {
+            Log.w(TAG, "sendAppPrivateCommand - session not yet created (action \"" + action
+                    + "\" pending)");
+            // Queued while no session exists; presumably flushed once the session is created —
+            // confirm in MySessionCallback (not shown here).
+            mPendingAppPrivateCommands.add(Pair.create(action, data));
+        }
+    }
+
+ /**
+ * Dispatches an unhandled input event to the next receiver.
+ *
+ * <p>Except system keys, TvView always consumes input events in the normal flow. This is called
+ * asynchronously from where the event is dispatched. It gives the host application a chance to
+ * dispatch the unhandled input events.
+ *
+ * @param event The input event.
+ * @return {@code true} if the event was handled by the view, {@code false} otherwise.
+ */
+ public boolean dispatchUnhandledInputEvent(InputEvent event) {
+ if (mOnUnhandledInputEventListener != null) {
+ if (mOnUnhandledInputEventListener.onUnhandledInputEvent(event)) {
+ return true;
+ }
+ }
+ return onUnhandledInputEvent(event);
+ }
+
+ /**
+ * Called when an unhandled input event also has not been handled by the user provided
+ * callback. This is the last chance to handle the unhandled input event in the TvView.
+ *
+ * @param event The input event.
+ * @return If you handled the event, return {@code true}. If you want to allow the event to be
+ * handled by the next receiver, return {@code false}.
+ */
+    public boolean onUnhandledInputEvent(InputEvent event) {
+        // Default implementation consumes nothing; subclasses may override.
+        return false;
+    }
+
+ /**
+ * Registers a callback to be invoked when an input event is not handled by the bound TV input.
+ *
+ * @param listener The callback to be invoked when the unhandled input event is received.
+ */
+    public void setOnUnhandledInputEventListener(OnUnhandledInputEventListener listener) {
+        // A null listener removes the previous registration.
+        mOnUnhandledInputEventListener = listener;
+    }
+
+    @Override
+    public boolean dispatchKeyEvent(KeyEvent event) {
+        if (super.dispatchKeyEvent(event)) {
+            return true;
+        }
+        if (DEBUG) Log.d(TAG, "dispatchKeyEvent(" + event + ")");
+        return dispatchInputEventToSession(event);
+    }
+
+    @Override
+    public boolean dispatchTouchEvent(MotionEvent event) {
+        if (super.dispatchTouchEvent(event)) {
+            return true;
+        }
+        if (DEBUG) Log.d(TAG, "dispatchTouchEvent(" + event + ")");
+        return dispatchInputEventToSession(event);
+    }
+
+    @Override
+    public boolean dispatchTrackballEvent(MotionEvent event) {
+        if (super.dispatchTrackballEvent(event)) {
+            return true;
+        }
+        if (DEBUG) Log.d(TAG, "dispatchTrackballEvent(" + event + ")");
+        return dispatchInputEventToSession(event);
+    }
+
+    @Override
+    public boolean dispatchGenericMotionEvent(MotionEvent event) {
+        if (super.dispatchGenericMotionEvent(event)) {
+            return true;
+        }
+        if (DEBUG) Log.d(TAG, "dispatchGenericMotionEvent(" + event + ")");
+        return dispatchInputEventToSession(event);
+    }
+
+    /**
+     * Shared tail of the four dispatch overrides above: copies {@code event} and forwards it to
+     * the bound session for asynchronous handling.
+     *
+     * @param event The input event to forward.
+     * @return {@code false} if there is no session or the session reports
+     *         {@link Session#DISPATCH_NOT_HANDLED}; {@code true} otherwise.
+     */
+    private boolean dispatchInputEventToSession(InputEvent event) {
+        if (mSession == null) {
+            return false;
+        }
+        // The copy doubles as the dispatch token so onFinishedInputEvent() can recover it.
+        InputEvent copiedEvent = event.copy();
+        int ret = mSession.dispatchInputEvent(copiedEvent, copiedEvent, mFinishedInputEventCallback,
+                mHandler);
+        return ret != Session.DISPATCH_NOT_HANDLED;
+    }
+
+    @Override
+    public void dispatchWindowFocusChanged(boolean hasFocus) {
+        super.dispatchWindowFocusChanged(hasFocus);
+        // Other app may have shown its own main TvView.
+        // Set main again to regain main session.
+        synchronized (sMainTvViewLock) {
+            // Skipped silently when CHANGE_HDMI_CEC_ACTIVE_SOURCE is not granted.
+            if (hasFocus && this == sMainTvView.get() && mSession != null
+                    && checkChangeHdmiCecActiveSourcePermission()) {
+                mSession.setMain();
+            }
+        }
+    }
+
+    @Override
+    protected void onAttachedToWindow() {
+        super.onAttachedToWindow();
+        // The session overlay view can only exist while attached to a window.
+        createSessionOverlayView();
+    }
+
+    @Override
+    protected void onDetachedFromWindow() {
+        // Remove the overlay before the window goes away, then let the superclass detach.
+        removeSessionOverlayView();
+        super.onDetachedFromWindow();
+    }
+
+    @Override
+    protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
+        if (DEBUG) {
+            Log.d(TAG, "onLayout (left=" + left + ", top=" + top + ", right=" + right
+                    + ", bottom=" + bottom + ",)");
+        }
+        if (mUseRequestedSurfaceLayout) {
+            // Honor the explicit surface bounds stored in the mSurfaceView* fields.
+            mSurfaceView.layout(mSurfaceViewLeft, mSurfaceViewTop, mSurfaceViewRight,
+                    mSurfaceViewBottom);
+        } else {
+            // Default: the surface fills this view's bounds.
+            mSurfaceView.layout(0, 0, right - left, bottom - top);
+        }
+    }
+
+    @Override
+    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
+        // Measure based on the SurfaceView child and propagate its measured state.
+        mSurfaceView.measure(widthMeasureSpec, heightMeasureSpec);
+        int width = mSurfaceView.getMeasuredWidth();
+        int height = mSurfaceView.getMeasuredHeight();
+        int childState = mSurfaceView.getMeasuredState();
+        setMeasuredDimension(resolveSizeAndState(width, widthMeasureSpec, childState),
+                resolveSizeAndState(height, heightMeasureSpec,
+                        childState << MEASURED_HEIGHT_STATE_SHIFT));
+    }
+
+    @Override
+    public boolean gatherTransparentRegion(Region region) {
+        // Unless the surface sits above the window, carve this view's frame out of the window's
+        // opaque content so the surface below can show through.
+        if (mWindowZOrder != ZORDER_ON_TOP && region != null) {
+            int width = getWidth();
+            int height = getHeight();
+            if (width > 0 && height > 0) {
+                // Idiomatic Java array declaration (was C-style "int location[]").
+                int[] location = new int[2];
+                getLocationInWindow(location);
+                int left = location[0];
+                int top = location[1];
+                region.op(left, top, left + width, top + height, Region.Op.UNION);
+            }
+        }
+        return super.gatherTransparentRegion(region);
+    }
+
+    @Override
+    public void draw(Canvas canvas) {
+        if (mWindowZOrder != ZORDER_ON_TOP) {
+            // Punch a hole so that the underlying overlay view and surface can be shown.
+            // PorterDuff.Mode.CLEAR writes fully transparent pixels.
+            canvas.drawColor(0, PorterDuff.Mode.CLEAR);
+        }
+        super.draw(canvas);
+    }
+
+    @Override
+    protected void dispatchDraw(Canvas canvas) {
+        if (mWindowZOrder != ZORDER_ON_TOP) {
+            // Punch a hole so that the underlying overlay view and surface can be shown.
+            // PorterDuff.Mode.CLEAR writes fully transparent pixels.
+            canvas.drawColor(0, PorterDuff.Mode.CLEAR);
+        }
+        super.dispatchDraw(canvas);
+    }
+
+    @Override
+    protected void onVisibilityChanged(View changedView, int visibility) {
+        super.onVisibilityChanged(changedView, visibility);
+        // Keep the surface in sync and tear down the overlay while not visible.
+        mSurfaceView.setVisibility(visibility);
+        if (visibility == View.VISIBLE) {
+            createSessionOverlayView();
+        } else {
+            removeSessionOverlayView();
+        }
+    }
+
+    /** Discards any existing SurfaceView and installs a fresh one wired to this view. */
+    private void resetSurfaceView() {
+        if (mSurfaceView != null) {
+            mSurfaceView.getHolder().removeCallback(mSurfaceHolderCallback);
+            removeView(mSurfaceView);
+        }
+        mSurface = null;
+        mSurfaceView = new SurfaceView(getContext(), mAttrs, mDefStyleAttr) {
+            @Override
+            protected void updateSurface() {
+                // Keep the session overlay aligned with the surface as it moves/resizes.
+                super.updateSurface();
+                relayoutSessionOverlayView();
+            }};
+        // The surface view's content should be treated as secure all the time.
+        mSurfaceView.setSecure(true);
+        mSurfaceView.getHolder().addCallback(mSurfaceHolderCallback);
+        if (mWindowZOrder == ZORDER_MEDIA_OVERLAY) {
+            mSurfaceView.setZOrderMediaOverlay(true);
+        } else if (mWindowZOrder == ZORDER_ON_TOP) {
+            mSurfaceView.setZOrderOnTop(true);
+        }
+        addView(mSurfaceView);
+    }
+
+    /** Forwards the surface to the session; dropped silently when no session is connected. */
+    private void setSessionSurface(Surface surface) {
+        if (mSession != null) {
+            mSession.setSurface(surface);
+        }
+    }
+
+    /** Forwards surface geometry to the session; dropped silently when none is connected. */
+    private void dispatchSurfaceChanged(int format, int width, int height) {
+        if (mSession != null) {
+            mSession.dispatchSurfaceChanged(format, width, height);
+        }
+    }
+
+    private void createSessionOverlayView() {
+        // Overlay views are only supported in the default ZORDER_MEDIA mode, require a session
+        // and window attachment, and at most one exists at a time.
+        if (mSession == null || !isAttachedToWindow()
+                || mOverlayViewCreated || mWindowZOrder != ZORDER_MEDIA) {
+            return;
+        }
+        mOverlayViewFrame = getViewFrameOnScreen();
+        mSession.createOverlayView(this, mOverlayViewFrame);
+        mOverlayViewCreated = true;
+    }
+
+    private void removeSessionOverlayView() {
+        // No-op when the overlay was never created (or the session is gone).
+        if (mSession == null || !mOverlayViewCreated) {
+            return;
+        }
+        mSession.removeOverlayView();
+        mOverlayViewCreated = false;
+        mOverlayViewFrame = null;
+    }
+
+    private void relayoutSessionOverlayView() {
+        if (mSession == null || !isAttachedToWindow() || !mOverlayViewCreated
+                || mWindowZOrder != ZORDER_MEDIA) {
+            return;
+        }
+        Rect viewFrame = getViewFrameOnScreen();
+        // Skip when the frame is unchanged to avoid redundant relayout requests.
+        if (viewFrame.equals(mOverlayViewFrame)) {
+            return;
+        }
+        mSession.relayoutOverlayView(viewFrame);
+        mOverlayViewFrame = viewFrame;
+    }
+
+    /** Returns this view's visible frame after applying its transformation matrix. */
+    private Rect getViewFrameOnScreen() {
+        // Start from the visible rect in window coordinates, map it through this view's
+        // transformation matrix, then round back to integer coordinates.
+        Rect frame = new Rect();
+        getGlobalVisibleRect(frame);
+        RectF transformed = new RectF(frame);
+        getMatrix().mapRect(transformed);
+        transformed.round(frame);
+        return frame;
+    }
+
+    /** Returns whether this process holds CHANGE_HDMI_CEC_ACTIVE_SOURCE. */
+    private boolean checkChangeHdmiCecActiveSourcePermission() {
+        int result = getContext().checkSelfPermission(
+                android.Manifest.permission.CHANGE_HDMI_CEC_ACTIVE_SOURCE);
+        return result == PackageManager.PERMISSION_GRANTED;
+    }
+
+ /**
+ * Callback used to receive time shift position changes.
+ */
+    public abstract static class TimeShiftPositionCallback {
+
+        /**
+         * This is called when the start position for time shifting has changed.
+         *
+         * <p>The start position for time shifting indicates the earliest possible time the user can
+         * seek to. Initially this is equivalent to the time when the underlying TV input starts
+         * recording. Later it may be adjusted because there is insufficient space or the duration
+         * of recording is limited. The application must not allow the user to seek to a position
+         * earlier than the start position.
+         *
+         * <p>For playback of a recorded program initiated by {@link #timeShiftPlay(String, Uri)},
+         * the start position is the time when playback starts. It does not change.
+         *
+         * @param inputId The ID of the TV input bound to this view.
+         * @param timeMs The start position for time shifting, in milliseconds since the epoch.
+         */
+        public void onTimeShiftStartPositionChanged(String inputId, long timeMs) {
+            // No-op by default; subclasses override to track the seekable range.
+        }
+
+        /**
+         * This is called when the current position for time shifting has changed.
+         *
+         * <p>The current position for time shifting is the same as the current position of
+         * playback. During playback, the current position changes continuously. When paused, it
+         * does not change.
+         *
+         * <p>Note that {@code timeMs} is wall-clock time.
+         *
+         * @param inputId The ID of the TV input bound to this view.
+         * @param timeMs The current position for time shifting, in milliseconds since the epoch.
+         */
+        public void onTimeShiftCurrentPositionChanged(String inputId, long timeMs) {
+            // No-op by default; subclasses override to track the playback position.
+        }
+    }
+
+ /**
+ * Callback used to receive various status updates on the {@link TvView}.
+ */
+ public abstract static class TvInputCallback {
+
+ /**
+ * This is invoked when an error occurred while establishing a connection to the underlying
+ * TV input.
+ *
+ * @param inputId The ID of the TV input bound to this view.
+ */
+        public void onConnectionFailed(String inputId) {
+            // No-op by default; subclasses override to handle connection failures.
+        }
+
+ /**
+ * This is invoked when the existing connection to the underlying TV input is lost.
+ *
+ * @param inputId The ID of the TV input bound to this view.
+ */
+        public void onDisconnected(String inputId) {
+            // No-op by default; subclasses override to handle lost connections.
+        }
+
+ /**
+ * This is invoked when the channel of this TvView is changed by the underlying TV input
+ * without any {@link TvView#tune} request.
+ *
+ * @param inputId The ID of the TV input bound to this view.
+ * @param channelUri The URI of a channel.
+ */
+        public void onChannelRetuned(String inputId, Uri channelUri) {
+            // No-op by default; subclasses override to react to input-initiated retunes.
+        }
+
+ /**
+ * This is called when the track information has been changed.
+ *
+ * @param inputId The ID of the TV input bound to this view.
+ * @param tracks A list which includes track information.
+ */
+        public void onTracksChanged(String inputId, List<TvTrackInfo> tracks) {
+            // No-op by default; subclasses override to consume track information.
+        }
+
+ /**
+ * This is called when there is a change on the selected tracks.
+ *
+ * @param inputId The ID of the TV input bound to this view.
+ * @param type The type of the track selected. The type can be
+ * {@link TvTrackInfo#TYPE_AUDIO}, {@link TvTrackInfo#TYPE_VIDEO} or
+ * {@link TvTrackInfo#TYPE_SUBTITLE}.
+ * @param trackId The ID of the track selected.
+ */
+        public void onTrackSelected(String inputId, int type, String trackId) {
+            // No-op by default; subclasses override to track selection changes.
+        }
+
+ /**
+ * This is invoked when the video size has been changed. It is also called when the first
+ * time video size information becomes available after this view is tuned to a specific
+ * channel.
+ *
+ * @param inputId The ID of the TV input bound to this view.
+ * @param width The width of the video.
+ * @param height The height of the video.
+ */
+        public void onVideoSizeChanged(String inputId, int width, int height) {
+            // No-op by default; subclasses override to react to video size changes.
+        }
+
+ /**
+ * This is called when the video is available, so the TV input starts the playback.
+ *
+ * @param inputId The ID of the TV input bound to this view.
+ */
+ public void onVideoAvailable(String inputId) {
+ }
+
+ /**
+ * This is called when the video is not available, so the TV input stops the playback.
+ *
+ * @param inputId The ID of the TV input bound to this view.
+ * @param reason The reason why the TV input stopped the playback:
+ * <ul>
+ * <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_UNKNOWN}
+ * <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_TUNING}
+ * <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_WEAK_SIGNAL}
+ * <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_BUFFERING}
+ * <li>{@link TvInputManager#VIDEO_UNAVAILABLE_REASON_AUDIO_ONLY}
+ * </ul>
+ */
+ public void onVideoUnavailable(
+ String inputId, @TvInputManager.VideoUnavailableReason int reason) {
+ }
+
+ /**
+ * This is called when the current program content turns out to be allowed to watch since
+ * its content rating is not blocked by parental controls.
+ *
+ * @param inputId The ID of the TV input bound to this view.
+ */
+ public void onContentAllowed(String inputId) {
+ }
+
+ /**
+ * This is called when the current program content turns out to be not allowed to watch
+ * since its content rating is blocked by parental controls.
+ *
+ * @param inputId The ID of the TV input bound to this view.
+ * @param rating The content rating of the blocked program.
+ */
+ public void onContentBlocked(String inputId, TvContentRating rating) {
+ }
+
+ /**
+ * This is invoked when a custom event from the bound TV input is sent to this view.
+ *
+ * @param inputId The ID of the TV input bound to this view.
+ * @param eventType The type of the event.
+ * @param eventArgs Optional arguments of the event.
+ * @hide
+ */
+ @SystemApi
+ public void onEvent(String inputId, String eventType, Bundle eventArgs) {
+ }
+
+ /**
+ * This is called when the time shift status is changed.
+ *
+ * @param inputId The ID of the TV input bound to this view.
+ * @param status The current time shift status. Should be one of the followings.
+ * <ul>
+ * <li>{@link TvInputManager#TIME_SHIFT_STATUS_UNSUPPORTED}
+ * <li>{@link TvInputManager#TIME_SHIFT_STATUS_UNAVAILABLE}
+ * <li>{@link TvInputManager#TIME_SHIFT_STATUS_AVAILABLE}
+ * </ul>
+ */
+ public void onTimeShiftStatusChanged(
+ String inputId, @TvInputManager.TimeShiftStatus int status) {
+ }
+ }
+
    /**
     * Interface definition for a callback to be invoked when the unhandled input event is received.
     */
    public interface OnUnhandledInputEventListener {
        /**
         * Called when an input event was not handled by the bound TV input.
         *
         * <p>This is called asynchronously from where the event is dispatched. It gives the host
         * application a chance to handle the unhandled input events.
         *
         * @param event The input event.
         * @return If you handled the event, return {@code true}. If you want to allow the event to
         *         be handled by the next receiver, return {@code false}.
         */
        // NOTE(review): implementations should be quick — returning false promptly lets the
        // event continue to the next receiver without delaying input dispatch.
        boolean onUnhandledInputEvent(InputEvent event);
    }
+
+ private class MySessionCallback extends SessionCallback {
+ final String mInputId;
+ Uri mChannelUri;
+ Bundle mTuneParams;
+ Uri mRecordedProgramUri;
+
+ MySessionCallback(String inputId, Uri channelUri, Bundle tuneParams) {
+ mInputId = inputId;
+ mChannelUri = channelUri;
+ mTuneParams = tuneParams;
+ }
+
+ MySessionCallback(String inputId, Uri recordedProgramUri) {
+ mInputId = inputId;
+ mRecordedProgramUri = recordedProgramUri;
+ }
+
+ @Override
+ public void onSessionCreated(Session session) {
+ if (DEBUG) {
+ Log.d(TAG, "onSessionCreated()");
+ }
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onSessionCreated - session already created");
+ // This callback is obsolete.
+ if (session != null) {
+ session.release();
+ }
+ return;
+ }
+ mSession = session;
+ if (session != null) {
+ // Sends the pending app private commands first.
+ for (Pair<String, Bundle> command : mPendingAppPrivateCommands) {
+ mSession.sendAppPrivateCommand(command.first, command.second);
+ }
+ mPendingAppPrivateCommands.clear();
+
+ synchronized (sMainTvViewLock) {
+ if (hasWindowFocus() && TvView.this == sMainTvView.get()
+ && checkChangeHdmiCecActiveSourcePermission()) {
+ mSession.setMain();
+ }
+ }
+ // mSurface may not be ready yet as soon as starting an application.
+ // In the case, we don't send Session.setSurface(null) unnecessarily.
+ // setSessionSurface will be called in surfaceCreated.
+ if (mSurface != null) {
+ setSessionSurface(mSurface);
+ if (mSurfaceChanged) {
+ dispatchSurfaceChanged(mSurfaceFormat, mSurfaceWidth, mSurfaceHeight);
+ }
+ }
+ createSessionOverlayView();
+ if (mStreamVolume != null) {
+ mSession.setStreamVolume(mStreamVolume);
+ }
+ if (mCaptionEnabled != null) {
+ mSession.setCaptionEnabled(mCaptionEnabled);
+ }
+ if (mChannelUri != null) {
+ mSession.tune(mChannelUri, mTuneParams);
+ } else {
+ mSession.timeShiftPlay(mRecordedProgramUri);
+ }
+ ensurePositionTracking();
+ } else {
+ mSessionCallback = null;
+ if (mCallback != null) {
+ mCallback.onConnectionFailed(mInputId);
+ }
+ }
+ }
+
+ @Override
+ public void onSessionReleased(Session session) {
+ if (DEBUG) {
+ Log.d(TAG, "onSessionReleased()");
+ }
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onSessionReleased - session not created");
+ return;
+ }
+ mOverlayViewCreated = false;
+ mOverlayViewFrame = null;
+ mSessionCallback = null;
+ mSession = null;
+ if (mCallback != null) {
+ mCallback.onDisconnected(mInputId);
+ }
+ }
+
+ @Override
+ public void onChannelRetuned(Session session, Uri channelUri) {
+ if (DEBUG) {
+ Log.d(TAG, "onChannelChangedByTvInput(" + channelUri + ")");
+ }
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onChannelRetuned - session not created");
+ return;
+ }
+ if (mCallback != null) {
+ mCallback.onChannelRetuned(mInputId, channelUri);
+ }
+ }
+
+ @Override
+ public void onTracksChanged(Session session, List<TvTrackInfo> tracks) {
+ if (DEBUG) {
+ Log.d(TAG, "onTracksChanged(" + tracks + ")");
+ }
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onTracksChanged - session not created");
+ return;
+ }
+ if (mCallback != null) {
+ mCallback.onTracksChanged(mInputId, tracks);
+ }
+ }
+
+ @Override
+ public void onTrackSelected(Session session, int type, String trackId) {
+ if (DEBUG) {
+ Log.d(TAG, "onTrackSelected(type=" + type + ", trackId=" + trackId + ")");
+ }
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onTrackSelected - session not created");
+ return;
+ }
+ if (mCallback != null) {
+ mCallback.onTrackSelected(mInputId, type, trackId);
+ }
+ }
+
+ @Override
+ public void onVideoSizeChanged(Session session, int width, int height) {
+ if (DEBUG) {
+ Log.d(TAG, "onVideoSizeChanged()");
+ }
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onVideoSizeChanged - session not created");
+ return;
+ }
+ if (mCallback != null) {
+ mCallback.onVideoSizeChanged(mInputId, width, height);
+ }
+ }
+
+ @Override
+ public void onVideoAvailable(Session session) {
+ if (DEBUG) {
+ Log.d(TAG, "onVideoAvailable()");
+ }
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onVideoAvailable - session not created");
+ return;
+ }
+ if (mCallback != null) {
+ mCallback.onVideoAvailable(mInputId);
+ }
+ }
+
+ @Override
+ public void onVideoUnavailable(Session session, int reason) {
+ if (DEBUG) {
+ Log.d(TAG, "onVideoUnavailable(reason=" + reason + ")");
+ }
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onVideoUnavailable - session not created");
+ return;
+ }
+ if (mCallback != null) {
+ mCallback.onVideoUnavailable(mInputId, reason);
+ }
+ }
+
+ @Override
+ public void onContentAllowed(Session session) {
+ if (DEBUG) {
+ Log.d(TAG, "onContentAllowed()");
+ }
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onContentAllowed - session not created");
+ return;
+ }
+ if (mCallback != null) {
+ mCallback.onContentAllowed(mInputId);
+ }
+ }
+
+ @Override
+ public void onContentBlocked(Session session, TvContentRating rating) {
+ if (DEBUG) {
+ Log.d(TAG, "onContentBlocked(rating=" + rating + ")");
+ }
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onContentBlocked - session not created");
+ return;
+ }
+ if (mCallback != null) {
+ mCallback.onContentBlocked(mInputId, rating);
+ }
+ }
+
+ @Override
+ public void onLayoutSurface(Session session, int left, int top, int right, int bottom) {
+ if (DEBUG) {
+ Log.d(TAG, "onLayoutSurface (left=" + left + ", top=" + top + ", right="
+ + right + ", bottom=" + bottom + ",)");
+ }
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onLayoutSurface - session not created");
+ return;
+ }
+ mSurfaceViewLeft = left;
+ mSurfaceViewTop = top;
+ mSurfaceViewRight = right;
+ mSurfaceViewBottom = bottom;
+ mUseRequestedSurfaceLayout = true;
+ requestLayout();
+ }
+
+ @Override
+ public void onSessionEvent(Session session, String eventType, Bundle eventArgs) {
+ if (DEBUG) {
+ Log.d(TAG, "onSessionEvent(" + eventType + ")");
+ }
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onSessionEvent - session not created");
+ return;
+ }
+ if (mCallback != null) {
+ mCallback.onEvent(mInputId, eventType, eventArgs);
+ }
+ }
+
+ @Override
+ public void onTimeShiftStatusChanged(Session session, int status) {
+ if (DEBUG) {
+ Log.d(TAG, "onTimeShiftStatusChanged()");
+ }
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onTimeShiftStatusChanged - session not created");
+ return;
+ }
+ if (mCallback != null) {
+ mCallback.onTimeShiftStatusChanged(mInputId, status);
+ }
+ }
+
+ @Override
+ public void onTimeShiftStartPositionChanged(Session session, long timeMs) {
+ if (DEBUG) {
+ Log.d(TAG, "onTimeShiftStartPositionChanged()");
+ }
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onTimeShiftStartPositionChanged - session not created");
+ return;
+ }
+ if (mTimeShiftPositionCallback != null) {
+ mTimeShiftPositionCallback.onTimeShiftStartPositionChanged(mInputId, timeMs);
+ }
+ }
+
+ @Override
+ public void onTimeShiftCurrentPositionChanged(Session session, long timeMs) {
+ if (DEBUG) {
+ Log.d(TAG, "onTimeShiftCurrentPositionChanged()");
+ }
+ if (this != mSessionCallback) {
+ Log.w(TAG, "onTimeShiftCurrentPositionChanged - session not created");
+ return;
+ }
+ if (mTimeShiftPositionCallback != null) {
+ mTimeShiftPositionCallback.onTimeShiftCurrentPositionChanged(mInputId, timeMs);
+ }
+ }
+ }
+}