author     Justin Klaassen <justinklaassen@google.com>    2017-09-15 17:58:39 -0400
committer  Justin Klaassen <justinklaassen@google.com>    2017-09-15 17:58:39 -0400
commit     10d07c88d69cc64f73a069163e7ea5ba2519a099 (patch)
tree       8dbd149eb350320a29c3d10e7ad3201de1c5cbee /android/hardware/camera2
parent     677516fb6b6f207d373984757d3d9450474b6b00 (diff)
download   android-28-10d07c88d69cc64f73a069163e7ea5ba2519a099.tar.gz
Import Android SDK Platform PI [4335822]
/google/data/ro/projects/android/fetch_artifact \
    --bid 4335822 \
    --target sdk_phone_armv7-win_sdk \
    sdk-repo-linux-sources-4335822.zip

AndroidVersion.ApiLevel has been modified to appear as 28

Change-Id: Ic8f04be005a71c2b9abeaac754d8da8d6f9a2c32
Diffstat (limited to 'android/hardware/camera2')
-rw-r--r--  android/hardware/camera2/CameraAccessException.java  187
-rw-r--r--  android/hardware/camera2/CameraCaptureSession.java  1095
-rw-r--r--  android/hardware/camera2/CameraCharacteristics.java  3042
-rw-r--r--  android/hardware/camera2/CameraConstrainedHighSpeedCaptureSession.java  131
-rw-r--r--  android/hardware/camera2/CameraDevice.java  1081
-rw-r--r--  android/hardware/camera2/CameraManager.java  1230
-rw-r--r--  android/hardware/camera2/CameraMetadata.java  2872
-rw-r--r--  android/hardware/camera2/CaptureFailure.java  158
-rw-r--r--  android/hardware/camera2/CaptureRequest.java  2901
-rw-r--r--  android/hardware/camera2/CaptureResult.java  4304
-rw-r--r--  android/hardware/camera2/DngCreator.java  678
-rw-r--r--  android/hardware/camera2/TotalCaptureResult.java  114
-rw-r--r--  android/hardware/camera2/dispatch/ArgumentReplacingDispatcher.java  85
-rw-r--r--  android/hardware/camera2/dispatch/BroadcastDispatcher.java  64
-rw-r--r--  android/hardware/camera2/dispatch/Dispatchable.java  35
-rw-r--r--  android/hardware/camera2/dispatch/DuckTypingDispatcher.java  55
-rw-r--r--  android/hardware/camera2/dispatch/HandlerDispatcher.java  85
-rw-r--r--  android/hardware/camera2/dispatch/InvokeDispatcher.java  55
-rw-r--r--  android/hardware/camera2/dispatch/MethodNameInvoker.java  97
-rw-r--r--  android/hardware/camera2/dispatch/NullDispatcher.java  38
-rw-r--r--  android/hardware/camera2/impl/CallbackProxies.java  202
-rw-r--r--  android/hardware/camera2/impl/CameraCaptureSessionCore.java  64
-rw-r--r--  android/hardware/camera2/impl/CameraCaptureSessionImpl.java  804
-rw-r--r--  android/hardware/camera2/impl/CameraConstrainedHighSpeedCaptureSessionImpl.java  313
-rw-r--r--  android/hardware/camera2/impl/CameraDeviceImpl.java  2227
-rw-r--r--  android/hardware/camera2/impl/CameraMetadataNative.java  1338
-rw-r--r--  android/hardware/camera2/impl/CaptureResultExtras.java  115
-rw-r--r--  android/hardware/camera2/impl/GetCommand.java  33
-rw-r--r--  android/hardware/camera2/impl/ICameraDeviceUserWrapper.java  228
-rw-r--r--  android/hardware/camera2/impl/PublicKey.java  32
-rw-r--r--  android/hardware/camera2/impl/SetCommand.java  33
-rw-r--r--  android/hardware/camera2/impl/SyntheticKey.java  36
-rw-r--r--  android/hardware/camera2/legacy/BurstHolder.java  90
-rw-r--r--  android/hardware/camera2/legacy/CameraDeviceState.java  362
-rw-r--r--  android/hardware/camera2/legacy/CameraDeviceUserShim.java  727
-rw-r--r--  android/hardware/camera2/legacy/CaptureCollector.java  673
-rw-r--r--  android/hardware/camera2/legacy/GLThreadManager.java  264
-rw-r--r--  android/hardware/camera2/legacy/LegacyCameraDevice.java  850
-rw-r--r--  android/hardware/camera2/legacy/LegacyExceptionUtils.java  138
-rw-r--r--  android/hardware/camera2/legacy/LegacyFaceDetectMapper.java  263
-rw-r--r--  android/hardware/camera2/legacy/LegacyFocusStateMapper.java  321
-rw-r--r--  android/hardware/camera2/legacy/LegacyMetadataMapper.java  1511
-rw-r--r--  android/hardware/camera2/legacy/LegacyRequest.java  67
-rw-r--r--  android/hardware/camera2/legacy/LegacyRequestMapper.java  687
-rw-r--r--  android/hardware/camera2/legacy/LegacyResultMapper.java  520
-rw-r--r--  android/hardware/camera2/legacy/ParameterUtils.java  1006
-rw-r--r--  android/hardware/camera2/legacy/PerfMeasurement.java  308
-rw-r--r--  android/hardware/camera2/legacy/RequestHandlerThread.java  113
-rw-r--r--  android/hardware/camera2/legacy/RequestHolder.java  283
-rw-r--r--  android/hardware/camera2/legacy/RequestQueue.java  174
-rw-r--r--  android/hardware/camera2/legacy/RequestThreadManager.java  1098
-rw-r--r--  android/hardware/camera2/legacy/SizeAreaComparator.java  72
-rw-r--r--  android/hardware/camera2/legacy/SurfaceTextureRenderer.java  832
-rw-r--r--  android/hardware/camera2/marshal/MarshalHelpers.java  243
-rw-r--r--  android/hardware/camera2/marshal/MarshalQueryable.java  63
-rw-r--r--  android/hardware/camera2/marshal/MarshalRegistry.java  144
-rw-r--r--  android/hardware/camera2/marshal/Marshaler.java  148
-rw-r--r--  android/hardware/camera2/marshal/impl/MarshalQueryableArray.java  179
-rw-r--r--  android/hardware/camera2/marshal/impl/MarshalQueryableBlackLevelPattern.java  76
-rw-r--r--  android/hardware/camera2/marshal/impl/MarshalQueryableBoolean.java  67
-rw-r--r--  android/hardware/camera2/marshal/impl/MarshalQueryableColorSpaceTransform.java  84
-rw-r--r--  android/hardware/camera2/marshal/impl/MarshalQueryableEnum.java  220
-rw-r--r--  android/hardware/camera2/marshal/impl/MarshalQueryableHighSpeedVideoConfiguration.java  85
-rw-r--r--  android/hardware/camera2/marshal/impl/MarshalQueryableMeteringRectangle.java  88
-rw-r--r--  android/hardware/camera2/marshal/impl/MarshalQueryableNativeByteToInteger.java  70
-rw-r--r--  android/hardware/camera2/marshal/impl/MarshalQueryablePair.java  158
-rw-r--r--  android/hardware/camera2/marshal/impl/MarshalQueryableParcelable.java  190
-rw-r--r--  android/hardware/camera2/marshal/impl/MarshalQueryablePrimitive.java  183
-rw-r--r--  android/hardware/camera2/marshal/impl/MarshalQueryableRange.java  136
-rw-r--r--  android/hardware/camera2/marshal/impl/MarshalQueryableRect.java  77
-rw-r--r--  android/hardware/camera2/marshal/impl/MarshalQueryableReprocessFormatsMap.java  131
-rw-r--r--  android/hardware/camera2/marshal/impl/MarshalQueryableRggbChannelVector.java  75
-rw-r--r--  android/hardware/camera2/marshal/impl/MarshalQueryableSize.java  68
-rw-r--r--  android/hardware/camera2/marshal/impl/MarshalQueryableSizeF.java  72
-rw-r--r--  android/hardware/camera2/marshal/impl/MarshalQueryableStreamConfiguration.java  80
-rw-r--r--  android/hardware/camera2/marshal/impl/MarshalQueryableStreamConfigurationDuration.java  90
-rw-r--r--  android/hardware/camera2/marshal/impl/MarshalQueryableString.java  112
-rw-r--r--  android/hardware/camera2/params/BlackLevelPattern.java  147
-rw-r--r--  android/hardware/camera2/params/ColorSpaceTransform.java  299
-rw-r--r--  android/hardware/camera2/params/Face.java  265
-rw-r--r--  android/hardware/camera2/params/HighSpeedVideoConfiguration.java  172
-rw-r--r--  android/hardware/camera2/params/InputConfiguration.java  128
-rw-r--r--  android/hardware/camera2/params/LensShadingMap.java  289
-rw-r--r--  android/hardware/camera2/params/MeteringRectangle.java  269
-rw-r--r--  android/hardware/camera2/params/OutputConfiguration.java  613
-rw-r--r--  android/hardware/camera2/params/ReprocessFormatsMap.java  264
-rw-r--r--  android/hardware/camera2/params/RggbChannelVector.java  223
-rw-r--r--  android/hardware/camera2/params/StreamConfiguration.java  171
-rw-r--r--  android/hardware/camera2/params/StreamConfigurationDuration.java  151
-rw-r--r--  android/hardware/camera2/params/StreamConfigurationMap.java  1688
-rw-r--r--  android/hardware/camera2/params/TonemapCurve.java  351
-rw-r--r--  android/hardware/camera2/params/VendorTagDescriptor.java  67
-rw-r--r--  android/hardware/camera2/params/VendorTagDescriptorCache.java  67
-rw-r--r--  android/hardware/camera2/utils/ArrayUtils.java  184
-rw-r--r--  android/hardware/camera2/utils/CloseableLock.java  346
-rw-r--r--  android/hardware/camera2/utils/HashCodeHelpers.java  100
-rw-r--r--  android/hardware/camera2/utils/ListUtils.java  104
-rw-r--r--  android/hardware/camera2/utils/LongParcelable.java  74
-rw-r--r--  android/hardware/camera2/utils/ParamsUtils.java  200
-rw-r--r--  android/hardware/camera2/utils/SizeAreaComparator.java  71
-rw-r--r--  android/hardware/camera2/utils/SubmitInfo.java  106
-rw-r--r--  android/hardware/camera2/utils/SurfaceUtils.java  196
-rw-r--r--  android/hardware/camera2/utils/TaskDrainer.java  214
-rw-r--r--  android/hardware/camera2/utils/TaskSingleDrainer.java  104
-rw-r--r--  android/hardware/camera2/utils/TypeReference.java  437
-rw-r--r--  android/hardware/camera2/utils/UncheckedThrow.java  52
106 files changed, 44282 insertions, 0 deletions
diff --git a/android/hardware/camera2/CameraAccessException.java b/android/hardware/camera2/CameraAccessException.java
new file mode 100644
index 00000000..f9b659c6
--- /dev/null
+++ b/android/hardware/camera2/CameraAccessException.java
@@ -0,0 +1,187 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2;
+
+import android.annotation.NonNull;
+import android.annotation.IntDef;
+import android.util.AndroidException;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+/**
+ * <p><code>CameraAccessException</code> is thrown if a camera device could not
+ * be queried or opened by the {@link CameraManager}, or if the connection to an
+ * opened {@link CameraDevice} is no longer valid.</p>
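+ *
+ * <p>The reason code from {@link #getReason} distinguishes transient failures from
+ * permanent ones. As an illustrative sketch (the {@code manager}, {@code cameraId},
+ * {@code stateCallback}, and {@code handler} names are placeholders for objects the
+ * application already holds):</p>
+ *
+ * <pre><code>
+ * try {
+ *     manager.openCamera(cameraId, stateCallback, handler);
+ * } catch (CameraAccessException e) {
+ *     switch (e.getReason()) {
+ *         case CameraAccessException.CAMERA_IN_USE:
+ *         case CameraAccessException.MAX_CAMERAS_IN_USE:
+ *             // Likely transient: another client holds the camera; retry later.
+ *             break;
+ *         case CameraAccessException.CAMERA_DISABLED:
+ *             // Device policy forbids camera use; report the error to the user.
+ *             break;
+ *         default:
+ *             // CAMERA_DISCONNECTED or CAMERA_ERROR: close and re-open the device.
+ *             break;
+ *     }
+ * }
+ * </code></pre>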
+ *
+ * @see CameraManager
+ * @see CameraDevice
+ */
+public class CameraAccessException extends AndroidException {
+ /**
+ * The camera device is in use already.
+ */
+ public static final int CAMERA_IN_USE = 4;
+
+ /**
+ * The system-wide limit for number of open cameras or camera resources has
+ * been reached, and more camera devices cannot be opened or torch mode
+ * cannot be turned on until previous instances are closed.
+ */
+ public static final int MAX_CAMERAS_IN_USE = 5;
+
+ /**
+ * The camera is disabled due to a device policy, and cannot be opened.
+ *
+ * @see android.app.admin.DevicePolicyManager#setCameraDisabled(android.content.ComponentName, boolean)
+ */
+ public static final int CAMERA_DISABLED = 1;
+
+ /**
+ * The camera device is removable and has been disconnected from the Android
+ * device, or the camera id used with {@link android.hardware.camera2.CameraManager#openCamera}
+ * is no longer valid, or the camera service has shut down the connection due to a
+ * higher-priority access request for the camera device.
+ */
+ public static final int CAMERA_DISCONNECTED = 2;
+
+ /**
+ * The camera device is currently in the error state.
+ *
+ * <p>The camera has failed to open or has failed at a later time
+ * as a result of some non-user interaction. Refer to
+ * {@link CameraDevice.StateCallback#onError} for the exact
+ * nature of the error.</p>
+ *
+ * <p>No further calls to the camera will succeed. Clean up
+ * the camera with {@link CameraDevice#close} and try
+ * handling the error in order to successfully re-open the camera.
+ * </p>
+ *
+ */
+ public static final int CAMERA_ERROR = 3;
+
+ /**
+ * A deprecated HAL version is in use.
+ * @hide
+ */
+ public static final int CAMERA_DEPRECATED_HAL = 1000;
+
+ /** @hide */
+ @Retention(RetentionPolicy.SOURCE)
+ @IntDef(
+ {CAMERA_IN_USE,
+ MAX_CAMERAS_IN_USE,
+ CAMERA_DISABLED,
+ CAMERA_DISCONNECTED,
+ CAMERA_ERROR})
+ public @interface AccessError {};
+
+ // Make the eclipse warning about serializable exceptions go away
+ private static final long serialVersionUID = 5630338637471475675L; // randomly generated
+
+ private final int mReason;
+
+ /**
+ * The reason for the failure to access the camera.
+ *
+ * @see #CAMERA_DISABLED
+ * @see #CAMERA_DISCONNECTED
+ * @see #CAMERA_ERROR
+ */
+ @AccessError
+ public final int getReason() {
+ return mReason;
+ }
+
+ public CameraAccessException(@AccessError int problem) {
+ super(getDefaultMessage(problem));
+ mReason = problem;
+ }
+
+ public CameraAccessException(@AccessError int problem, String message) {
+ super(getCombinedMessage(problem, message));
+ mReason = problem;
+ }
+
+ public CameraAccessException(@AccessError int problem, String message, Throwable cause) {
+ super(getCombinedMessage(problem, message), cause);
+ mReason = problem;
+ }
+
+ public CameraAccessException(@AccessError int problem, Throwable cause) {
+ super(getDefaultMessage(problem), cause);
+ mReason = problem;
+ }
+
+ /**
+ * @hide
+ */
+ public static String getDefaultMessage(@AccessError int problem) {
+ switch (problem) {
+ case CAMERA_IN_USE:
+ return "The camera device is in use already";
+ case MAX_CAMERAS_IN_USE:
+ return "The system-wide limit for number of open cameras has been reached, " +
+ "and more camera devices cannot be opened until previous instances " +
+ "are closed.";
+ case CAMERA_DISCONNECTED:
+ return "The camera device is removable and has been disconnected from the " +
+ "Android device, or the camera service has shut down the connection due " +
+ "to a higher-priority access request for the camera device.";
+ case CAMERA_DISABLED:
+ return "The camera is disabled due to a device policy, and cannot be opened.";
+ case CAMERA_ERROR:
+ return "The camera device is currently in the error state; " +
+ "no further calls to it will succeed.";
+ }
+ return null;
+ }
+
+ private static String getCombinedMessage(@AccessError int problem, String message) {
+ String problemString = getProblemString(problem);
+ return String.format("%s (%d): %s", problemString, problem, message);
+ }
+
+ private static String getProblemString(int problem) {
+ String problemString;
+ switch (problem) {
+ case CAMERA_IN_USE:
+ problemString = "CAMERA_IN_USE";
+ break;
+ case MAX_CAMERAS_IN_USE:
+ problemString = "MAX_CAMERAS_IN_USE";
+ break;
+ case CAMERA_DISCONNECTED:
+ problemString = "CAMERA_DISCONNECTED";
+ break;
+ case CAMERA_DISABLED:
+ problemString = "CAMERA_DISABLED";
+ break;
+ case CAMERA_ERROR:
+ problemString = "CAMERA_ERROR";
+ break;
+ case CAMERA_DEPRECATED_HAL:
+ problemString = "CAMERA_DEPRECATED_HAL";
+ break;
+ default:
+ problemString = "<UNKNOWN ERROR>";
+ }
+ return problemString;
+ }
+
+}
diff --git a/android/hardware/camera2/CameraCaptureSession.java b/android/hardware/camera2/CameraCaptureSession.java
new file mode 100644
index 00000000..da771e48
--- /dev/null
+++ b/android/hardware/camera2/CameraCaptureSession.java
@@ -0,0 +1,1095 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2;
+
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.hardware.camera2.params.OutputConfiguration;
+import android.os.Handler;
+import android.view.Surface;
+
+import java.util.List;
+
+
+/**
+ * A configured capture session for a {@link CameraDevice}, used for capturing images from the
+ * camera or reprocessing images captured from the camera in the same session previously.
+ *
+ * <p>A CameraCaptureSession is created by providing a set of target output surfaces to
+ * {@link CameraDevice#createCaptureSession createCaptureSession}, or by providing an
+ * {@link android.hardware.camera2.params.InputConfiguration} and a set of target output surfaces to
+ * {@link CameraDevice#createReprocessableCaptureSession createReprocessableCaptureSession} for a
+ * reprocessable capture session. Once created, the session is active until a new session is
+ * created by the camera device, or the camera device is closed.</p>
+ *
+ * <p>All capture sessions can be used for capturing images from the camera but only reprocessable
+ * capture sessions can reprocess images captured from the camera in the same session previously.
+ * </p>
+ *
+ * <p>Creating a session is an expensive operation and can take several hundred milliseconds, since
+ * it requires configuring the camera device's internal pipelines and allocating memory buffers for
+ * sending images to the desired targets. Therefore the setup is done asynchronously, and
+ * {@link CameraDevice#createCaptureSession createCaptureSession} and
+ * {@link CameraDevice#createReprocessableCaptureSession createReprocessableCaptureSession} will
+ * send the ready-to-use CameraCaptureSession to the provided listener's
+ * {@link CameraCaptureSession.StateCallback#onConfigured onConfigured} callback. If configuration
+ * cannot be completed, then the
+ * {@link CameraCaptureSession.StateCallback#onConfigureFailed onConfigureFailed} is called, and the
+ * session will not become active.</p>
+ *<!--
+ * <p>Any capture requests (repeating or non-repeating) submitted before the session is ready will
+ * be queued up and will begin capture once the session becomes ready. In case the session cannot be
+ * configured and {@link StateCallback#onConfigureFailed onConfigureFailed} is called, all queued
+ * capture requests are discarded.</p>
+ *-->
+ * <p>If a new session is created by the camera device, then the previous session is closed, and its
+ * associated {@link StateCallback#onClosed onClosed} callback will be invoked. All
+ * of the session methods will throw an IllegalStateException if called once the session is
+ * closed.</p>
+ *
+ * <p>A closed session clears any repeating requests (as if {@link #stopRepeating} had been called),
+ * but will still complete all of its in-progress capture requests as normal, before a newly
+ * created session takes over and reconfigures the camera device.</p>
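+ *
+ * <p>As a minimal sketch of session creation (assuming an already-opened camera
+ * {@code device}, a ready {@code previewSurface}, and a {@code handler}; all three
+ * names are placeholders for objects the application provides):</p>
+ *
+ * <pre><code>
+ * device.createCaptureSession(Arrays.asList(previewSurface),
+ *         new CameraCaptureSession.StateCallback() {
+ *             public void onConfigured(CameraCaptureSession session) {
+ *                 // Session is ready; capture requests may now be submitted.
+ *             }
+ *             public void onConfigureFailed(CameraCaptureSession session) {
+ *                 // The requested set of outputs could not be configured.
+ *             }
+ *         }, handler);
+ * </code></pre>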
+ */
+public abstract class CameraCaptureSession implements AutoCloseable {
+
+ /**
+ * Used to identify invalid session ID.
+ * @hide
+ */
+ public static final int SESSION_ID_NONE = -1;
+
+ /**
+ * Get the camera device that this session is created for.
+ */
+ @NonNull
+ public abstract CameraDevice getDevice();
+
+ /**
+ * <p>Pre-allocate all buffers for an output Surface.</p>
+ *
+ * <p>Normally, the image buffers for a given output Surface are allocated on-demand,
+ * to minimize startup latency and memory overhead.</p>
+ *
+ * <p>However, in some cases, it may be desirable for the buffers to be allocated before
+ * any requests targeting the Surface are actually submitted to the device. Large buffers
+ * may take some time to allocate, which can result in delays in submitting requests until
+ * sufficient buffers are allocated to reach steady-state behavior. Such delays can cause
+ * bursts to take longer than desired, or cause skips or stutters in preview output.</p>
+ *
+ * <p>The prepare() method can be used to perform this preallocation. It may only be called for
+ * a given output Surface before that Surface is used as a target for a request. The number of
+ * buffers allocated is the sum of the count needed by the consumer providing the output
+ * Surface, and the maximum number needed by the camera device to fill its pipeline. Since this
+ * may be a larger number than what is actually required for steady-state operation, using
+ * prepare may result in higher memory consumption than the normal on-demand behavior results
+ * in. Prepare() will also delay the time to first output to a given Surface, in exchange for
+ * smoother frame rate once the allocation is complete.</p>
+ *
+ * <p>For example, an application that creates an
+ * {@link android.media.ImageReader#newInstance ImageReader} with a maxImages argument of 10,
+ * but only uses 3 simultaneous Images at once would normally only cause those 3 images to be
+ * allocated (plus what is needed by the camera device for smooth operation). But using
+ * prepare() on the ImageReader Surface will result in all 10 Images being allocated. So
+ * applications using this method should take care to request only the number of buffers
+ * actually necessary for their application.</p>
+ *
+ * <p>If the same output Surface is used in consecutive sessions (without closing the first
+ * session explicitly), then its already-allocated buffers are carried over, and if it was
+ * used as a target of a capture request in the first session, prepare cannot be called on it
+ * in the second session.</p>
+ *
+ * <p>Once allocation is complete, {@link StateCallback#onSurfacePrepared} will be invoked with
+ * the Surface provided to this method. Between the prepare call and the onSurfacePrepared call,
+ * the Surface provided to prepare must not be used as a target of a CaptureRequest submitted
+ * to this session.</p>
+ *
+ * <p>Note that if 2 surfaces share the same stream via {@link
+ * OutputConfiguration#enableSurfaceSharing} and {@link OutputConfiguration#addSurface},
+ * prepare() only needs to be called on one surface, and {@link
+ * StateCallback#onSurfacePrepared} will be triggered for both surfaces.</p>
+ *
+ * <p>{@link android.hardware.camera2.CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY LEGACY}
+ * devices cannot pre-allocate output buffers; for those devices,
+ * {@link StateCallback#onSurfacePrepared} will be immediately called, and no preallocation is
+ * done.</p>
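+ *
+ * <p>A sketch of the expected call pattern (names are placeholders; the Surface must not
+ * yet have been used as a request target in this session):</p>
+ *
+ * <pre><code>
+ * // Ask the device to pre-allocate buffers for this output.
+ * session.prepare(recordingSurface);
+ * // Wait for StateCallback#onSurfacePrepared(session, recordingSurface);
+ * // only then target recordingSurface from a CaptureRequest.
+ * </code></pre>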
+ *
+ * @param surface the output Surface for which buffers should be pre-allocated. Must be one of
+ * the output Surfaces used to create this session.
+ *
+ * @throws CameraAccessException if the camera device is no longer connected or has
+ * encountered a fatal error
+ * @throws IllegalStateException if this session is no longer active, either because the session
+ * was explicitly closed, a new session has been created
+ * or the camera device has been closed.
+ * @throws IllegalArgumentException if the Surface is invalid, not part of this Session, or has
+ * already been used as a target of a CaptureRequest in this
+ * session or immediately prior sessions.
+ *
+ * @see StateCallback#onSurfacePrepared
+ */
+ public abstract void prepare(@NonNull Surface surface) throws CameraAccessException;
+
+ /**
+ * <p>Pre-allocate at most maxCount buffers for an output Surface.</p>
+ *
+ * <p>Like the {@link #prepare(Surface)} method, this method can be used to allocate output
+ * buffers for a given Surface. However, while the {@link #prepare(Surface)} method allocates
+ * the maximum possible buffer count, this method allocates at most maxCount buffers.</p>
+ *
+ * <p>If maxCount is greater than the possible maximum count (which is the sum of the buffer
+ * count requested by the creator of the Surface and the count requested by the camera device),
+ * only the possible maximum count is allocated, in which case the function acts exactly like
+ * {@link #prepare(Surface)}.</p>
+ *
+ * <p>The restrictions on when this method can be called are the same as for
+ * {@link #prepare(Surface)}.</p>
+ *
+ * <p>Repeated calls to this method are allowed, and a mix of {@link #prepare(Surface)} and
+ * this method is also allowed. Note that after the first call to {@link #prepare(Surface)},
+ * subsequent calls to either prepare method are effectively no-ops. In addition, this method
+ * is not additive in terms of buffer count. This means calling it twice with maxCount = 2
+ * will only allocate 2 buffers, not 4 (assuming the possible maximum is at least 2); to
+ * allocate two buffers on the first call and two more on the second, the application needs
+ * to call prepare(surface, 2) followed by prepare(surface, 4).</p>
+ *
+ * @param maxCount the buffer count to try to allocate. If this is greater than the possible
+ * maximum for this output, the possible maximum is allocated instead. If
+ * maxCount buffers are already allocated, then prepare will do nothing.
+ * @param surface the output Surface for which buffers should be pre-allocated.
+ *
+ * @throws CameraAccessException if the camera device is no longer connected or has
+ * encountered a fatal error.
+ * @throws IllegalStateException if this session is no longer active, either because the
+ * session was explicitly closed, a new session has been created
+ * or the camera device has been closed.
+ * @throws IllegalArgumentException if the Surface is invalid, not part of this Session,
+ * or has already been used as a target of a CaptureRequest in
+ * this session or immediately prior sessions without an
+ * intervening tearDown call.
+ *
+ * @hide
+ */
+ public abstract void prepare(int maxCount, @NonNull Surface surface)
+ throws CameraAccessException;
+
+ /**
+ * <p>Free all buffers allocated for an output Surface.</p>
+ *
+ * <p>Normally, once allocated, the image buffers for a given output Surface remain allocated
+ * for the lifetime of the capture session, to minimize latency of captures and to reduce
+ * memory allocation overhead.</p>
+ *
+ * <p>However, in some cases, it may be desirable for allocated buffers to be freed to reduce
+ * the application's memory consumption, if the particular output Surface will not be used by
+ * the application for some time.</p>
+ *
+ * <p>The tearDown() method can be used to perform this operation. After the call finishes, all
+ * unfilled image buffers will have been freed. Any future use of the target Surface may require
+ * allocation of additional buffers, as if the session had just been created. Buffers being
+ * held by the application (either explicitly as Image objects from ImageReader, or implicitly
+ * as the current texture in a SurfaceTexture or the current contents of a RS Allocation) will
+ * remain valid and allocated even when tearDown is invoked.</p>
+ *
+ * <p>A Surface that has had tearDown() called on it is eligible to have prepare() invoked on it
+ * again even if it was used as a request target before the tearDown() call, as long as it
+ * doesn't get used as a target of a request between the tearDown() and prepare() calls.</p>
+ *
+ * @param surface the output Surface for which buffers should be freed. Must be one of the
+ * output Surfaces used to create this session.
+ *
+ * @throws CameraAccessException if the camera device is no longer connected or has
+ * encountered a fatal error.
+ * @throws IllegalStateException if this session is no longer active, either because the session
+ * was explicitly closed, a new session has been created
+ * or the camera device has been closed.
+ * @throws IllegalArgumentException if the Surface is invalid, not part of this Session, or has
+ * already been used as a target of a CaptureRequest in this
+ * session or immediately prior sessions.
+ *
+ * @hide
+ */
+ public abstract void tearDown(@NonNull Surface surface) throws CameraAccessException;
+
+ /**
+ * <p>Finalize the output configurations that now have their deferred and/or extra Surfaces
+ * included.</p>
+ *
+ * <p>For camera use cases where a preview and other output configurations need to be
+ * configured, it can take some time for the preview Surface to be ready. For example, if the
+ * preview Surface is obtained from {@link android.view.SurfaceView}, the SurfaceView will only
+ * be ready after the UI layout is done, potentially delaying camera startup.</p>
+ *
+ * <p>To speed up camera startup time, the application can configure the
+ * {@link CameraCaptureSession} with the eventual preview size (via
+ * {@link OutputConfiguration#OutputConfiguration(Size,Class) a deferred OutputConfiguration}),
+ * and defer the preview output configuration until the Surface is ready. After the
+ * {@link CameraCaptureSession} is created successfully with this deferred output and other
+ * normal outputs, the application can start submitting requests as long as they do not include
+ * deferred output Surfaces. Once a deferred Surface is ready, the application can add the
+ * Surface to the deferred output configuration with the
+ * {@link OutputConfiguration#addSurface} method, and then update the deferred output
+ * configuration via this method, before it can submit capture requests with this output
+ * target.</p>
+ *
+ * <p>This function can also be called in case where multiple surfaces share the same
+ * OutputConfiguration, and one of the surfaces becomes available after the {@link
+ * CameraCaptureSession} is created. In that case, the application must first create the
+ * OutputConfiguration with the available Surface, then enable future surface sharing via
+ * {@link OutputConfiguration#enableSurfaceSharing}, before creating the CameraCaptureSession.
+ * After the CameraCaptureSession is created, and once the extra Surface becomes available, the
+ * application must then call {@link OutputConfiguration#addSurface} before finalizing the
+ * configuration with this method.</p>
+ *
+ * <p>If the provided OutputConfigurations are unchanged from session creation, this function
+ * call has no effect. This function must only be called once for a particular output
+ * configuration. </p>
+ *
+ * <p>The output Surfaces included by this list of
+ * {@link OutputConfiguration OutputConfigurations} can be used as {@link CaptureRequest}
+ * targets as soon as this call returns.</p>
+ *
+ * <p>This method is not supported by
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY LEGACY}-level devices.</p>
+ *
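+ * <p>A sketch of the deferred-Surface flow described above ({@code previewSize},
+ * {@code jpegConfig}, and the Surfaces are placeholders):</p>
+ *
+ * <pre><code>
+ * // Before session creation: a deferred output for the eventual preview size.
+ * OutputConfiguration deferredConfig =
+ *         new OutputConfiguration(previewSize, SurfaceHolder.class);
+ * device.createCaptureSessionByOutputConfigurations(
+ *         Arrays.asList(deferredConfig, jpegConfig), stateCallback, handler);
+ * // Later, once the preview Surface is ready:
+ * deferredConfig.addSurface(previewSurface);
+ * session.finalizeOutputConfigurations(Arrays.asList(deferredConfig));
+ * // previewSurface may now be used as a CaptureRequest target.
+ * </code></pre>
+ *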
+ * @param outputConfigs a list of {@link OutputConfiguration OutputConfigurations} that
+ * have had {@link OutputConfiguration#addSurface addSurface} invoked with a valid
+ * output Surface after {@link CameraDevice#createCaptureSessionByOutputConfigurations}.
+ * @throws CameraAccessException if the camera device is no longer connected or has encountered
+ * a fatal error.
+ * @throws IllegalStateException if this session is no longer active, either because the session
+ * was explicitly closed, a new session has been created, or the camera device has
+ * been closed.
+ * @throws IllegalArgumentException for invalid output configurations, including ones where the
+ * source of the Surface is no longer valid or the Surface is from an unsupported
+ * source; or if one of the output configurations was already finalized with an
+ * included Surface in a prior call.
+ */
+ public abstract void finalizeOutputConfigurations(
+ List<OutputConfiguration> outputConfigs) throws CameraAccessException;
+
+ /**
+ * <p>Submit a request for an image to be captured by the camera device.</p>
+ *
+ * <p>The request defines all the parameters for capturing the single image,
+ * including sensor, lens, flash, and post-processing settings.</p>
+ *
+ * <p>Each request will produce one {@link CaptureResult} and produce new frames for one or more
+ * target Surfaces, set with the CaptureRequest builder's
+ * {@link CaptureRequest.Builder#addTarget} method. The target surfaces (set with
+ * {@link CaptureRequest.Builder#addTarget}) must be a subset of the surfaces provided when this
+ * capture session was created.</p>
+ *
+ * <p>Multiple regular and reprocess requests can be in progress at once. If there are only
+ * regular requests or reprocess requests in progress, they are processed in first-in,
+ * first-out order. If there are both regular and reprocess requests in progress, regular
+ * requests are processed in first-in, first-out order and reprocess requests are processed in
+ * first-in, first-out order, respectively. However, the processing order of a regular request
+ * and a reprocess request in progress is not specified. In other words, a regular request
+ * will always be processed before regular requests that are submitted later. A reprocess request
+ * will always be processed before reprocess requests that are submitted later. However, a
+ * regular request may not be processed before reprocess requests that are submitted later.</p>
+ *
+ * <p>Requests submitted through this method have higher priority than
+ * those submitted through {@link #setRepeatingRequest} or
+ * {@link #setRepeatingBurst}, and will be processed as soon as the current
+ * repeat/repeatBurst processing completes.</p>
+ *
+ * <p>All capture sessions can be used for capturing images from the camera but only capture
+ * sessions created by
+ * {@link CameraDevice#createReprocessableCaptureSession createReprocessableCaptureSession}
+ * can submit reprocess capture requests. Submitting a reprocess request to a regular capture
+ * session will result in an {@link IllegalArgumentException}.</p>
+ *
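+ * <p>For illustration, a single still capture might be submitted as follows
+ * ({@code jpegSurface} is a placeholder for one of this session's configured outputs):</p>
+ *
+ * <pre><code>
+ * CaptureRequest.Builder builder = session.getDevice()
+ *         .createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
+ * builder.addTarget(jpegSurface);
+ * int sequenceId = session.capture(builder.build(), captureCallback, handler);
+ * </code></pre>
+ *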
+ * @param request the settings for this capture
+ * @param listener The callback object to notify once this request has been
+ * processed. If null, no metadata will be produced for this capture,
+ * although image data will still be produced.
+ * @param handler the handler on which the listener should be invoked, or
+ * {@code null} to use the current thread's {@link android.os.Looper
+ * looper}.
+ *
+ * @return int A unique capture sequence ID used by
+ * {@link CaptureCallback#onCaptureSequenceCompleted}.
+ *
+ * @throws CameraAccessException if the camera device is no longer connected or has
+ * encountered a fatal error
+ * @throws IllegalStateException if this session is no longer active, either because the session
+ * was explicitly closed, a new session has been created
+ * or the camera device has been closed.
+ * @throws IllegalArgumentException if the request targets no Surfaces or Surfaces that are not
+ * configured as outputs for this session; or the request
+ * targets a set of Surfaces that cannot be submitted
+ * simultaneously in a reprocessable capture session; or a
+ * reprocess capture request is submitted in a
+ * non-reprocessable capture session; or the reprocess capture
+ * request was created with a {@link TotalCaptureResult} from
+ * a different session; or the capture targets a Surface in
+ * the middle of being {@link #prepare prepared}; or the
+ * handler is null, the listener is not null, and the calling
+ * thread has no looper.
+ *
+ * @see #captureBurst
+ * @see #setRepeatingRequest
+ * @see #setRepeatingBurst
+ * @see #abortCaptures
+ * @see CameraDevice#createReprocessableCaptureSession
+ */
+ public abstract int capture(@NonNull CaptureRequest request,
+ @Nullable CaptureCallback listener, @Nullable Handler handler)
+ throws CameraAccessException;
+
+ /**
+ * Submit a list of requests to be captured in sequence as a burst. The
+ * burst will be captured in the minimum amount of time possible, and will
+ * not be interleaved with requests submitted by other capture or repeat
+ * calls.
+ *
+ * <p>Regular and reprocess requests can be mixed together in a single burst. Regular requests
+ * will be captured in order and reprocess requests will be processed in order, respectively.
+ * However, the processing order between a regular request and a reprocess request is not
+ * specified. Each capture produces one {@link CaptureResult} and image buffers for one or more
+ * target {@link android.view.Surface surfaces}. The target surfaces (set with
+ * {@link CaptureRequest.Builder#addTarget}) must be a subset of the surfaces provided when
+ * this capture session was created.</p>
+ *
+ * <p>The main difference between this method and simply calling
+ * {@link #capture} repeatedly is that this method guarantees that no
+ * other requests will be interspersed with the burst.</p>
+ *
+ * <p>All capture sessions can be used for capturing images from the camera but only capture
+ * sessions created by
+ * {@link CameraDevice#createReprocessableCaptureSession createReprocessableCaptureSession}
+ * can submit reprocess capture requests. Submitting a reprocess request to a regular
+ * capture session will result in an {@link IllegalArgumentException}.</p>
+ *
+ * @param requests the list of settings for this burst capture
+ * @param listener The callback object to notify each time one of the
+ * requests in the burst has been processed. If null, no metadata will be
+ * produced for any requests in this burst, although image data will still
+ * be produced.
+ * @param handler the handler on which the listener should be invoked, or
+ * {@code null} to use the current thread's {@link android.os.Looper
+ * looper}.
+ *
+ * @return int A unique capture sequence ID used by
+ * {@link CaptureCallback#onCaptureSequenceCompleted}.
+ *
+ * @throws CameraAccessException if the camera device is no longer connected or has
+ * encountered a fatal error
+ * @throws IllegalStateException if this session is no longer active, either because the session
+ * was explicitly closed, a new session has been created
+ * or the camera device has been closed.
+ * @throws IllegalArgumentException If the requests target no Surfaces, or the requests target
+ * Surfaces not currently configured as outputs; or one of the
+ * requests targets a set of Surfaces that cannot be submitted
+ * simultaneously in a reprocessable capture session; or a
+ * reprocess capture request is submitted in a
+ * non-reprocessable capture session; or one of the reprocess
+ * capture requests was created with a
+ * {@link TotalCaptureResult} from a different session; or one
+ * of the captures targets a Surface in the middle of being
+ * {@link #prepare prepared}; or if the handler is null, the
+ * listener is not null, and the calling thread has no looper.
+ *
+ * @see #capture
+ * @see #setRepeatingRequest
+ * @see #setRepeatingBurst
+ * @see #abortCaptures
+ */
+ public abstract int captureBurst(@NonNull List<CaptureRequest> requests,
+ @Nullable CaptureCallback listener, @Nullable Handler handler)
+ throws CameraAccessException;
+
+ /**
+ * Request endlessly repeating capture of images by this capture session.
+ *
+ * <p>With this method, the camera device will continually capture images
+ * using the settings in the provided {@link CaptureRequest}, at the maximum
+ * rate possible.</p>
+ *
+ * <p>Repeating requests are a simple way for an application to maintain a
+ * preview or other continuous stream of frames, without having to
+ * continually submit identical requests through {@link #capture}.</p>
+ *
+ * <p>Repeat requests have lower priority than those submitted
+ * through {@link #capture} or {@link #captureBurst}, so if
+ * {@link #capture} is called when a repeating request is active, the
+ * capture request will be processed before any further repeating
+ * requests are processed.</p>
+ *
+ * <p>To stop the repeating capture, call {@link #stopRepeating}. Calling
+ * {@link #abortCaptures} will also clear the request.</p>
+ *
+ * <p>Calling this method will replace any earlier repeating request or
+ * burst set up by this method or {@link #setRepeatingBurst}, although any
+ * in-progress burst will be completed before the new repeat request will be
+ * used.</p>
+ *
+ * <p>This method does not support reprocess capture requests because each reprocess
+ * {@link CaptureRequest} must be created from the {@link TotalCaptureResult} that matches
+ * the input image to be reprocessed. This is either the {@link TotalCaptureResult} of capture
+ * that is sent for reprocessing, or one of the {@link TotalCaptureResult TotalCaptureResults}
+ * of a set of captures, when data from the whole set is combined by the application into a
+ * single reprocess input image. The request must be capturing images from the camera. If a
+ * reprocess capture request is submitted, this method will throw IllegalArgumentException.</p>
+ *
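+ * <p>A typical preview setup, sketched with placeholder names:</p>
+ *
+ * <pre><code>
+ * CaptureRequest.Builder builder = session.getDevice()
+ *         .createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
+ * builder.addTarget(previewSurface);
+ * // A null listener skips per-frame metadata; images are still produced.
+ * session.setRepeatingRequest(builder.build(), null, null);
+ * </code></pre>
+ *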
+ * @param request the request to repeat indefinitely
+ * @param listener The callback object to notify every time the
+ * request finishes processing. If null, no metadata will be
+ * produced for this stream of requests, although image data will
+ * still be produced.
+ * @param handler the handler on which the listener should be invoked, or
+ * {@code null} to use the current thread's {@link android.os.Looper
+ * looper}.
+ *
+ * @return int A unique capture sequence ID used by
+ * {@link CaptureCallback#onCaptureSequenceCompleted}.
+ *
+ * @throws CameraAccessException if the camera device is no longer connected or has
+ * encountered a fatal error
+ * @throws IllegalStateException if this session is no longer active, either because the session
+ * was explicitly closed, a new session has been created
+ * or the camera device has been closed.
+ * @throws IllegalArgumentException If the request references no Surfaces or references Surfaces
+ * that are not currently configured as outputs; or the request
+ * is a reprocess capture request; or the capture targets a
+ * Surface in the middle of being {@link #prepare prepared}; or
+ * the handler is null, the listener is not null, and the
+ * calling thread has no looper; or no requests were passed in.
+ *
+ * @see #capture
+ * @see #captureBurst
+ * @see #setRepeatingBurst
+ * @see #stopRepeating
+ * @see #abortCaptures
+ */
+ public abstract int setRepeatingRequest(@NonNull CaptureRequest request,
+ @Nullable CaptureCallback listener, @Nullable Handler handler)
+ throws CameraAccessException;
+
+ /**
+ * <p>Request endlessly repeating capture of a sequence of images by this
+ * capture session.</p>
+ *
+ * <p>With this method, the camera device will continually capture images,
+ * cycling through the settings in the provided list of
+ * {@link CaptureRequest CaptureRequests}, at the maximum rate possible.</p>
+ *
+ * <p>If a request is submitted through {@link #capture} or
+ * {@link #captureBurst}, the current repetition of the request list will be
+ * completed before the higher-priority request is handled. This guarantees
+ * that the application always receives a complete repeat burst captured in
+ * minimal time, instead of bursts interleaved with higher-priority
+ * captures, or incomplete captures.</p>
+ *
+ * <p>Repeating burst requests are a simple way for an application to
+ * maintain a preview or other continuous stream of frames where each
+ * request is different in a predictable way, without having to continually
+ * submit requests through {@link #captureBurst}.</p>
+ *
+ * <p>To stop the repeating capture, call {@link #stopRepeating}. Any
+ * ongoing burst will still be completed, however. Calling
+ * {@link #abortCaptures} will also clear the request.</p>
+ *
+ * <p>Calling this method will replace a previously-set repeating request or
+ * burst set up by this method or {@link #setRepeatingRequest}, although any
+ * in-progress burst will be completed before the new repeat burst will be
+ * used.</p>
+ *
+ * <p>This method does not support reprocess capture requests because each reprocess
+ * {@link CaptureRequest} must be created from the {@link TotalCaptureResult} that matches
+ * the input image to be reprocessed. This is either the {@link TotalCaptureResult} of capture
+ * that is sent for reprocessing, or one of the {@link TotalCaptureResult TotalCaptureResults}
+ * of a set of captures, when data from the whole set is combined by the application into a
+ * single reprocess input image. The request must be capturing images from the camera. If a
+ * reprocess capture request is submitted, this method will throw IllegalArgumentException.</p>
+ *
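+ * <p>As a sketch, cycling between two prebuilt exposure settings on alternating frames
+ * ({@code shortRequest} and {@code longRequest} are placeholders):</p>
+ *
+ * <pre><code>
+ * session.setRepeatingBurst(Arrays.asList(shortRequest, longRequest),
+ *         captureCallback, handler);
+ * </code></pre>
+ *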
+ * @param requests the list of requests to cycle through indefinitely
+ * @param listener The callback object to notify each time one of the
+ * requests in the repeating bursts has finished processing. If null, no
+ * metadata will be produced for this stream of requests, although image
+ * data will still be produced.
+ * @param handler the handler on which the listener should be invoked, or
+ * {@code null} to use the current thread's {@link android.os.Looper
+ * looper}.
+ *
+ * @return int A unique capture sequence ID used by
+ * {@link CaptureCallback#onCaptureSequenceCompleted}.
+ *
+ * @throws CameraAccessException if the camera device is no longer connected or has
+ * encountered a fatal error
+ * @throws IllegalStateException if this session is no longer active, either because the session
+ * was explicitly closed, a new session has been created
+ * or the camera device has been closed.
+ * @throws IllegalArgumentException If the requests reference no Surfaces or reference Surfaces
+ * not currently configured as outputs; or one of the requests
+ * is a reprocess capture request; or one of the captures
+ * targets a Surface in the middle of being
+ * {@link #prepare prepared}; or the handler is null, the
+ * listener is not null, and the calling thread has no looper;
+ * or no requests were passed in.
+ *
+ * @see #capture
+ * @see #captureBurst
+ * @see #setRepeatingRequest
+ * @see #stopRepeating
+ * @see #abortCaptures
+ */
+ public abstract int setRepeatingBurst(@NonNull List<CaptureRequest> requests,
+ @Nullable CaptureCallback listener, @Nullable Handler handler)
+ throws CameraAccessException;
+
+ /**
+ * <p>Cancel any ongoing repeating capture set by either
+ * {@link #setRepeatingRequest setRepeatingRequest} or
+ * {@link #setRepeatingBurst}. Has no effect on requests submitted through
+ * {@link #capture capture} or {@link #captureBurst captureBurst}.</p>
+ *
+ * <p>Any currently in-flight captures will still complete, as will any burst that is
+ * mid-capture. To ensure that the device has finished processing all of its capture requests
+ * and is in ready state, wait for the {@link StateCallback#onReady} callback after
+ * calling this method.</p>
+ *
+ * @throws CameraAccessException if the camera device is no longer connected or has
+ * encountered a fatal error
+ * @throws IllegalStateException if this session is no longer active, either because the session
+ * was explicitly closed, a new session has been created
+ * or the camera device has been closed.
+ *
+ * @see #setRepeatingRequest
+ * @see #setRepeatingBurst
+ * @see StateCallback#onReady
+ */
+ public abstract void stopRepeating() throws CameraAccessException;
+
+ /**
+ * Discard all captures currently pending and in-progress as fast as possible.
+ *
+ * <p>The camera device will discard all of its current work as fast as possible. Some in-flight
+ * captures may complete successfully and call {@link CaptureCallback#onCaptureCompleted}, while
+ * others will trigger their {@link CaptureCallback#onCaptureFailed} callbacks. If a repeating
+ * request or a repeating burst is set, it will be cleared.</p>
+ *
+ * <p>This method is the fastest way to switch the camera device to a new session with
+ * {@link CameraDevice#createCaptureSession} or
+ * {@link CameraDevice#createReprocessableCaptureSession}, at the cost of discarding in-progress
+ * work. It must be called before the new session is created. Once all pending requests are
+ * either completed or thrown away, the {@link StateCallback#onReady} callback will be called,
+ * if the session has not been closed. Otherwise, the {@link StateCallback#onClosed}
+ * callback will be fired when a new session is created by the camera device.</p>
+ *
+ * <p>Cancelling will introduce at least a brief pause in the stream of data from the camera
+ * device, since once the camera device is emptied, the first new request has to make it through
+ * the entire camera pipeline before new output buffers are produced.</p>
+ *
+ * <p>This means that using {@code abortCaptures()} to simply remove pending requests is not
+ * recommended; it's best used for quickly switching output configurations, or for cancelling
+ * long in-progress requests (such as a multi-second capture).</p>
+ *
+ * @throws CameraAccessException if the camera device is no longer connected or has
+ * encountered a fatal error
+ * @throws IllegalStateException if this session is no longer active, either because the session
+ * was explicitly closed, a new session has been created
+ * or the camera device has been closed.
+ *
+ * @see #setRepeatingRequest
+ * @see #setRepeatingBurst
+ * @see CameraDevice#createCaptureSession
+ * @see CameraDevice#createReprocessableCaptureSession
+ */
+ public abstract void abortCaptures() throws CameraAccessException;
+
+ /**
+ * Return whether the application can submit reprocess capture requests with this camera
+ * capture session.
+ *
+ * @return {@code true} if the application can submit reprocess capture requests with this
+ * camera capture session. {@code false} otherwise.
+ *
+ * @see CameraDevice#createReprocessableCaptureSession
+ */
+ public abstract boolean isReprocessable();
+
+ /**
+ * Get the input Surface associated with a reprocessable capture session.
+ *
+ * <p>Each reprocessable capture session has an input {@link Surface} where the reprocess
+ * capture requests get the input images from, rather than the camera device. The application
+ * can create a {@link android.media.ImageWriter ImageWriter} with this input {@link Surface}
+ * and use it to provide input images for reprocess capture requests. When the reprocessable
+ * capture session is closed, the input {@link Surface} is abandoned and becomes invalid.</p>
+ *
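+ * <p>A sketch of wiring the input Surface to an {@link android.media.ImageWriter}
+ * (assumes a reprocessable session; {@code capturedImage} is a placeholder for an
+ * Image obtained elsewhere):</p>
+ *
+ * <pre><code>
+ * ImageWriter writer = ImageWriter.newInstance(session.getInputSurface(), 2);
+ * // Queue a previously captured Image as reprocess input.
+ * writer.queueInputImage(capturedImage);
+ * </code></pre>
+ *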
+ * @return The {@link Surface} where reprocessing capture requests get the input images from. If
+ * this is not a reprocess capture session, {@code null} will be returned.
+ *
+ * @see CameraDevice#createReprocessableCaptureSession
+ * @see android.media.ImageWriter
+ * @see android.media.ImageReader
+ */
+ @Nullable
+ public abstract Surface getInputSurface();
+
+ /**
+ * Close this capture session asynchronously.
+ *
+ * <p>Closing a session frees up the target output Surfaces of the session for reuse with either
+ * a new session, or to other APIs that can draw to Surfaces.</p>
+ *
+ * <p>Note that creating a new capture session with {@link CameraDevice#createCaptureSession}
+ * will close any existing capture session automatically, and call the older session listener's
+ * {@link StateCallback#onClosed} callback. Using {@link CameraDevice#createCaptureSession}
+ * directly without closing is the recommended approach for quickly switching to a new session,
+ * since unchanged target outputs can be reused more efficiently.</p>
+ *
+ * <p>Once a session is closed, all methods on it will throw an IllegalStateException, and any
+ * repeating requests or bursts are stopped (as if {@link #stopRepeating()} was called).
+ * However, any in-progress capture requests submitted to the session will be completed as
+ * normal; once all captures have completed and the session has been torn down,
+ * {@link StateCallback#onClosed} will be called.</p>
+ *
+ * <p>Closing a session is idempotent; closing more than once has no effect.</p>
+ */
+ @Override
+ public abstract void close();
+
+ /**
+ * A callback object for receiving updates about the state of a camera capture session.
+ *
+ */
+ public static abstract class StateCallback {
+
+ /**
+ * This method is called when the camera device has finished configuring itself, and the
+ * session can start processing capture requests.
+ *
+ * <p>If there are capture requests already queued with the session, they will start
+ * processing once this callback is invoked, and the session will call {@link #onActive}
+ * right after this callback is invoked.</p>
+ *
+ * <p>If no capture requests have been submitted, then the session will invoke
+ * {@link #onReady} right after this callback.</p>
+ *
+ * <p>If the camera device configuration fails, then {@link #onConfigureFailed} will
+ * be invoked instead of this callback.</p>
+ *
+ * @param session the session returned by {@link CameraDevice#createCaptureSession}
+ */
+ public abstract void onConfigured(@NonNull CameraCaptureSession session);
+
+ /**
+ * This method is called if the session cannot be configured as requested.
+ *
+ * <p>This can happen if the set of requested outputs contains unsupported sizes,
+ * or too many outputs are requested at once.</p>
+ *
+ * <p>The session is considered to be closed, and all methods called on it after this
+ * callback is invoked will throw an IllegalStateException. Any capture requests submitted
+ * to the session prior to this callback will be discarded and will not produce any
+ * callbacks on their listeners.</p>
+ *
+ * @param session the session returned by {@link CameraDevice#createCaptureSession}
+ */
+ public abstract void onConfigureFailed(@NonNull CameraCaptureSession session);
+
+ /**
+ * This method is called every time the session has no more capture requests to process.
+ *
+ * <p>During the creation of a new session, this callback is invoked right after
+ * {@link #onConfigured} if no capture requests were submitted to the session prior to it
+ * completing configuration.</p>
+ *
+ * <p>Otherwise, this callback will be invoked any time the session finishes processing
+ * all of its active capture requests, and no repeating request or burst is set up.</p>
+ *
+ * @param session the session returned by {@link CameraDevice#createCaptureSession}
+ *
+ */
+ public void onReady(@NonNull CameraCaptureSession session) {
+ // default empty implementation
+ }
+
+ /**
+ * This method is called when the session starts actively processing capture requests.
+ *
+ * <p>If capture requests are submitted prior to {@link #onConfigured} being called,
+ * then the session will start processing those requests immediately after the callback,
+ * and this method will be immediately called after {@link #onConfigured}.</p>
+ *
+ * <p>If the session runs out of capture requests to process and calls {@link #onReady},
+ * then this callback will be invoked again once new requests are submitted for capture.</p>
+ *
+ * @param session the session returned by {@link CameraDevice#createCaptureSession}
+ */
+ public void onActive(@NonNull CameraCaptureSession session) {
+ // default empty implementation
+ }
+
+ /**
+ * This method is called when the camera device's input capture queue becomes empty,
+ * and it is ready to accept the next request.
+ *
+ * <p>Pending capture requests exist in one of two queues: the in-flight queue where requests
+ * are already in different stages of the processing pipeline, and an input queue where requests
+ * wait to enter the in-flight queue. The input queue is needed because more requests may be
+ * submitted than the current camera device pipeline depth.</p>
+ *
+ * <p>This callback is fired when the input queue becomes empty, and the camera device may
+ * have to fall back to the repeating request if set, or completely skip the next frame from
+ * the sensor. This can cause glitches in the camera preview output, for example. This callback
+ * will only fire after requests queued by capture() or captureBurst(), not after a
+ * repeating request or burst enters the in-flight queue. For example, in the common case
+ * of a repeating request and a single-shot JPEG capture, this callback only fires when the
+ * JPEG request has entered the in-flight queue for capture.</p>
+ *
+ * <p>By only sending a new {@link #capture} or {@link #captureBurst} when the input
+ * queue is empty, pipeline latency can be minimized.</p>
+ *
+ * <p>This callback is not fired when the session is first created. It is different from
+ * {@link #onReady}, which is fired when all requests in both queues have been processed.</p>
+ *
+ * @param session
+ * The session returned by {@link CameraDevice#createCaptureSession}
+ */
+ public void onCaptureQueueEmpty(@NonNull CameraCaptureSession session) {
+ // default empty implementation
+ }
+
+ /**
+ * This method is called when the session is closed.
+ *
+ * <p>A session is closed when a new session is created by the parent camera device,
+ * or when the parent camera device is closed (either by the user closing the device,
+ * or due to a camera device disconnection or fatal error).</p>
+ *
+ * <p>Once a session is closed, all methods on it will throw an IllegalStateException, and
+ * any repeating requests or bursts are stopped (as if {@link #stopRepeating()} was called).
+ * However, any in-progress capture requests submitted to the session will be completed
+ * as normal.</p>
+ *
+ * @param session the session returned by {@link CameraDevice#createCaptureSession}
+ */
+ public void onClosed(@NonNull CameraCaptureSession session) {
+ // default empty implementation
+ }
+
+ /**
+ * This method is called when the buffer pre-allocation for an output Surface is complete.
+ *
+ * <p>Buffer pre-allocation for an output Surface is started by the {@link #prepare} call.
+ * While allocation is underway, the Surface must not be used as a capture target.
+ * Once this callback fires, the output Surface provided can again be used as a target for
+ * a capture request.</p>
+ *
+ * <p>In case of an error during pre-allocation (such as running out of suitable memory),
+ * this callback is still invoked after the error is encountered, though some buffers may
+ * not have been successfully pre-allocated.</p>
+ *
+ * @param session the session returned by {@link CameraDevice#createCaptureSession}
+ * @param surface the Surface that was used with the {@link #prepare} call.
+ */
+ public void onSurfacePrepared(@NonNull CameraCaptureSession session,
+ @NonNull Surface surface) {
+ // default empty implementation
+ }
+ }
+
+ /**
+ * <p>A callback object for tracking the progress of a {@link CaptureRequest} submitted to the
+ * camera device.</p>
+ *
+     * <p>This callback is invoked when a request triggers a capture to start,
+     * and when the capture is complete. In case of an error capturing an image,
+     * the error method is triggered instead of the completion method.</p>
+ *
+ * @see #capture
+ * @see #captureBurst
+ * @see #setRepeatingRequest
+ * @see #setRepeatingBurst
+ */
+ public static abstract class CaptureCallback {
+
+ /**
+ * This constant is used to indicate that no images were captured for
+ * the request.
+ *
+ * @hide
+ */
+ public static final int NO_FRAMES_CAPTURED = -1;
+
+ /**
+ * This method is called when the camera device has started capturing
+ * the output image for the request, at the beginning of image exposure, or
+ * when the camera device has started processing an input image for a reprocess
+ * request.
+ *
+ * <p>For a regular capture request, this callback is invoked right as
+ * the capture of a frame begins, so it is the most appropriate time
+ * for playing a shutter sound, or triggering UI indicators of capture.</p>
+ *
+ * <p>The request that is being used for this capture is provided, along
+ * with the actual timestamp for the start of exposure. For a reprocess
+ * request, this timestamp will be the input image's start of exposure
+ * which matches {@link CaptureResult#SENSOR_TIMESTAMP the result timestamp field}
+ * of the {@link TotalCaptureResult} that was used to
+ * {@link CameraDevice#createReprocessCaptureRequest create the reprocess request}.
+ * This timestamp matches the timestamps that will be
+ * included in {@link CaptureResult#SENSOR_TIMESTAMP the result timestamp field},
+ * and in the buffers sent to each output Surface. These buffer
+ * timestamps are accessible through, for example,
+ * {@link android.media.Image#getTimestamp() Image.getTimestamp()} or
+ * {@link android.graphics.SurfaceTexture#getTimestamp()}.
+ * The frame number included is equal to the frame number that will be included in
+ * {@link CaptureResult#getFrameNumber}.</p>
+ *
+         * <p>For the simplest way to play a camera shutter sound or a
+         * video recording start/stop sound, see the
+         * {@link android.media.MediaActionSound} class.</p>
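+         *
+         * <p>A minimal sketch of the common case (assuming a hypothetical {@code mActionSound}
+         * field holding a loaded {@link android.media.MediaActionSound} instance):</p>
+         * <pre><code>
+         * &#64;Override
+         * public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request,
+         *         long timestamp, long frameNumber) {
+         *     // Exposure has just begun, so this is the natural moment for shutter feedback.
+         *     mActionSound.play(MediaActionSound.SHUTTER_CLICK);
+         * }
+         * </code></pre>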
+ *
+ * <p>The default implementation of this method does nothing.</p>
+ *
+ * @param session the session returned by {@link CameraDevice#createCaptureSession}
+         * @param request the request for the capture that has just begun
+ * @param timestamp the timestamp at start of capture for a regular request, or
+ * the timestamp at the input image's start of capture for a
+ * reprocess request, in nanoseconds.
+ * @param frameNumber the frame number for this capture
+ *
+ * @see android.media.MediaActionSound
+ */
+ public void onCaptureStarted(@NonNull CameraCaptureSession session,
+ @NonNull CaptureRequest request, long timestamp, long frameNumber) {
+ // default empty implementation
+ }
+
+ /**
+ * This method is called when some results from an image capture are
+ * available.
+ *
+ * <p>The result provided here will contain some subset of the fields of
+ * a full result. Multiple onCapturePartial calls may happen per
+ * capture; a given result field will only be present in one partial
+ * capture at most. The final onCaptureCompleted call will always
+ * contain all the fields, whether onCapturePartial was called or
+ * not.</p>
+ *
+ * <p>The default implementation of this method does nothing.</p>
+ *
+ * @param session the session returned by {@link CameraDevice#createCaptureSession}
+ * @param request The request that was given to the CameraDevice
+ * @param result The partial output metadata from the capture, which
+ * includes a subset of the CaptureResult fields.
+ *
+ * @see #capture
+ * @see #captureBurst
+ * @see #setRepeatingRequest
+ * @see #setRepeatingBurst
+ *
+ * @hide
+ */
+ public void onCapturePartial(CameraCaptureSession session,
+ CaptureRequest request, CaptureResult result) {
+ // default empty implementation
+ }
+
+ /**
+ * This method is called when an image capture makes partial forward progress; some
+ * (but not all) results from an image capture are available.
+ *
+ * <p>The result provided here will contain some subset of the fields of
+ * a full result. Multiple {@link #onCaptureProgressed} calls may happen per
+ * capture; a given result field will only be present in one partial
+ * capture at most. The final {@link #onCaptureCompleted} call will always
+ * contain all the fields (in particular, the union of all the fields of all
+ * the partial results composing the total result).</p>
+ *
+ * <p>For each request, some result data might be available earlier than others. The typical
+ * delay between each partial result (per request) is a single frame interval.
+ * For performance-oriented use-cases, applications should query the metadata they need
+ * to make forward progress from the partial results and avoid waiting for the completed
+ * result.</p>
+ *
+ * <p>For a particular request, {@link #onCaptureProgressed} may happen before or after
+ * {@link #onCaptureStarted}.</p>
+ *
+         * <p>Each request will generate at least one partial result, and at most
+         * {@link CameraCharacteristics#REQUEST_PARTIAL_RESULT_COUNT} partial results.</p>
+ *
+         * <p>Depending on the request settings, the number of partial results per request
+         * will vary, although the partial count will typically stay the same as long as the
+         * set of enabled camera device subsystems stays the same.</p>
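+         *
+         * <p>An illustrative sketch of acting on an early field (the auto-focus state check
+         * here is just an example; which fields arrive early is device-dependent):</p>
+         * <pre><code>
+         * &#64;Override
+         * public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request,
+         *         CaptureResult partialResult) {
+         *     // A given field appears in at most one partial result, so check for null.
+         *     Integer afState = partialResult.get(CaptureResult.CONTROL_AF_STATE);
+         *     if (afState != null
+         *             &amp;&amp; afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED) {
+         *         // React to focus lock without waiting for the total result.
+         *     }
+         * }
+         * </code></pre>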
+ *
+ * <p>The default implementation of this method does nothing.</p>
+ *
+ * @param session the session returned by {@link CameraDevice#createCaptureSession}
+ * @param request The request that was given to the CameraDevice
+ * @param partialResult The partial output metadata from the capture, which
+ * includes a subset of the {@link TotalCaptureResult} fields.
+ *
+ * @see #capture
+ * @see #captureBurst
+ * @see #setRepeatingRequest
+ * @see #setRepeatingBurst
+ */
+ public void onCaptureProgressed(@NonNull CameraCaptureSession session,
+ @NonNull CaptureRequest request, @NonNull CaptureResult partialResult) {
+ // default empty implementation
+ }
+
+ /**
+ * This method is called when an image capture has fully completed and all the
+ * result metadata is available.
+ *
+ * <p>This callback will always fire after the last {@link #onCaptureProgressed};
+ * in other words, no more partial results will be delivered once the completed result
+ * is available.</p>
+ *
+ * <p>For performance-intensive use-cases where latency is a factor, consider
+ * using {@link #onCaptureProgressed} instead.</p>
+ *
+ * <p>The default implementation of this method does nothing.</p>
+ *
+ * @param session the session returned by {@link CameraDevice#createCaptureSession}
+ * @param request The request that was given to the CameraDevice
+ * @param result The total output metadata from the capture, including the
+ * final capture parameters and the state of the camera system during
+ * capture.
+ *
+ * @see #capture
+ * @see #captureBurst
+ * @see #setRepeatingRequest
+ * @see #setRepeatingBurst
+ */
+ public void onCaptureCompleted(@NonNull CameraCaptureSession session,
+ @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
+ // default empty implementation
+ }
+
+ /**
+ * This method is called instead of {@link #onCaptureCompleted} when the
+ * camera device failed to produce a {@link CaptureResult} for the
+ * request.
+ *
+ * <p>Other requests are unaffected, and some or all image buffers from
+ * the capture may have been pushed to their respective output
+ * streams.</p>
+ *
+ * <p>The default implementation of this method does nothing.</p>
+ *
+ * @param session
+ * The session returned by {@link CameraDevice#createCaptureSession}
+ * @param request
+ * The request that was given to the CameraDevice
+ * @param failure
+ * The output failure from the capture, including the failure reason
+ * and the frame number.
+ *
+ * @see #capture
+ * @see #captureBurst
+ * @see #setRepeatingRequest
+ * @see #setRepeatingBurst
+ */
+ public void onCaptureFailed(@NonNull CameraCaptureSession session,
+ @NonNull CaptureRequest request, @NonNull CaptureFailure failure) {
+ // default empty implementation
+ }
+
+ /**
+ * This method is called independently of the others in CaptureCallback,
+ * when a capture sequence finishes and all {@link CaptureResult}
+ * or {@link CaptureFailure} for it have been returned via this listener.
+ *
+ * <p>In total, there will be at least one result/failure returned by this listener
+ * before this callback is invoked. If the capture sequence is aborted before any
+ * requests have been processed, {@link #onCaptureSequenceAborted} is invoked instead.</p>
+ *
+ * <p>The default implementation does nothing.</p>
+ *
+ * @param session
+ * The session returned by {@link CameraDevice#createCaptureSession}
+ * @param sequenceId
+ * A sequence ID returned by the {@link #capture} family of functions.
+ * @param frameNumber
+ * The last frame number (returned by {@link CaptureResult#getFrameNumber}
+ * or {@link CaptureFailure#getFrameNumber}) in the capture sequence.
+ *
+ * @see CaptureResult#getFrameNumber()
+ * @see CaptureFailure#getFrameNumber()
+ * @see CaptureResult#getSequenceId()
+ * @see CaptureFailure#getSequenceId()
+ * @see #onCaptureSequenceAborted
+ */
+ public void onCaptureSequenceCompleted(@NonNull CameraCaptureSession session,
+ int sequenceId, long frameNumber) {
+ // default empty implementation
+ }
+
+ /**
+ * This method is called independently of the others in CaptureCallback,
+ * when a capture sequence aborts before any {@link CaptureResult}
+ * or {@link CaptureFailure} for it have been returned via this listener.
+ *
+ * <p>Due to the asynchronous nature of the camera device, not all submitted captures
+ * are immediately processed. It is possible to clear out the pending requests
+ * by a variety of operations such as {@link CameraCaptureSession#stopRepeating} or
+ * {@link CameraCaptureSession#abortCaptures}. When such an event happens,
+ * {@link #onCaptureSequenceCompleted} will not be called.</p>
+ *
+ * <p>The default implementation does nothing.</p>
+ *
+ * @param session
+ * The session returned by {@link CameraDevice#createCaptureSession}
+ * @param sequenceId
+ * A sequence ID returned by the {@link #capture} family of functions.
+ *
+ * @see CaptureResult#getFrameNumber()
+ * @see CaptureFailure#getFrameNumber()
+ * @see CaptureResult#getSequenceId()
+ * @see CaptureFailure#getSequenceId()
+ * @see #onCaptureSequenceCompleted
+ */
+ public void onCaptureSequenceAborted(@NonNull CameraCaptureSession session,
+ int sequenceId) {
+ // default empty implementation
+ }
+
+ /**
+ * <p>This method is called if a single buffer for a capture could not be sent to its
+ * destination surface.</p>
+ *
+ * <p>If the whole capture failed, then {@link #onCaptureFailed} will be called instead. If
+ * some but not all buffers were captured but the result metadata will not be available,
+ * then onCaptureFailed will be invoked with {@link CaptureFailure#wasImageCaptured}
+ * returning true, along with one or more calls to {@link #onCaptureBufferLost} for the
+ * failed outputs.</p>
+ *
+ * @param session
+ * The session returned by {@link CameraDevice#createCaptureSession}
+ * @param request
+ * The request that was given to the CameraDevice
+ * @param target
+ * The target Surface that the buffer will not be produced for
+ * @param frameNumber
+ * The frame number for the request
+ */
+ public void onCaptureBufferLost(@NonNull CameraCaptureSession session,
+ @NonNull CaptureRequest request, @NonNull Surface target, long frameNumber) {
+ // default empty implementation
+ }
+ }
+
+}
diff --git a/android/hardware/camera2/CameraCharacteristics.java b/android/hardware/camera2/CameraCharacteristics.java
new file mode 100644
index 00000000..46ad3f0e
--- /dev/null
+++ b/android/hardware/camera2/CameraCharacteristics.java
@@ -0,0 +1,3042 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2;
+
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.hardware.camera2.impl.CameraMetadataNative;
+import android.hardware.camera2.impl.PublicKey;
+import android.hardware.camera2.impl.SyntheticKey;
+import android.hardware.camera2.utils.TypeReference;
+import android.util.Rational;
+
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * <p>The properties describing a
+ * {@link CameraDevice CameraDevice}.</p>
+ *
+ * <p>These properties are fixed for a given CameraDevice, and can be queried
+ * through the {@link CameraManager CameraManager}
+ * interface with {@link CameraManager#getCameraCharacteristics}.</p>
+ *
+ * <p>{@link CameraCharacteristics} objects are immutable.</p>
+ *
+ * @see CameraDevice
+ * @see CameraManager
+ */
+public final class CameraCharacteristics extends CameraMetadata<CameraCharacteristics.Key<?>> {
+
+ /**
+ * A {@code Key} is used to do camera characteristics field lookups with
+ * {@link CameraCharacteristics#get}.
+ *
+ * <p>For example, to get the stream configuration map:
+ * <code><pre>
+ * StreamConfigurationMap map = cameraCharacteristics.get(
+ * CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+ * </pre></code>
+ * </p>
+ *
+ * <p>To enumerate over all possible keys for {@link CameraCharacteristics}, see
+ * {@link CameraCharacteristics#getKeys()}.</p>
+ *
+ * @see CameraCharacteristics#get
+ * @see CameraCharacteristics#getKeys()
+ */
+ public static final class Key<T> {
+ private final CameraMetadataNative.Key<T> mKey;
+
+ /**
+ * Visible for testing and vendor extensions only.
+ *
+ * @hide
+ */
+ public Key(String name, Class<T> type, long vendorId) {
+ mKey = new CameraMetadataNative.Key<T>(name, type, vendorId);
+ }
+
+ /**
+ * Visible for testing and vendor extensions only.
+ *
+ * @hide
+ */
+ public Key(String name, Class<T> type) {
+ mKey = new CameraMetadataNative.Key<T>(name, type);
+ }
+
+ /**
+ * Visible for testing and vendor extensions only.
+ *
+ * @hide
+ */
+ public Key(String name, TypeReference<T> typeReference) {
+ mKey = new CameraMetadataNative.Key<T>(name, typeReference);
+ }
+
+ /**
+         * Return a camelCase, period-separated name formatted like:
+ * {@code "root.section[.subsections].name"}.
+ *
+ * <p>Built-in keys exposed by the Android SDK are always prefixed with {@code "android."};
+ * keys that are device/platform-specific are prefixed with {@code "com."}.</p>
+ *
+ * <p>For example, {@code CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP} would
+ * have a name of {@code "android.scaler.streamConfigurationMap"}; whereas a device
+ * specific key might look like {@code "com.google.nexus.data.private"}.</p>
+ *
+ * @return String representation of the key name
+ */
+ @NonNull
+ public String getName() {
+ return mKey.getName();
+ }
+
+ /**
+ * Return vendor tag id.
+ *
+ * @hide
+ */
+ public long getVendorId() {
+ return mKey.getVendorId();
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public final int hashCode() {
+ return mKey.hashCode();
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @SuppressWarnings("unchecked")
+ @Override
+ public final boolean equals(Object o) {
+ return o instanceof Key && ((Key<T>)o).mKey.equals(mKey);
+ }
+
+ /**
+ * Return this {@link Key} as a string representation.
+ *
+ * <p>{@code "CameraCharacteristics.Key(%s)"}, where {@code %s} represents
+ * the name of this key as returned by {@link #getName}.</p>
+ *
+ * @return string representation of {@link Key}
+ */
+ @NonNull
+ @Override
+ public String toString() {
+ return String.format("CameraCharacteristics.Key(%s)", mKey.getName());
+ }
+
+ /**
+ * Visible for CameraMetadataNative implementation only; do not use.
+ *
+ * TODO: Make this private or remove it altogether.
+ *
+ * @hide
+ */
+ public CameraMetadataNative.Key<T> getNativeKey() {
+ return mKey;
+ }
+
+ @SuppressWarnings({
+ "unused", "unchecked"
+ })
+ private Key(CameraMetadataNative.Key<?> nativeKey) {
+ mKey = (CameraMetadataNative.Key<T>) nativeKey;
+ }
+ }
+
+ private final CameraMetadataNative mProperties;
+ private List<CameraCharacteristics.Key<?>> mKeys;
+ private List<CaptureRequest.Key<?>> mAvailableRequestKeys;
+ private List<CaptureResult.Key<?>> mAvailableResultKeys;
+
+ /**
+ * Takes ownership of the passed-in properties object
+ * @hide
+ */
+ public CameraCharacteristics(CameraMetadataNative properties) {
+ mProperties = CameraMetadataNative.move(properties);
+ setNativeInstance(mProperties);
+ }
+
+ /**
+ * Returns a copy of the underlying {@link CameraMetadataNative}.
+ * @hide
+ */
+ public CameraMetadataNative getNativeCopy() {
+ return new CameraMetadataNative(mProperties);
+ }
+
+ /**
+ * Get a camera characteristics field value.
+ *
+ * <p>The field definitions can be
+ * found in {@link CameraCharacteristics}.</p>
+ *
+     * <p>Querying the value for the same key more than once will return a value
+     * that is equal to the previously queried value.</p>
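+     *
+     * <p>For example (a minimal sketch, where {@code characteristics} is an instance obtained
+     * from {@link CameraManager#getCameraCharacteristics}; optional keys may return
+     * {@code null}):</p>
+     * <pre><code>
+     * Float minFocusDistance = characteristics.get(
+     *         CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE);
+     * if (minFocusDistance != null &amp;&amp; minFocusDistance &gt; 0f) {
+     *     // The lens has an adjustable focuser unit.
+     * }
+     * </code></pre>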
+ *
+ * @throws IllegalArgumentException if the key was not valid
+ *
+ * @param key The characteristics field to read.
+ * @return The value of that key, or {@code null} if the field is not set.
+ */
+ @Nullable
+ public <T> T get(Key<T> key) {
+ return mProperties.get(key);
+ }
+
+ /**
+ * {@inheritDoc}
+ * @hide
+ */
+ @SuppressWarnings("unchecked")
+ @Override
+ protected <T> T getProtected(Key<?> key) {
+ return (T) mProperties.get(key);
+ }
+
+ /**
+ * {@inheritDoc}
+ * @hide
+ */
+ @SuppressWarnings("unchecked")
+ @Override
+ protected Class<Key<?>> getKeyClass() {
+ Object thisClass = Key.class;
+ return (Class<Key<?>>)thisClass;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @NonNull
+ @Override
+ public List<Key<?>> getKeys() {
+ // List of keys is immutable; cache the results after we calculate them
+ if (mKeys != null) {
+ return mKeys;
+ }
+
+ int[] filterTags = get(REQUEST_AVAILABLE_CHARACTERISTICS_KEYS);
+ if (filterTags == null) {
+ throw new AssertionError("android.request.availableCharacteristicsKeys must be non-null"
+ + " in the characteristics");
+ }
+
+ mKeys = Collections.unmodifiableList(
+ getKeys(getClass(), getKeyClass(), this, filterTags));
+ return mKeys;
+ }
+
+ /**
+ * Returns the list of keys supported by this {@link CameraDevice} for querying
+ * with a {@link CaptureRequest}.
+ *
+ * <p>The list returned is not modifiable, so any attempts to modify it will throw
+ * a {@code UnsupportedOperationException}.</p>
+ *
+ * <p>Each key is only listed once in the list. The order of the keys is undefined.</p>
+ *
+ * <p>Note that there is no {@code getAvailableCameraCharacteristicsKeys()} -- use
+ * {@link #getKeys()} instead.</p>
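+     *
+     * <p>An illustrative sketch of checking support for a particular request key (where
+     * {@code characteristics} is an application-held {@link CameraCharacteristics}
+     * instance):</p>
+     * <pre><code>
+     * boolean hasPostRawBoost = characteristics.getAvailableCaptureRequestKeys()
+     *         .contains(CaptureRequest.CONTROL_POST_RAW_SENSITIVITY_BOOST);
+     * </code></pre>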
+ *
+ * @return List of keys supported by this CameraDevice for CaptureRequests.
+ */
+ @SuppressWarnings({"unchecked"})
+ @NonNull
+ public List<CaptureRequest.Key<?>> getAvailableCaptureRequestKeys() {
+ if (mAvailableRequestKeys == null) {
+ Object crKey = CaptureRequest.Key.class;
+ Class<CaptureRequest.Key<?>> crKeyTyped = (Class<CaptureRequest.Key<?>>)crKey;
+
+ int[] filterTags = get(REQUEST_AVAILABLE_REQUEST_KEYS);
+ if (filterTags == null) {
+ throw new AssertionError("android.request.availableRequestKeys must be non-null "
+ + "in the characteristics");
+ }
+ mAvailableRequestKeys =
+ getAvailableKeyList(CaptureRequest.class, crKeyTyped, filterTags);
+ }
+ return mAvailableRequestKeys;
+ }
+
+ /**
+ * Returns the list of keys supported by this {@link CameraDevice} for querying
+ * with a {@link CaptureResult}.
+ *
+ * <p>The list returned is not modifiable, so any attempts to modify it will throw
+ * a {@code UnsupportedOperationException}.</p>
+ *
+ * <p>Each key is only listed once in the list. The order of the keys is undefined.</p>
+ *
+ * <p>Note that there is no {@code getAvailableCameraCharacteristicsKeys()} -- use
+ * {@link #getKeys()} instead.</p>
+ *
+ * @return List of keys supported by this CameraDevice for CaptureResults.
+ */
+ @SuppressWarnings({"unchecked"})
+ @NonNull
+ public List<CaptureResult.Key<?>> getAvailableCaptureResultKeys() {
+ if (mAvailableResultKeys == null) {
+ Object crKey = CaptureResult.Key.class;
+ Class<CaptureResult.Key<?>> crKeyTyped = (Class<CaptureResult.Key<?>>)crKey;
+
+ int[] filterTags = get(REQUEST_AVAILABLE_RESULT_KEYS);
+ if (filterTags == null) {
+ throw new AssertionError("android.request.availableResultKeys must be non-null "
+ + "in the characteristics");
+ }
+ mAvailableResultKeys = getAvailableKeyList(CaptureResult.class, crKeyTyped, filterTags);
+ }
+ return mAvailableResultKeys;
+ }
+
+ /**
+ * Returns the list of keys supported by this {@link CameraDevice} by metadataClass.
+ *
+ * <p>The list returned is not modifiable, so any attempts to modify it will throw
+ * a {@code UnsupportedOperationException}.</p>
+ *
+ * <p>Each key is only listed once in the list. The order of the keys is undefined.</p>
+ *
+ * @param metadataClass The subclass of CameraMetadata that you want to get the keys for.
+ * @param keyClass The class of the metadata key, e.g. CaptureRequest.Key.class
+ *
+ * @return List of keys supported by this CameraDevice for metadataClass.
+ *
+ * @throws IllegalArgumentException if metadataClass is not a subclass of CameraMetadata
+ */
+ private <TKey> List<TKey>
+ getAvailableKeyList(Class<?> metadataClass, Class<TKey> keyClass, int[] filterTags) {
+
+ if (metadataClass.equals(CameraMetadata.class)) {
+ throw new AssertionError(
+ "metadataClass must be a strict subclass of CameraMetadata");
+ } else if (!CameraMetadata.class.isAssignableFrom(metadataClass)) {
+ throw new AssertionError(
+ "metadataClass must be a subclass of CameraMetadata");
+ }
+
+ List<TKey> staticKeyList = getKeys(
+ metadataClass, keyClass, /*instance*/null, filterTags);
+ return Collections.unmodifiableList(staticKeyList);
+ }
+
+ /*@O~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
+ * The key entries below this point are generated from metadata
+ * definitions in /system/media/camera/docs. Do not modify by hand or
+ * modify the comment blocks at the start or end.
+ *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~*/
+
+ /**
+ * <p>List of aberration correction modes for {@link CaptureRequest#COLOR_CORRECTION_ABERRATION_MODE android.colorCorrection.aberrationMode} that are
+ * supported by this camera device.</p>
+ * <p>This key lists the valid modes for {@link CaptureRequest#COLOR_CORRECTION_ABERRATION_MODE android.colorCorrection.aberrationMode}. If no
+ * aberration correction modes are available for a device, this list will solely include
+ * OFF mode. All camera devices will support either OFF or FAST mode.</p>
+ * <p>Camera devices that support the MANUAL_POST_PROCESSING capability will always list
+ * OFF mode. This includes all FULL level devices.</p>
+ * <p>LEGACY devices will always only support FAST mode.</p>
+ * <p><b>Range of valid values:</b><br>
+ * Any value listed in {@link CaptureRequest#COLOR_CORRECTION_ABERRATION_MODE android.colorCorrection.aberrationMode}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#COLOR_CORRECTION_ABERRATION_MODE
+ */
+ @PublicKey
+ public static final Key<int[]> COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES =
+ new Key<int[]>("android.colorCorrection.availableAberrationModes", int[].class);
+
+ /**
+ * <p>List of auto-exposure antibanding modes for {@link CaptureRequest#CONTROL_AE_ANTIBANDING_MODE android.control.aeAntibandingMode} that are
+ * supported by this camera device.</p>
+ * <p>Not all of the auto-exposure anti-banding modes may be
+ * supported by a given camera device. This field lists the
+ * valid anti-banding modes that the application may request
+ * for this camera device with the
+ * {@link CaptureRequest#CONTROL_AE_ANTIBANDING_MODE android.control.aeAntibandingMode} control.</p>
+ * <p><b>Range of valid values:</b><br>
+ * Any value listed in {@link CaptureRequest#CONTROL_AE_ANTIBANDING_MODE android.control.aeAntibandingMode}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_ANTIBANDING_MODE
+ */
+ @PublicKey
+ public static final Key<int[]> CONTROL_AE_AVAILABLE_ANTIBANDING_MODES =
+ new Key<int[]>("android.control.aeAvailableAntibandingModes", int[].class);
+
+ /**
+ * <p>List of auto-exposure modes for {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} that are supported by this camera
+ * device.</p>
+ * <p>Not all the auto-exposure modes may be supported by a
+ * given camera device, especially if no flash unit is
+ * available. This entry lists the valid modes for
+ * {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} for this camera device.</p>
+ * <p>All camera devices support ON, and all camera devices with flash
+ * units support ON_AUTO_FLASH and ON_ALWAYS_FLASH.</p>
+ * <p>FULL mode camera devices always support OFF mode,
+ * which enables application control of camera exposure time,
+ * sensitivity, and frame duration.</p>
+ * <p>LEGACY mode camera devices never support OFF mode.
+ * LIMITED mode devices support OFF if they support the MANUAL_SENSOR
+ * capability.</p>
+ * <p><b>Range of valid values:</b><br>
+ * Any value listed in {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_MODE
+ */
+ @PublicKey
+ public static final Key<int[]> CONTROL_AE_AVAILABLE_MODES =
+ new Key<int[]>("android.control.aeAvailableModes", int[].class);
+
+ /**
+ * <p>List of frame rate ranges for {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE android.control.aeTargetFpsRange} supported by
+ * this camera device.</p>
+ * <p>For devices at the LEGACY level or above:</p>
+ * <ul>
+ * <li>
+ * <p>For constant-framerate recording, for each normal
+ * {@link android.media.CamcorderProfile CamcorderProfile}, that is, a
+ * {@link android.media.CamcorderProfile CamcorderProfile} that has
+ * {@link android.media.CamcorderProfile#quality quality} in
+ * the range [{@link android.media.CamcorderProfile#QUALITY_LOW QUALITY_LOW},
+ * {@link android.media.CamcorderProfile#QUALITY_2160P QUALITY_2160P}], if the profile is
+ * supported by the device and has
+ * {@link android.media.CamcorderProfile#videoFrameRate videoFrameRate} <code>x</code>, this list will
+ * always include (<code>x</code>,<code>x</code>).</p>
+ * </li>
+ * <li>
+ * <p>Also, a camera device must either not support any
+ * {@link android.media.CamcorderProfile CamcorderProfile},
+ * or support at least one
+ * normal {@link android.media.CamcorderProfile CamcorderProfile} that has
+ * {@link android.media.CamcorderProfile#videoFrameRate videoFrameRate} <code>x</code> &gt;= 24.</p>
+ * </li>
+ * </ul>
+ * <p>For devices at the LIMITED level or above:</p>
+ * <ul>
+ * <li>For YUV_420_888 burst capture use case, this list will always include (<code>min</code>, <code>max</code>)
+ * and (<code>max</code>, <code>max</code>) where <code>min</code> &lt;= 15 and <code>max</code> = the maximum output frame rate of the
+ * maximum YUV_420_888 output size.</li>
+ * </ul>
+ * <p><b>Units</b>: Frames per second (FPS)</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE
+ */
+ @PublicKey
+ public static final Key<android.util.Range<Integer>[]> CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES =
+ new Key<android.util.Range<Integer>[]>("android.control.aeAvailableTargetFpsRanges", new TypeReference<android.util.Range<Integer>[]>() {{ }});
+
+ /**
+ * <p>Maximum and minimum exposure compensation values for
+ * {@link CaptureRequest#CONTROL_AE_EXPOSURE_COMPENSATION android.control.aeExposureCompensation}, in counts of {@link CameraCharacteristics#CONTROL_AE_COMPENSATION_STEP android.control.aeCompensationStep},
+ * that are supported by this camera device.</p>
+ * <p><b>Range of valid values:</b><br></p>
+ * <p>Range [0,0] indicates that exposure compensation is not supported.</p>
+     * <p>For LIMITED and FULL devices, the range must meet the following requirements if exposure
+     * compensation is supported (<code>range != [0, 0]</code>):</p>
+ * <p><code>Min.exposure compensation * {@link CameraCharacteristics#CONTROL_AE_COMPENSATION_STEP android.control.aeCompensationStep} &lt;= -2 EV</code></p>
+ * <p><code>Max.exposure compensation * {@link CameraCharacteristics#CONTROL_AE_COMPENSATION_STEP android.control.aeCompensationStep} &gt;= 2 EV</code></p>
+ * <p>LEGACY devices may support a smaller range than this.</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#CONTROL_AE_COMPENSATION_STEP
+ * @see CaptureRequest#CONTROL_AE_EXPOSURE_COMPENSATION
+ */
+ @PublicKey
+ public static final Key<android.util.Range<Integer>> CONTROL_AE_COMPENSATION_RANGE =
+ new Key<android.util.Range<Integer>>("android.control.aeCompensationRange", new TypeReference<android.util.Range<Integer>>() {{ }});
+
+ /**
+ * <p>Smallest step by which the exposure compensation
+ * can be changed.</p>
+ * <p>This is the unit for {@link CaptureRequest#CONTROL_AE_EXPOSURE_COMPENSATION android.control.aeExposureCompensation}. For example, if this key has
+ * a value of <code>1/2</code>, then a setting of <code>-2</code> for {@link CaptureRequest#CONTROL_AE_EXPOSURE_COMPENSATION android.control.aeExposureCompensation} means
+ * that the target EV offset for the auto-exposure routine is -1 EV.</p>
+ * <p>One unit of EV compensation changes the brightness of the captured image by a factor
+ * of two. +1 EV doubles the image brightness, while -1 EV halves the image brightness.</p>
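+     * <p>A worked sketch (the step value varies by device, and {@code requestBuilder} is a
+     * hypothetical {@link CaptureRequest.Builder} held by the application):</p>
+     * <pre><code>
+     * Rational step = characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
+     * // With a step of 1/3 EV, an index of 6 requests a +2 EV target offset.
+     * int index = Math.round(2f / step.floatValue());
+     * requestBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, index);
+     * </code></pre>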
+ * <p><b>Units</b>: Exposure Value (EV)</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_EXPOSURE_COMPENSATION
+ */
+ @PublicKey
+ public static final Key<Rational> CONTROL_AE_COMPENSATION_STEP =
+ new Key<Rational>("android.control.aeCompensationStep", Rational.class);
+
+ /**
+ * <p>List of auto-focus (AF) modes for {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode} that are
+ * supported by this camera device.</p>
+ * <p>Not all the auto-focus modes may be supported by a
+ * given camera device. This entry lists the valid modes for
+ * {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode} for this camera device.</p>
+ * <p>All LIMITED and FULL mode camera devices will support OFF mode, and all
+ * camera devices with adjustable focuser units
+ * (<code>{@link CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE android.lens.info.minimumFocusDistance} &gt; 0</code>) will support AUTO mode.</p>
+ * <p>LEGACY devices will support OFF mode only if they support
+ * focusing to infinity (by also setting {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance} to
+ * <code>0.0f</code>).</p>
+ * <p><b>Range of valid values:</b><br>
+ * Any value listed in {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_AF_MODE
+ * @see CaptureRequest#LENS_FOCUS_DISTANCE
+ * @see CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE
+ */
+ @PublicKey
+ public static final Key<int[]> CONTROL_AF_AVAILABLE_MODES =
+ new Key<int[]>("android.control.afAvailableModes", int[].class);
+
+ /**
+ * <p>List of color effects for {@link CaptureRequest#CONTROL_EFFECT_MODE android.control.effectMode} that are supported by this camera
+ * device.</p>
+ * <p>This list contains the color effect modes that can be applied to
+ * images produced by the camera device.
+ * Implementations are not expected to be consistent across all devices.
+ * If no color effect modes are available for a device, this will only list
+ * OFF.</p>
+ * <p>A color effect will only be applied if
+ * {@link CaptureRequest#CONTROL_MODE android.control.mode} != OFF. OFF is always included in this list.</p>
+ * <p>This control has no effect on the operation of other control routines such
+ * as auto-exposure, white balance, or focus.</p>
+ * <p><b>Range of valid values:</b><br>
+ * Any value listed in {@link CaptureRequest#CONTROL_EFFECT_MODE android.control.effectMode}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_EFFECT_MODE
+ * @see CaptureRequest#CONTROL_MODE
+ */
+ @PublicKey
+ public static final Key<int[]> CONTROL_AVAILABLE_EFFECTS =
+ new Key<int[]>("android.control.availableEffects", int[].class);
+
+ /**
+ * <p>List of scene modes for {@link CaptureRequest#CONTROL_SCENE_MODE android.control.sceneMode} that are supported by this camera
+ * device.</p>
+ * <p>This list contains scene modes that can be set for the camera device.
+ * Only scene modes that have been fully implemented for the
+ * camera device may be included here. Implementations are not expected
+ * to be consistent across all devices.</p>
+ * <p>If no scene modes are supported by the camera device, this
+ * will be set to DISABLED. Otherwise DISABLED will not be listed.</p>
+ * <p>FACE_PRIORITY is always listed if face detection is
+ * supported (i.e.<code>{@link CameraCharacteristics#STATISTICS_INFO_MAX_FACE_COUNT android.statistics.info.maxFaceCount} &gt;
+ * 0</code>).</p>
+ * <p><b>Range of valid values:</b><br>
+ * Any value listed in {@link CaptureRequest#CONTROL_SCENE_MODE android.control.sceneMode}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_SCENE_MODE
+ * @see CameraCharacteristics#STATISTICS_INFO_MAX_FACE_COUNT
+ */
+ @PublicKey
+ public static final Key<int[]> CONTROL_AVAILABLE_SCENE_MODES =
+ new Key<int[]>("android.control.availableSceneModes", int[].class);
+
+ /**
+ * <p>List of video stabilization modes for {@link CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE android.control.videoStabilizationMode}
+ * that are supported by this camera device.</p>
+ * <p>OFF will always be listed.</p>
+ * <p><b>Range of valid values:</b><br>
+ * Any value listed in {@link CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE android.control.videoStabilizationMode}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE
+ */
+ @PublicKey
+ public static final Key<int[]> CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES =
+ new Key<int[]>("android.control.availableVideoStabilizationModes", int[].class);
+
+ /**
+ * <p>List of auto-white-balance modes for {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} that are supported by this
+ * camera device.</p>
+ * <p>Not all the auto-white-balance modes may be supported by a
+ * given camera device. This entry lists the valid modes for
+ * {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} for this camera device.</p>
+ * <p>All camera devices will support ON mode.</p>
+ * <p>Camera devices that support the MANUAL_POST_PROCESSING capability will always support OFF
+ * mode, which enables application control of white balance, by using
+     * {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform} and {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains}
+     * ({@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} must be set to TRANSFORM_MATRIX). This includes all FULL
+ * mode camera devices.</p>
+ * <p><b>Range of valid values:</b><br>
+ * Any value listed in {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#COLOR_CORRECTION_GAINS
+ * @see CaptureRequest#COLOR_CORRECTION_MODE
+ * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
+ * @see CaptureRequest#CONTROL_AWB_MODE
+ */
+ @PublicKey
+ public static final Key<int[]> CONTROL_AWB_AVAILABLE_MODES =
+ new Key<int[]>("android.control.awbAvailableModes", int[].class);
+
+ /**
+ * <p>List of the maximum number of regions that can be used for metering in
+ * auto-exposure (AE), auto-white balance (AWB), and auto-focus (AF);
+     * this corresponds to the maximum number of elements in
+ * {@link CaptureRequest#CONTROL_AE_REGIONS android.control.aeRegions}, {@link CaptureRequest#CONTROL_AWB_REGIONS android.control.awbRegions},
+ * and {@link CaptureRequest#CONTROL_AF_REGIONS android.control.afRegions}.</p>
+ * <p><b>Range of valid values:</b><br></p>
+ * <p>Value must be &gt;= 0 for each element. For full-capability devices
+ * this value must be &gt;= 1 for AE and AF. The order of the elements is:
+ * <code>(AE, AWB, AF)</code>.</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_REGIONS
+ * @see CaptureRequest#CONTROL_AF_REGIONS
+ * @see CaptureRequest#CONTROL_AWB_REGIONS
+ * @hide
+ */
+ public static final Key<int[]> CONTROL_MAX_REGIONS =
+ new Key<int[]>("android.control.maxRegions", int[].class);
+
+ /**
+ * <p>The maximum number of metering regions that can be used by the auto-exposure (AE)
+ * routine.</p>
+     * <p>This corresponds to the maximum allowed number of elements in
+ * {@link CaptureRequest#CONTROL_AE_REGIONS android.control.aeRegions}.</p>
+ * <p><b>Range of valid values:</b><br>
+ * Value will be &gt;= 0. For FULL-capability devices, this
+ * value will be &gt;= 1.</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_REGIONS
+ */
+ @PublicKey
+ @SyntheticKey
+ public static final Key<Integer> CONTROL_MAX_REGIONS_AE =
+ new Key<Integer>("android.control.maxRegionsAe", int.class);
+
+ /**
+ * <p>The maximum number of metering regions that can be used by the auto-white balance (AWB)
+ * routine.</p>
+     * <p>This corresponds to the maximum allowed number of elements in
+ * {@link CaptureRequest#CONTROL_AWB_REGIONS android.control.awbRegions}.</p>
+ * <p><b>Range of valid values:</b><br>
+ * Value will be &gt;= 0.</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_AWB_REGIONS
+ */
+ @PublicKey
+ @SyntheticKey
+ public static final Key<Integer> CONTROL_MAX_REGIONS_AWB =
+ new Key<Integer>("android.control.maxRegionsAwb", int.class);
+
+ /**
+ * <p>The maximum number of metering regions that can be used by the auto-focus (AF) routine.</p>
+     * <p>This corresponds to the maximum allowed number of elements in
+ * {@link CaptureRequest#CONTROL_AF_REGIONS android.control.afRegions}.</p>
+ * <p><b>Range of valid values:</b><br>
+ * Value will be &gt;= 0. For FULL-capability devices, this
+ * value will be &gt;= 1.</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_AF_REGIONS
+ */
+ @PublicKey
+ @SyntheticKey
+ public static final Key<Integer> CONTROL_MAX_REGIONS_AF =
+ new Key<Integer>("android.control.maxRegionsAf", int.class);
+
+ /**
+ * <p>List of available high speed video size, fps range and max batch size configurations
+ * supported by the camera device, in the format of (width, height, fps_min, fps_max, batch_size_max).</p>
+ * <p>When CONSTRAINED_HIGH_SPEED_VIDEO is supported in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities},
+ * this metadata will list the supported high speed video size, fps range and max batch size
+ * configurations. All the sizes listed in this configuration will be a subset of the sizes
+ * reported by {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes }
+ * for processed non-stalling formats.</p>
+ * <p>For the high speed video use case, the application must
+ * select the video size and fps range from this metadata to configure the recording and
+ * preview streams and setup the recording requests. For example, if the application intends
+ * to do high speed recording, it can select the maximum size reported by this metadata to
+     * configure output streams. Once the size is selected, the application can filter this
+     * metadata by the selected size to get the supported fps ranges, and use these fps ranges
+     * to set up the recording requests. Note that for the use case of multiple output streams,
+     * the application must select one unique size from this metadata to use (e.g., preview and
+     * recording streams must have the same size). Otherwise, the high speed capture session
+     * creation will fail.</p>
+     * <p>The min and max fps will be multiples of 30fps.</p>
+     * <p>High speed video streaming places significant performance pressure on the camera
+     * hardware; to achieve efficient high speed streaming, the camera device may have to
+     * aggregate multiple frames together and process them as a batch, where the request
+     * controls are the same for all the frames in a batch. Max batch size indicates
+     * the maximum possible number of frames the camera device will group together for this high
+     * speed stream configuration. This max batch size will be used to generate a high speed
+     * recording request list by
+     * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList }.
+     * The max batch size for each configuration will satisfy the conditions below:</p>
+ * <ul>
+ * <li>Each max batch size will be a divisor of its corresponding fps_max / 30. For example,
+ * if max_fps is 300, max batch size will only be 1, 2, 5, or 10.</li>
+ * <li>The camera device may choose smaller internal batch size for each configuration, but
+ * the actual batch size will be a divisor of max batch size. For example, if the max batch
+ * size is 8, the actual batch size used by camera device will only be 1, 2, 4, or 8.</li>
+ * <li>The max batch size in each configuration entry must be no larger than 32.</li>
+ * </ul>
+     * <p>The camera device doesn't have to support batch mode to achieve high speed video recording;
+     * in such a case, batch_size_max will be reported as 1 in each configuration entry.</p>
+     * <p>The fps ranges in this configuration list can only be used to create requests
+     * that are submitted to a high speed camera capture session created by
+     * {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession }.
+     * The fps ranges reported in this metadata must not be used to set up capture requests for a
+     * normal capture session, or a request error will result.</p>
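+     * <p>An illustrative sketch of this selection workflow via the public
+     * {@link android.hardware.camera2.params.StreamConfigurationMap} accessors (where
+     * {@code characteristics} is an application-held instance):</p>
+     * <pre><code>
+     * StreamConfigurationMap map = characteristics.get(
+     *         CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+     * // Pick one size for both preview and recording, then query its fps ranges.
+     * android.util.Size size = map.getHighSpeedVideoSizes()[0];
+     * android.util.Range&lt;Integer&gt;[] fpsRanges = map.getHighSpeedVideoFpsRangesFor(size);
+     * </code></pre>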
+ * <p><b>Range of valid values:</b><br></p>
+ * <p>For each configuration, the fps_max &gt;= 120fps.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Limited capability</b> -
+ * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ * @hide
+ */
+ public static final Key<android.hardware.camera2.params.HighSpeedVideoConfiguration[]> CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS =
+ new Key<android.hardware.camera2.params.HighSpeedVideoConfiguration[]>("android.control.availableHighSpeedVideoConfigurations", android.hardware.camera2.params.HighSpeedVideoConfiguration[].class);
+
+ /**
+ * <p>Whether the camera device supports {@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock}</p>
+ * <p>Devices with MANUAL_SENSOR capability or BURST_CAPTURE capability will always
+ * list <code>true</code>. This includes FULL devices.</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_LOCK
+ */
+ @PublicKey
+ public static final Key<Boolean> CONTROL_AE_LOCK_AVAILABLE =
+ new Key<Boolean>("android.control.aeLockAvailable", boolean.class);
+
+ /**
+ * <p>Whether the camera device supports {@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock}</p>
+ * <p>Devices with MANUAL_POST_PROCESSING capability or BURST_CAPTURE capability will
+ * always list <code>true</code>. This includes FULL devices.</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_AWB_LOCK
+ */
+ @PublicKey
+ public static final Key<Boolean> CONTROL_AWB_LOCK_AVAILABLE =
+ new Key<Boolean>("android.control.awbLockAvailable", boolean.class);
+
+ /**
+ * <p>List of control modes for {@link CaptureRequest#CONTROL_MODE android.control.mode} that are supported by this camera
+ * device.</p>
+ * <p>This list contains control modes that can be set for the camera device.
+     * LEGACY mode devices will always support AUTO mode. LIMITED and FULL
+     * devices will always support OFF and AUTO modes.</p>
+ * <p><b>Range of valid values:</b><br>
+ * Any value listed in {@link CaptureRequest#CONTROL_MODE android.control.mode}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_MODE
+ */
+ @PublicKey
+ public static final Key<int[]> CONTROL_AVAILABLE_MODES =
+ new Key<int[]>("android.control.availableModes", int[].class);
+
+ /**
+ * <p>Range of boosts for {@link CaptureRequest#CONTROL_POST_RAW_SENSITIVITY_BOOST android.control.postRawSensitivityBoost} supported
+ * by this camera device.</p>
+     * <p>Devices that support post RAW sensitivity boost will advertise the
+     * {@link CaptureRequest#CONTROL_POST_RAW_SENSITIVITY_BOOST android.control.postRawSensitivityBoost} key for controlling
+     * post RAW sensitivity boost.</p>
+     * <p>This key will be <code>null</code> for devices that do not support any RAW format
+     * outputs. For devices that do support RAW format outputs, this key will always be
+     * present, and if a device does not support post RAW sensitivity boost, it will
+     * list <code>(100, 100)</code> in this key.</p>
+ * <p><b>Units</b>: ISO arithmetic units, the same as {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_POST_RAW_SENSITIVITY_BOOST
+ * @see CaptureRequest#SENSOR_SENSITIVITY
+ */
+ @PublicKey
+ public static final Key<android.util.Range<Integer>> CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE =
+ new Key<android.util.Range<Integer>>("android.control.postRawSensitivityBoostRange", new TypeReference<android.util.Range<Integer>>() {{ }});
+
+ /**
+ * <p>List of edge enhancement modes for {@link CaptureRequest#EDGE_MODE android.edge.mode} that are supported by this camera
+ * device.</p>
+ * <p>Full-capability camera devices must always support OFF; camera devices that support
+ * YUV_REPROCESSING or PRIVATE_REPROCESSING will list ZERO_SHUTTER_LAG; all devices will
+ * list FAST.</p>
+ * <p><b>Range of valid values:</b><br>
+ * Any value listed in {@link CaptureRequest#EDGE_MODE android.edge.mode}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CaptureRequest#EDGE_MODE
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ */
+ @PublicKey
+ public static final Key<int[]> EDGE_AVAILABLE_EDGE_MODES =
+ new Key<int[]>("android.edge.availableEdgeModes", int[].class);
+
+ /**
+ * <p>Whether this camera device has a
+ * flash unit.</p>
+     * <p>Will be <code>false</code> if no flash is available.</p>
+     * <p>If there is no flash unit, none of the flash controls do
+     * anything.</p>
+     * <p>This key is available on all devices.</p>
+ */
+ @PublicKey
+ public static final Key<Boolean> FLASH_INFO_AVAILABLE =
+ new Key<Boolean>("android.flash.info.available", boolean.class);
+
+ /**
+ * <p>List of hot pixel correction modes for {@link CaptureRequest#HOT_PIXEL_MODE android.hotPixel.mode} that are supported by this
+ * camera device.</p>
+ * <p>FULL mode camera devices will always support FAST.</p>
+ * <p><b>Range of valid values:</b><br>
+ * Any value listed in {@link CaptureRequest#HOT_PIXEL_MODE android.hotPixel.mode}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CaptureRequest#HOT_PIXEL_MODE
+ */
+ @PublicKey
+ public static final Key<int[]> HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES =
+ new Key<int[]>("android.hotPixel.availableHotPixelModes", int[].class);
+
+ /**
+ * <p>List of JPEG thumbnail sizes for {@link CaptureRequest#JPEG_THUMBNAIL_SIZE android.jpeg.thumbnailSize} supported by this
+ * camera device.</p>
+     * <p>This list will include at least one non-zero resolution, plus <code>(0,0)</code> to indicate that no
+     * thumbnail should be generated.</p>
+     * <p>The following conditions will be satisfied for this size list:</p>
+ * <ul>
+ * <li>The sizes will be sorted by increasing pixel area (width x height).
+ * If several resolutions have the same area, they will be sorted by increasing width.</li>
+     * <li>The aspect ratio of the largest thumbnail size will be the same as the
+     * aspect ratio of the largest JPEG output size in android.scaler.availableStreamConfigurations.
+     * The largest size is defined as the size that has the largest pixel area
+     * in a given size list.</li>
+ * <li>Each output JPEG size in android.scaler.availableStreamConfigurations will have at least
+ * one corresponding size that has the same aspect ratio in availableThumbnailSizes,
+ * and vice versa.</li>
+     * <li>All non-<code>(0, 0)</code> sizes will have non-zero widths and heights.</li>
+     * </ul>
+     * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#JPEG_THUMBNAIL_SIZE
+ */
+ @PublicKey
+ public static final Key<android.util.Size[]> JPEG_AVAILABLE_THUMBNAIL_SIZES =
+ new Key<android.util.Size[]>("android.jpeg.availableThumbnailSizes", android.util.Size[].class);
+
+ /**
+ * <p>List of aperture size values for {@link CaptureRequest#LENS_APERTURE android.lens.aperture} that are
+ * supported by this camera device.</p>
+ * <p>If the camera device doesn't support a variable lens aperture,
+ * this list will contain only one value, which is the fixed aperture size.</p>
+ * <p>If the camera device supports a variable aperture, the aperture values
+ * in this list will be sorted in ascending order.</p>
+ * <p><b>Units</b>: The aperture f-number</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CaptureRequest#LENS_APERTURE
+ */
+ @PublicKey
+ public static final Key<float[]> LENS_INFO_AVAILABLE_APERTURES =
+ new Key<float[]>("android.lens.info.availableApertures", float[].class);
+
+ /**
+ * <p>List of neutral density filter values for
+ * {@link CaptureRequest#LENS_FILTER_DENSITY android.lens.filterDensity} that are supported by this camera device.</p>
+ * <p>If a neutral density filter is not supported by this camera device,
+ * this list will contain only 0. Otherwise, this list will include every
+ * filter density supported by the camera device, in ascending order.</p>
+ * <p><b>Units</b>: Exposure value (EV)</p>
+ * <p><b>Range of valid values:</b><br></p>
+ * <p>Values are &gt;= 0</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CaptureRequest#LENS_FILTER_DENSITY
+ */
+ @PublicKey
+ public static final Key<float[]> LENS_INFO_AVAILABLE_FILTER_DENSITIES =
+ new Key<float[]>("android.lens.info.availableFilterDensities", float[].class);
+
+ /**
+ * <p>List of focal lengths for {@link CaptureRequest#LENS_FOCAL_LENGTH android.lens.focalLength} that are supported by this camera
+ * device.</p>
+ * <p>If optical zoom is not supported, this list will only contain
+ * a single value corresponding to the fixed focal length of the
+ * device. Otherwise, this list will include every focal length supported
+ * by the camera device, in ascending order.</p>
+ * <p><b>Units</b>: Millimeters</p>
+ * <p><b>Range of valid values:</b><br></p>
+ * <p>Values are &gt; 0</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#LENS_FOCAL_LENGTH
+ */
+ @PublicKey
+ public static final Key<float[]> LENS_INFO_AVAILABLE_FOCAL_LENGTHS =
+ new Key<float[]>("android.lens.info.availableFocalLengths", float[].class);
+
+ /**
+ * <p>List of optical image stabilization (OIS) modes for
+ * {@link CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE android.lens.opticalStabilizationMode} that are supported by this camera device.</p>
+ * <p>If OIS is not supported by a given camera device, this list will
+ * contain only OFF.</p>
+ * <p><b>Range of valid values:</b><br>
+ * Any value listed in {@link CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE android.lens.opticalStabilizationMode}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Limited capability</b> -
+ * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE
+ */
+ @PublicKey
+ public static final Key<int[]> LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION =
+ new Key<int[]>("android.lens.info.availableOpticalStabilization", int[].class);
+
+ /**
+ * <p>Hyperfocal distance for this lens.</p>
+ * <p>If the lens is not fixed focus, the camera device will report this
+ * field when {@link CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION android.lens.info.focusDistanceCalibration} is APPROXIMATE or CALIBRATED.</p>
+ * <p><b>Units</b>: See {@link CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION android.lens.info.focusDistanceCalibration} for details</p>
+ * <p><b>Range of valid values:</b><br>
+ * If lens is fixed focus, &gt;= 0. If lens has focuser unit, the value is
+ * within <code>(0.0f, {@link CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE android.lens.info.minimumFocusDistance}]</code></p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Limited capability</b> -
+ * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION
+ * @see CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE
+ */
+ @PublicKey
+ public static final Key<Float> LENS_INFO_HYPERFOCAL_DISTANCE =
+ new Key<Float>("android.lens.info.hyperfocalDistance", float.class);
+
+ /**
+ * <p>Shortest distance from frontmost surface
+ * of the lens that can be brought into sharp focus.</p>
+ * <p>If the lens is fixed-focus, this will be
+ * 0.</p>
+ * <p><b>Units</b>: See {@link CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION android.lens.info.focusDistanceCalibration} for details</p>
+ * <p><b>Range of valid values:</b><br>
+ * &gt;= 0</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Limited capability</b> -
+ * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION
+ */
+ @PublicKey
+ public static final Key<Float> LENS_INFO_MINIMUM_FOCUS_DISTANCE =
+ new Key<Float>("android.lens.info.minimumFocusDistance", float.class);
+
+ /**
+ * <p>Dimensions of lens shading map.</p>
+ * <p>The map should be on the order of 30-40 rows and columns, and
+ * must be smaller than 64x64.</p>
+ * <p><b>Range of valid values:</b><br>
+ * Both values &gt;= 1</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @hide
+ */
+ public static final Key<android.util.Size> LENS_INFO_SHADING_MAP_SIZE =
+ new Key<android.util.Size>("android.lens.info.shadingMapSize", android.util.Size.class);
+
+ /**
+ * <p>The lens focus distance calibration quality.</p>
+ * <p>The lens focus distance calibration quality determines the reliability of
+ * focus related metadata entries, i.e. {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance},
+ * {@link CaptureResult#LENS_FOCUS_RANGE android.lens.focusRange}, {@link CameraCharacteristics#LENS_INFO_HYPERFOCAL_DISTANCE android.lens.info.hyperfocalDistance}, and
+ * {@link CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE android.lens.info.minimumFocusDistance}.</p>
+ * <p>APPROXIMATE and CALIBRATED devices report the focus metadata in
+ * units of diopters (1/meter), so <code>0.0f</code> represents focusing at infinity,
+ * and increasing positive numbers represent focusing closer and closer
+ * to the camera device. The focus distance control also uses diopters
+ * on these devices.</p>
+ * <p>UNCALIBRATED devices do not use units that are directly comparable
+ * to any real physical measurement, but <code>0.0f</code> still represents farthest
+ * focus, and {@link CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE android.lens.info.minimumFocusDistance} represents the
+ * nearest focus the device can achieve.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED UNCALIBRATED}</li>
+ * <li>{@link #LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE APPROXIMATE}</li>
+ * <li>{@link #LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED CALIBRATED}</li>
+ * </ul></p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Limited capability</b> -
+ * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CaptureRequest#LENS_FOCUS_DISTANCE
+ * @see CaptureResult#LENS_FOCUS_RANGE
+ * @see CameraCharacteristics#LENS_INFO_HYPERFOCAL_DISTANCE
+ * @see CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE
+ * @see #LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED
+ * @see #LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE
+ * @see #LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED
+ */
+ @PublicKey
+ public static final Key<Integer> LENS_INFO_FOCUS_DISTANCE_CALIBRATION =
+ new Key<Integer>("android.lens.info.focusDistanceCalibration", int.class);
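+
+ /*
+  * Illustrative sketch (not part of the platform source): interpreting a
+  * reported focus distance in diopters as meters, which is only meaningful on
+  * APPROXIMATE or CALIBRATED devices. Assumes "chars" is this device's
+  * CameraCharacteristics and "result" is a CaptureResult from it.
+  *
+  *   Integer calibration = chars.get(
+  *           CameraCharacteristics.LENS_INFO_FOCUS_DISTANCE_CALIBRATION);
+  *   Float diopters = result.get(CaptureResult.LENS_FOCUS_DISTANCE);
+  *   if (calibration != null && diopters != null && diopters > 0f
+  *           && calibration != CameraMetadata
+  *                   .LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED) {
+  *       float meters = 1.0f / diopters; // e.g. 2.0 diopters -> 0.5 m
+  *   }
+  */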
+
+ /**
+ * <p>Direction the camera faces relative to
+ * device screen.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #LENS_FACING_FRONT FRONT}</li>
+ * <li>{@link #LENS_FACING_BACK BACK}</li>
+ * <li>{@link #LENS_FACING_EXTERNAL EXTERNAL}</li>
+ * </ul></p>
+ * <p>This key is available on all devices.</p>
+ * @see #LENS_FACING_FRONT
+ * @see #LENS_FACING_BACK
+ * @see #LENS_FACING_EXTERNAL
+ */
+ @PublicKey
+ public static final Key<Integer> LENS_FACING =
+ new Key<Integer>("android.lens.facing", int.class);
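+
+ /*
+  * Illustrative sketch (not part of the platform source): finding the first
+  * back-facing camera. Assumes "manager" is a CameraManager obtained from
+  * Context.getSystemService(Context.CAMERA_SERVICE); getCameraIdList() and
+  * getCameraCharacteristics() may throw CameraAccessException.
+  *
+  *   String backCameraId = null;
+  *   for (String id : manager.getCameraIdList()) {
+  *       Integer facing = manager.getCameraCharacteristics(id)
+  *               .get(CameraCharacteristics.LENS_FACING);
+  *       if (facing != null && facing == CameraMetadata.LENS_FACING_BACK) {
+  *           backCameraId = id;
+  *           break;
+  *       }
+  *   }
+  */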
+
+ /**
+ * <p>The orientation of the camera relative to the sensor
+ * coordinate system.</p>
+ * <p>The four coefficients that describe the quaternion
+ * rotation from the Android sensor coordinate system to a
+ * camera-aligned coordinate system where the X-axis is
+ * aligned with the long side of the image sensor, the Y-axis
+ * is aligned with the short side of the image sensor, and
+ * the Z-axis is aligned with the optical axis of the sensor.</p>
+ * <p>To convert from the quaternion coefficients <code>(x,y,z,w)</code>
+ * to the axis of rotation <code>(a_x, a_y, a_z)</code> and rotation
+ * amount <code>theta</code>, the following formulas can be used:</p>
+ * <pre><code> theta = 2 * acos(w)
+ * a_x = x / sin(theta/2)
+ * a_y = y / sin(theta/2)
+ * a_z = z / sin(theta/2)
+ * </code></pre>
+ * <p>To create a 3x3 rotation matrix that applies the rotation
+ * defined by this quaternion, the following matrix can be
+ * used:</p>
+ * <pre><code>R = [ 1 - 2y^2 - 2z^2, 2xy - 2zw, 2xz + 2yw,
+ * 2xy + 2zw, 1 - 2x^2 - 2z^2, 2yz - 2xw,
+ * 2xz - 2yw, 2yz + 2xw, 1 - 2x^2 - 2y^2 ]
+ * </code></pre>
+ * <p>This matrix can then be used to apply the rotation to a
+ * column vector point with</p>
+ * <p><code>p' = Rp</code></p>
+ * <p>where <code>p</code> is in the device sensor coordinate system, and
+ * <code>p'</code> is in the camera-oriented coordinate system.</p>
+ * <p><b>Units</b>:
+ * Quaternion coefficients</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ */
+ @PublicKey
+ public static final Key<float[]> LENS_POSE_ROTATION =
+ new Key<float[]>("android.lens.poseRotation", float[].class);
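+
+ /*
+  * Illustrative sketch (not part of the platform source): expanding the
+  * reported quaternion into the row-major 3x3 rotation matrix documented
+  * above. Assumes "chars" is this device's CameraCharacteristics.
+  *
+  *   float[] q = chars.get(CameraCharacteristics.LENS_POSE_ROTATION);
+  *   if (q != null) {
+  *       float x = q[0], y = q[1], z = q[2], w = q[3];
+  *       float[] r = {
+  *           1 - 2*y*y - 2*z*z, 2*x*y - 2*z*w,     2*x*z + 2*y*w,
+  *           2*x*y + 2*z*w,     1 - 2*x*x - 2*z*z, 2*y*z - 2*x*w,
+  *           2*x*z - 2*y*w,     2*y*z + 2*x*w,     1 - 2*x*x - 2*y*y
+  *       };
+  *   }
+  */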
+
+ /**
+ * <p>Position of the camera optical center.</p>
+ * <p>The position of the camera device's lens optical center,
+ * as a three-dimensional vector <code>(x,y,z)</code>, relative to the
+ * optical center of the largest camera device facing in the
+ * same direction as this camera, in the {@link android.hardware.SensorEvent Android sensor coordinate
+ * axes}. Note that only the axis definitions are shared with
+ * the sensor coordinate system, not the origin.</p>
+ * <p>If this device is the largest or only camera device with a
+ * given facing, then this position will be <code>(0, 0, 0)</code>; a
+ * camera device with a lens optical center located 3 cm from
+ * the main sensor along the +X axis (to the right from the
+ * user's perspective) will report <code>(0.03, 0, 0)</code>.</p>
+ * <p>To transform pixel coordinates between two cameras
+ * facing the same direction, first the source camera
+ * {@link CameraCharacteristics#LENS_RADIAL_DISTORTION android.lens.radialDistortion} must be corrected for. Then
+ * the source camera {@link CameraCharacteristics#LENS_INTRINSIC_CALIBRATION android.lens.intrinsicCalibration} needs
+ * to be applied, followed by the {@link CameraCharacteristics#LENS_POSE_ROTATION android.lens.poseRotation}
+ * of the source camera, the translation of the source camera
+ * relative to the destination camera, the
+ * {@link CameraCharacteristics#LENS_POSE_ROTATION android.lens.poseRotation} of the destination camera, and
+ * finally the inverse of {@link CameraCharacteristics#LENS_INTRINSIC_CALIBRATION android.lens.intrinsicCalibration}
+ * of the destination camera. This obtains a
+ * radial-distortion-free coordinate in the destination
+ * camera pixel coordinates.</p>
+ * <p>To compare this against a real image from the destination
+ * camera, the destination camera image then needs to be
+ * corrected for radial distortion before comparison or
+ * sampling.</p>
+ * <p><b>Units</b>: Meters</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CameraCharacteristics#LENS_INTRINSIC_CALIBRATION
+ * @see CameraCharacteristics#LENS_POSE_ROTATION
+ * @see CameraCharacteristics#LENS_RADIAL_DISTORTION
+ */
+ @PublicKey
+ public static final Key<float[]> LENS_POSE_TRANSLATION =
+ new Key<float[]>("android.lens.poseTranslation", float[].class);
+
+ /**
+ * <p>The parameters for this camera device's intrinsic
+ * calibration.</p>
+ * <p>The five calibration parameters that describe the
+ * transform from camera-centric 3D coordinates to sensor
+ * pixel coordinates:</p>
+ * <pre><code>[f_x, f_y, c_x, c_y, s]
+ * </code></pre>
+ * <p>Where <code>f_x</code> and <code>f_y</code> are the horizontal and vertical
+ * focal lengths, <code>[c_x, c_y]</code> is the position of the optical
+ * axis, and <code>s</code> is a skew parameter for the sensor plane not
+ * being aligned with the lens plane.</p>
+ * <p>These are typically used within a transformation matrix K:</p>
+ * <pre><code>K = [ f_x, s, c_x,
+ * 0, f_y, c_y,
+ * 0, 0, 1 ]
+ * </code></pre>
+ * <p>which can then be combined with the camera pose rotation
+ * <code>R</code> and translation <code>t</code> ({@link CameraCharacteristics#LENS_POSE_ROTATION android.lens.poseRotation} and
+ * {@link CameraCharacteristics#LENS_POSE_TRANSLATION android.lens.poseTranslation}, respectively) to calculate the
+ * complete transform from world coordinates to pixel
+ * coordinates:</p>
+ * <pre><code>P = [ K 0 * [ R t
+ * 0 1 ] 0 1 ]
+ * </code></pre>
+ * <p>and with <code>p_w</code> being a point in the world coordinate system
+ * and <code>p_s</code> being a point in the camera active pixel array
+ * coordinate system, and with the mapping including the
+ * homogeneous division by z:</p>
+ * <pre><code> p_h = (x_h, y_h, z_h) = P p_w
+ * p_s = p_h / z_h
+ * </code></pre>
+ * <p>so <code>[x_s, y_s]</code> is the pixel coordinate of the world
+ * point, <code>z_s = 1</code> after the homogeneous division, and <code>z_h</code> is a
+ * measurement of disparity (depth) in pixel coordinates.</p>
+ * <p>Note that the coordinate system for this transform is the
+ * {@link CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE android.sensor.info.preCorrectionActiveArraySize} system,
+ * where <code>(0,0)</code> is the top-left of the
+ * preCorrectionActiveArraySize rectangle. Once the pose and
+ * intrinsic calibration transforms have been applied to a
+ * world point, then the {@link CameraCharacteristics#LENS_RADIAL_DISTORTION android.lens.radialDistortion}
+ * transform needs to be applied, and the result adjusted to
+ * be in the {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize} coordinate
+ * system (where <code>(0, 0)</code> is the top-left of the
+ * activeArraySize rectangle), to determine the final pixel
+ * coordinate of the world point for processed (non-RAW)
+ * output buffers.</p>
+ * <p><b>Units</b>:
+ * Pixels in the
+ * {@link CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE android.sensor.info.preCorrectionActiveArraySize}
+ * coordinate system.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CameraCharacteristics#LENS_POSE_ROTATION
+ * @see CameraCharacteristics#LENS_POSE_TRANSLATION
+ * @see CameraCharacteristics#LENS_RADIAL_DISTORTION
+ * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ * @see CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
+ */
+ @PublicKey
+ public static final Key<float[]> LENS_INTRINSIC_CALIBRATION =
+ new Key<float[]>("android.lens.intrinsicCalibration", float[].class);
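+
+ /*
+  * Illustrative sketch (not part of the platform source): projecting a point
+  * already expressed in the camera-aligned coordinate system into
+  * pre-correction pixel coordinates using [f_x, f_y, c_x, c_y, s], following
+  * the K matrix above. Assumes "chars" is this device's CameraCharacteristics;
+  * the point (x, y, z) is a hypothetical example with z > 0.
+  *
+  *   float[] k = chars.get(CameraCharacteristics.LENS_INTRINSIC_CALIBRATION);
+  *   if (k != null) {
+  *       float fx = k[0], fy = k[1], cx = k[2], cy = k[3], s = k[4];
+  *       float x = 0.1f, y = 0.2f, z = 1.5f; // hypothetical camera-space point
+  *       float xs = (fx * x + s * y) / z + cx;
+  *       float ys = (fy * y) / z + cy;
+  *   }
+  */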
+
+ /**
+ * <p>The correction coefficients to correct for this camera device's
+ * radial and tangential lens distortion.</p>
+ * <p>Four radial distortion coefficients <code>[kappa_0, kappa_1, kappa_2,
+ * kappa_3]</code> and two tangential distortion coefficients
+ * <code>[kappa_4, kappa_5]</code> that can be used to correct the
+ * lens's geometric distortion with the mapping equations:</p>
+ * <pre><code> x_c = x_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
+ * kappa_4 * (2 * x_i * y_i) + kappa_5 * ( r^2 + 2 * x_i^2 )
+ * y_c = y_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
+ * kappa_5 * (2 * x_i * y_i) + kappa_4 * ( r^2 + 2 * y_i^2 )
+ * </code></pre>
+ * <p>Here, <code>[x_c, y_c]</code> are the coordinates to sample in the
+ * input image that correspond to the pixel values in the
+ * corrected image at the coordinate <code>[x_i, y_i]</code>:</p>
+ * <pre><code> correctedImage(x_i, y_i) = sample_at(x_c, y_c, inputImage)
+ * </code></pre>
+ * <p>The pixel coordinates are defined in a normalized
+ * coordinate system related to the
+ * {@link CameraCharacteristics#LENS_INTRINSIC_CALIBRATION android.lens.intrinsicCalibration} calibration fields.
+ * Both <code>[x_i, y_i]</code> and <code>[x_c, y_c]</code> have <code>(0,0)</code> at the
+ * lens optical center <code>[c_x, c_y]</code>. The maximum magnitudes
+ * of both x and y coordinates are normalized to be 1 at the
+ * edge further from the optical center, so the range
+ * for both dimensions is <code>-1 &lt;= x &lt;= 1</code>.</p>
+ * <p>Finally, <code>r</code> represents the radial distance from the
+ * optical center, <code>r^2 = x_i^2 + y_i^2</code>, and its magnitude
+ * is therefore no larger than <code>|r| &lt;= sqrt(2)</code>.</p>
+ * <p>The distortion model used is the Brown-Conrady model.</p>
+ * <p><b>Units</b>:
+ * Unitless coefficients.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CameraCharacteristics#LENS_INTRINSIC_CALIBRATION
+ */
+ @PublicKey
+ public static final Key<float[]> LENS_RADIAL_DISTORTION =
+ new Key<float[]>("android.lens.radialDistortion", float[].class);
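+
+ /*
+  * Illustrative sketch (not part of the platform source): evaluating the
+  * Brown-Conrady mapping above for one normalized corrected-image coordinate
+  * (x_i, y_i) to find the source sample coordinate (x_c, y_c). Assumes
+  * "chars" is this device's CameraCharacteristics; the coordinate values are
+  * hypothetical.
+  *
+  *   float[] kappa = chars.get(CameraCharacteristics.LENS_RADIAL_DISTORTION);
+  *   if (kappa != null) {
+  *       float xi = 0.5f, yi = -0.25f;
+  *       float r2 = xi * xi + yi * yi;
+  *       float radial = kappa[0] + kappa[1] * r2 + kappa[2] * r2 * r2
+  *               + kappa[3] * r2 * r2 * r2;
+  *       float xc = xi * radial + kappa[4] * (2 * xi * yi)
+  *               + kappa[5] * (r2 + 2 * xi * xi);
+  *       float yc = yi * radial + kappa[5] * (2 * xi * yi)
+  *               + kappa[4] * (r2 + 2 * yi * yi);
+  *   }
+  */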
+
+ /**
+ * <p>List of noise reduction modes for {@link CaptureRequest#NOISE_REDUCTION_MODE android.noiseReduction.mode} that are supported
+ * by this camera device.</p>
+ * <p>Full-capability camera devices will always support OFF and FAST.</p>
+ * <p>Camera devices that support YUV_REPROCESSING or PRIVATE_REPROCESSING will support
+ * ZERO_SHUTTER_LAG.</p>
+ * <p>Legacy-capability camera devices will only support FAST mode.</p>
+ * <p><b>Range of valid values:</b><br>
+ * Any value listed in {@link CaptureRequest#NOISE_REDUCTION_MODE android.noiseReduction.mode}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Limited capability</b> -
+ * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CaptureRequest#NOISE_REDUCTION_MODE
+ */
+ @PublicKey
+ public static final Key<int[]> NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES =
+ new Key<int[]>("android.noiseReduction.availableNoiseReductionModes", int[].class);
+
+ /**
+ * <p>If set to 1, the HAL will always split result
+ * metadata for a single capture into multiple buffers,
+ * returned using multiple process_capture_result calls.</p>
+ * <p>Does not need to be listed in static
+ * metadata. Support for partial results will be reworked in
+ * future versions of camera service. This quirk will stop
+ * working at that point; DO NOT USE without careful
+ * consideration of future support.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * @deprecated
+ * @hide
+ */
+ @Deprecated
+ public static final Key<Byte> QUIRKS_USE_PARTIAL_RESULT =
+ new Key<Byte>("android.quirks.usePartialResult", byte.class);
+
+ /**
+ * <p>The maximum numbers of different types of output streams
+ * that can be configured and used simultaneously by a camera device.</p>
+ * <p>This is a 3-element tuple that contains the max number of output simultaneous
+ * streams for raw sensor, processed (but not stalling), and processed (and stalling)
+ * formats respectively. For example, assuming that JPEG is typically a processed and
+ * stalling stream, if the max raw sensor format output stream number is 1, the max YUV stream
+ * number is 3, and the max JPEG stream number is 2, then this tuple should be <code>(1, 3, 2)</code>.</p>
+ * <p>This lists the upper bound of the number of output streams supported by
+ * the camera device. Using more streams simultaneously may require more hardware and
+ * CPU resources that will consume more power. The image format for an output stream can
+ * be any supported format provided by android.scaler.availableStreamConfigurations.
+ * The formats defined in android.scaler.availableStreamConfigurations can be categorized
+ * into the 3 stream types as below:</p>
+ * <ul>
+ * <li>Processed (and stalling): any non-RAW format with a stall duration &gt; 0.
+ * Typically {@link android.graphics.ImageFormat#JPEG JPEG format}.</li>
+ * <li>Raw formats: {@link android.graphics.ImageFormat#RAW_SENSOR RAW_SENSOR}, {@link android.graphics.ImageFormat#RAW10 RAW10}, or {@link android.graphics.ImageFormat#RAW12 RAW12}.</li>
+ * <li>Processed (but not-stalling): any non-RAW format without a stall duration.
+ * Typically {@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888},
+ * {@link android.graphics.ImageFormat#NV21 NV21}, or
+ * {@link android.graphics.ImageFormat#YV12 YV12}.</li>
+ * </ul>
+ * <p><b>Range of valid values:</b><br></p>
+ * <p>For processed (and stalling) format streams, &gt;= 1.</p>
+ * <p>For Raw format (either stalling or non-stalling) streams, &gt;= 0.</p>
+ * <p>For processed (but not stalling) format streams, &gt;= 3
+ * for FULL mode devices (<code>{@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} == FULL</code>);
+ * &gt;= 2 for LIMITED mode devices (<code>{@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} == LIMITED</code>).</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @hide
+ */
+ public static final Key<int[]> REQUEST_MAX_NUM_OUTPUT_STREAMS =
+ new Key<int[]>("android.request.maxNumOutputStreams", int[].class);
+
+ /**
+ * <p>The maximum numbers of different types of output streams
+ * that can be configured and used simultaneously by a camera device
+ * for any <code>RAW</code> formats.</p>
+ * <p>This value contains the max number of output simultaneous
+ * streams from the raw sensor.</p>
+ * <p>This lists the upper bound of the number of output streams supported by
+ * the camera device. Using more streams simultaneously may require more hardware and
+ * CPU resources that will consume more power. The image format for this kind of output stream can
+ * be any <code>RAW</code> and supported format provided by {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap}.</p>
+ * <p>In particular, a <code>RAW</code> format is typically one of:</p>
+ * <ul>
+ * <li>{@link android.graphics.ImageFormat#RAW_SENSOR RAW_SENSOR}</li>
+ * <li>{@link android.graphics.ImageFormat#RAW10 RAW10}</li>
+ * <li>{@link android.graphics.ImageFormat#RAW12 RAW12}</li>
+ * </ul>
+ * <p>LEGACY mode devices ({@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} <code>==</code> LEGACY)
+ * never support raw streams.</p>
+ * <p><b>Range of valid values:</b><br></p>
+ * <p>&gt;= 0</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
+ */
+ @PublicKey
+ @SyntheticKey
+ public static final Key<Integer> REQUEST_MAX_NUM_OUTPUT_RAW =
+ new Key<Integer>("android.request.maxNumOutputRaw", int.class);
+
+ /**
+ * <p>The maximum numbers of different types of output streams
+ * that can be configured and used simultaneously by a camera device
+ * for any processed (but not-stalling) formats.</p>
+ * <p>This value contains the max number of output simultaneous
+ * streams for any processed (but not-stalling) formats.</p>
+ * <p>This lists the upper bound of the number of output streams supported by
+ * the camera device. Using more streams simultaneously may require more hardware and
+ * CPU resources that will consume more power. The image format for this kind of output stream can
+ * be any non-<code>RAW</code> and supported format provided by {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap}.</p>
+ * <p>Processed (but not-stalling) is defined as any non-RAW format without a stall duration.
+ * Typically:</p>
+ * <ul>
+ * <li>{@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888}</li>
+ * <li>{@link android.graphics.ImageFormat#NV21 NV21}</li>
+ * <li>{@link android.graphics.ImageFormat#YV12 YV12}</li>
+ * <li>Implementation-defined formats, i.e. {@link android.hardware.camera2.params.StreamConfigurationMap#isOutputSupportedFor(Class) }</li>
+ * </ul>
+ * <p>For full guarantees, query {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration } with a
+ * processed format -- it will return 0 for a non-stalling stream.</p>
+ * <p>LEGACY devices will support at least 2 processing/non-stalling streams.</p>
+ * <p><b>Range of valid values:</b><br></p>
+ * <p>&gt;= 3
+ * for FULL mode devices (<code>{@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} == FULL</code>);
+ * &gt;= 2 for LIMITED mode devices (<code>{@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} == LIMITED</code>).</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
+ */
+ @PublicKey
+ @SyntheticKey
+ public static final Key<Integer> REQUEST_MAX_NUM_OUTPUT_PROC =
+ new Key<Integer>("android.request.maxNumOutputProc", int.class);
+
+ /**
+ * <p>The maximum numbers of different types of output streams
+ * that can be configured and used simultaneously by a camera device
+ * for any processed (and stalling) formats.</p>
+ * <p>This value contains the max number of output simultaneous
+ * streams for any processed (and stalling) formats.</p>
+ * <p>This lists the upper bound of the number of output streams supported by
+ * the camera device. Using more streams simultaneously may require more hardware and
+ * CPU resources that will consume more power. The image format for this kind of output stream can
+ * be any non-<code>RAW</code> and supported format provided by {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap}.</p>
+ * <p>A processed and stalling format is defined as any non-RAW format with a stallDurations
+ * &gt; 0. Typically only the {@link android.graphics.ImageFormat#JPEG JPEG format} is a
+ * stalling format.</p>
+ * <p>For full guarantees, query {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration } with a
+ * processed format -- it will return a non-0 value for a stalling stream.</p>
+ * <p>LEGACY devices will support up to 1 processing/stalling stream.</p>
+ * <p><b>Range of valid values:</b><br></p>
+ * <p>&gt;= 1</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
+ */
+ @PublicKey
+ @SyntheticKey
+ public static final Key<Integer> REQUEST_MAX_NUM_OUTPUT_PROC_STALLING =
+ new Key<Integer>("android.request.maxNumOutputProcStalling", int.class);
+
+ /**
+ * <p>The maximum numbers of any type of input streams
+ * that can be configured and used simultaneously by a camera device.</p>
+ * <p>When set to 0, it means no input stream is supported.</p>
+ * <p>The image format for an input stream can be any supported format returned by {@link android.hardware.camera2.params.StreamConfigurationMap#getInputFormats }. When using an
+ * input stream, there must be at least one output stream configured to receive the
+ * reprocessed images.</p>
+ * <p>When an input stream and some output streams are used in a reprocessing request,
+ * only the input buffer will be used to produce these output stream buffers, and a
+ * new sensor image will not be captured.</p>
+ * <p>For example, for the Zero Shutter Lag (ZSL) still capture use case, the input
+ * stream image format will be PRIVATE, and the associated output stream image format
+ * should be JPEG.</p>
+ * <p><b>Range of valid values:</b><br></p>
+ * <p>0 or 1.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ */
+ @PublicKey
+ public static final Key<Integer> REQUEST_MAX_NUM_INPUT_STREAMS =
+ new Key<Integer>("android.request.maxNumInputStreams", int.class);
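+
+ /*
+  * Illustrative sketch (not part of the platform source): checking whether
+  * reprocessing is possible and which input formats are offered. Assumes
+  * "chars" is this device's CameraCharacteristics.
+  *
+  *   Integer maxInputs =
+  *           chars.get(CameraCharacteristics.REQUEST_MAX_NUM_INPUT_STREAMS);
+  *   if (maxInputs != null && maxInputs > 0) {
+  *       StreamConfigurationMap map =
+  *               chars.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+  *       int[] inputFormats = map.getInputFormats();
+  *   }
+  */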
+
+ /**
+ * <p>Specifies the maximum number of pipeline stages a frame
+ * has to go through from when it's exposed to when it's available
+ * to the framework.</p>
+ * <p>A typical minimum value for this is 2 (one stage to expose,
+ * one stage to read out) from the sensor. The ISP then usually adds
+ * its own stages to do custom HW processing. Further stages may be
+ * added by SW processing.</p>
+ * <p>Depending on what settings are used (e.g. YUV, JPEG) and what
+ * processing is enabled (e.g. face detection), the actual pipeline
+ * depth (specified by {@link CaptureResult#REQUEST_PIPELINE_DEPTH android.request.pipelineDepth}) may be less than
+ * the max pipeline depth.</p>
+ * <p>A pipeline depth of X stages is equivalent to a pipeline latency of
+ * X frame intervals.</p>
+ * <p>This value will normally be 8 or less; however, for a high speed capture session,
+ * the max pipeline depth will be up to 8 x the size of the high speed capture request list.</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureResult#REQUEST_PIPELINE_DEPTH
+ */
+ @PublicKey
+ public static final Key<Byte> REQUEST_PIPELINE_MAX_DEPTH =
+ new Key<Byte>("android.request.pipelineMaxDepth", byte.class);
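+
+ /*
+  * Illustrative sketch (not part of the platform source): turning the max
+  * pipeline depth into a worst-case latency estimate, since a depth of X
+  * stages is X frame intervals. Assumes "chars" is this device's
+  * CameraCharacteristics and a hypothetical 30fps frame interval.
+  *
+  *   Byte maxDepth = chars.get(CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH);
+  *   if (maxDepth != null) {
+  *       long frameIntervalNs = 33333333L; // ~30fps
+  *       long worstCaseLatencyNs = maxDepth * frameIntervalNs;
+  *   }
+  */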
+
+ /**
+ * <p>Defines how many sub-components
+ * a result will be composed of.</p>
+ * <p>In order to combat the pipeline latency, partial results
+ * may be delivered to the application layer from the camera device as
+ * soon as they are available.</p>
+ * <p>Optional; defaults to 1. A value of 1 means that partial
+ * results are not supported, and only the final TotalCaptureResult will
+ * be produced by the camera device.</p>
+ * <p>A typical use case for this might be: after requesting an
+ * auto-focus (AF) lock the new AF state might be available 50%
+ * of the way through the pipeline. The camera device could
+ * then immediately dispatch this state via a partial result to
+ * the application, and the rest of the metadata via later
+ * partial results.</p>
+ * <p><b>Range of valid values:</b><br>
+ * &gt;= 1</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ */
+ @PublicKey
+ public static final Key<Integer> REQUEST_PARTIAL_RESULT_COUNT =
+ new Key<Integer>("android.request.partialResultCount", int.class);
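+
+ /*
+  * Illustrative sketch (not part of the platform source): reacting to early
+  * AF state delivered through partial results, before the TotalCaptureResult
+  * arrives. The callback is assumed to be passed to a capture call on the
+  * session.
+  *
+  *   CameraCaptureSession.CaptureCallback callback =
+  *           new CameraCaptureSession.CaptureCallback() {
+  *       @Override
+  *       public void onCaptureProgressed(CameraCaptureSession session,
+  *               CaptureRequest request, CaptureResult partialResult) {
+  *           Integer afState = partialResult.get(CaptureResult.CONTROL_AF_STATE);
+  *           if (afState != null) {
+  *               // AF state is available ahead of the final result.
+  *           }
+  *       }
+  *   };
+  */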
+
+ /**
+ * <p>List of capabilities that this camera device
+ * advertises as fully supporting.</p>
+ * <p>A capability is a contract that the camera device makes in order
+ * to be able to satisfy one or more use cases.</p>
+ * <p>Listing a capability guarantees that the whole set of features
+ * required to support a common use case will all be available.</p>
+ * <p>Using a subset of the functionality provided by an unsupported
+ * capability may be possible on a specific camera device implementation;
+ * to do this, query each of android.request.availableRequestKeys,
+ * android.request.availableResultKeys, and
+ * android.request.availableCharacteristicsKeys.</p>
+ * <p>The following capabilities are guaranteed to be available on
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} <code>==</code> FULL devices:</p>
+ * <ul>
+ * <li>MANUAL_SENSOR</li>
+ * <li>MANUAL_POST_PROCESSING</li>
+ * </ul>
+ * <p>Other capabilities may be available on either FULL or LIMITED
+ * devices, but the application should query this key to be sure.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE BACKWARD_COMPATIBLE}</li>
+ * <li>{@link #REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR MANUAL_SENSOR}</li>
+ * <li>{@link #REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING MANUAL_POST_PROCESSING}</li>
+ * <li>{@link #REQUEST_AVAILABLE_CAPABILITIES_RAW RAW}</li>
+ * <li>{@link #REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING PRIVATE_REPROCESSING}</li>
+ * <li>{@link #REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS READ_SENSOR_SETTINGS}</li>
+ * <li>{@link #REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE BURST_CAPTURE}</li>
+ * <li>{@link #REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING YUV_REPROCESSING}</li>
+ * <li>{@link #REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT DEPTH_OUTPUT}</li>
+ * <li>{@link #REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO CONSTRAINED_HIGH_SPEED_VIDEO}</li>
+ * </ul></p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see #REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE
+ * @see #REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR
+ * @see #REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING
+ * @see #REQUEST_AVAILABLE_CAPABILITIES_RAW
+ * @see #REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING
+ * @see #REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS
+ * @see #REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE
+ * @see #REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING
+ * @see #REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT
+ * @see #REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO
+ */
+ @PublicKey
+ public static final Key<int[]> REQUEST_AVAILABLE_CAPABILITIES =
+ new Key<int[]>("android.request.availableCapabilities", int[].class);
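+
+ /*
+  * Illustrative sketch (not part of the platform source): testing for a
+  * capability before relying on its feature set; RAW is used as a
+  * hypothetical example. Assumes "chars" is this device's
+  * CameraCharacteristics.
+  *
+  *   int[] caps = chars.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
+  *   boolean supportsRaw = false;
+  *   if (caps != null) {
+  *       for (int cap : caps) {
+  *           if (cap == CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_RAW) {
+  *               supportsRaw = true;
+  *               break;
+  *           }
+  *       }
+  *   }
+  */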
+
+ /**
+ * <p>A list of all keys that the camera device has available
+ * to use with {@link android.hardware.camera2.CaptureRequest }.</p>
+ * <p>Attempting to set a key into a CaptureRequest that is not
+ * listed here will result in an invalid request and will be rejected
+ * by the camera device.</p>
+ * <p>This field can be used to query the feature set of a camera device
+ * at a more granular level than capabilities. This is especially
+ * important for optional keys that are not listed under any capability
+ * in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities}.</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ * @hide
+ */
+ public static final Key<int[]> REQUEST_AVAILABLE_REQUEST_KEYS =
+ new Key<int[]>("android.request.availableRequestKeys", int[].class);
+
+ /**
+ * <p>A list of all keys that the camera device has available
+ * to use with {@link android.hardware.camera2.CaptureResult }.</p>
+ * <p>Attempting to get a key from a CaptureResult that is not
+ * listed here will always return a <code>null</code> value. Getting a key from
+ * a CaptureResult that is listed here will generally never return a <code>null</code>
+ * value.</p>
+ * <p>The following keys may return <code>null</code> unless they are enabled:</p>
+ * <ul>
+ * <li>android.statistics.lensShadingMap (non-null iff {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE android.statistics.lensShadingMapMode} == ON)</li>
+ * </ul>
+ * <p>(Those sometimes-null keys will nevertheless be listed here
+ * if they are available.)</p>
+ * <p>This field can be used to query the feature set of a camera device
+ * at a more granular level than capabilities. This is especially
+ * important for optional keys that are not listed under any capability
+ * in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities}.</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ * @see CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE
+ * @hide
+ */
+ public static final Key<int[]> REQUEST_AVAILABLE_RESULT_KEYS =
+ new Key<int[]>("android.request.availableResultKeys", int[].class);
+
+ /**
+ * <p>A list of all keys that the camera device has available
+ * to use with {@link android.hardware.camera2.CameraCharacteristics }.</p>
+ * <p>This entry follows the same rules as
+ * android.request.availableResultKeys (except that it applies for
+ * CameraCharacteristics instead of CaptureResult). See above for more
+ * details.</p>
+ * <p>This key is available on all devices.</p>
+ * @hide
+ */
+ public static final Key<int[]> REQUEST_AVAILABLE_CHARACTERISTICS_KEYS =
+ new Key<int[]>("android.request.availableCharacteristicsKeys", int[].class);
+
+ /**
+ * <p>The list of image formats that are supported by this
+ * camera device for output streams.</p>
+ * <p>All camera devices will support JPEG and YUV_420_888 formats.</p>
+ * <p>When set to YUV_420_888, the application can access the YUV420 data directly.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * @deprecated
+ * @hide
+ */
+ @Deprecated
+ public static final Key<int[]> SCALER_AVAILABLE_FORMATS =
+ new Key<int[]>("android.scaler.availableFormats", int[].class);
+
+ /**
+ * <p>The minimum frame duration that is supported
+ * for each resolution in android.scaler.availableJpegSizes.</p>
+ * <p>This corresponds to the minimum steady-state frame duration when only
+ * that JPEG stream is active and captured in a burst, with all
+ * processing (typically in android.*.mode) set to FAST.</p>
+ * <p>When multiple streams are configured, the minimum
+ * frame duration will be &gt;= max(individual stream min
+ * durations)</p>
+ * <p><b>Units</b>: Nanoseconds</p>
+ * <p><b>Range of valid values:</b><br>
+ * TODO: Remove property.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * @deprecated
+ * @hide
+ */
+ @Deprecated
+ public static final Key<long[]> SCALER_AVAILABLE_JPEG_MIN_DURATIONS =
+ new Key<long[]>("android.scaler.availableJpegMinDurations", long[].class);
+
+ /**
+ * <p>The JPEG resolutions that are supported by this camera device.</p>
+ * <p>The resolutions are listed as <code>(width, height)</code> pairs. All camera devices will support
+ * sensor maximum resolution (defined by {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}).</p>
+ * <p><b>Range of valid values:</b><br>
+ * TODO: Remove property.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ * @deprecated
+ * @hide
+ */
+ @Deprecated
+ public static final Key<android.util.Size[]> SCALER_AVAILABLE_JPEG_SIZES =
+ new Key<android.util.Size[]>("android.scaler.availableJpegSizes", android.util.Size[].class);
+
+ /**
+ * <p>The maximum ratio between both active area width
+ * and crop region width, and active area height and
+ * crop region height, for {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}.</p>
+ * <p>This represents the maximum amount of zooming possible by
+ * the camera device, or equivalently, the minimum cropping
+ * window size.</p>
+ * <p>Crop regions that have a width or height that is smaller
+ * than this ratio allows will be rounded up to the minimum
+ * allowed size by the camera device.</p>
+ * <p><b>Units</b>: Zoom scale factor</p>
+ * <p><b>Range of valid values:</b><br>
+ * &gt;=1</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#SCALER_CROP_REGION
+ */
+ @PublicKey
+ public static final Key<Float> SCALER_AVAILABLE_MAX_DIGITAL_ZOOM =
+ new Key<Float>("android.scaler.availableMaxDigitalZoom", float.class);
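+
+ /*
+  * Illustrative sketch (not part of the platform source): deriving a centered
+  * crop region for a desired zoom factor, clamped to the advertised maximum.
+  * Assumes "chars" is this device's CameraCharacteristics and "builder" is a
+  * CaptureRequest.Builder; the 2.0f target zoom is hypothetical.
+  *
+  *   Rect active = chars.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+  *   Float maxZoom = chars.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
+  *   float zoom = Math.min(2.0f, maxZoom != null ? maxZoom : 1.0f);
+  *   int cropW = (int) (active.width() / zoom);
+  *   int cropH = (int) (active.height() / zoom);
+  *   int left = (active.width() - cropW) / 2;
+  *   int top = (active.height() - cropH) / 2;
+  *   builder.set(CaptureRequest.SCALER_CROP_REGION,
+  *           new Rect(left, top, left + cropW, top + cropH));
+  */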
+
+ /**
+ * <p>For each available processed output size (defined in
+ * android.scaler.availableProcessedSizes), this property lists the
+ * minimum supportable frame duration for that size.</p>
+ * <p>This should correspond to the frame duration when only that processed
+ * stream is active, with all processing (typically in android.*.mode)
+ * set to FAST.</p>
+ * <p>When multiple streams are configured, the minimum frame duration will
+ * be &gt;= max(individual stream min durations).</p>
+ * <p><b>Units</b>: Nanoseconds</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * @deprecated
+ * @hide
+ */
+ @Deprecated
+ public static final Key<long[]> SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS =
+ new Key<long[]>("android.scaler.availableProcessedMinDurations", long[].class);
+
+ /**
+ * <p>The resolutions available for use with
+ * processed output streams, such as YV12, NV12, and
+ * platform opaque YUV/RGB streams to the GPU or video
+ * encoders.</p>
+ * <p>The resolutions are listed as <code>(width, height)</code> pairs.</p>
+ * <p>For a given use case, the actual maximum supported resolution
+ * may be lower than what is listed here, depending on the destination
+ * Surface for the image data. For example, for recording video,
+ * the video encoder chosen may have a maximum size limit (e.g. 1080p)
+ * smaller than what the camera (e.g. maximum resolution is 3264x2448)
+ * can provide.</p>
+ * <p>Please reference the documentation for the image data destination to
+ * check if it limits the maximum size for image data.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * @deprecated
+ * @hide
+ */
+ @Deprecated
+ public static final Key<android.util.Size[]> SCALER_AVAILABLE_PROCESSED_SIZES =
+ new Key<android.util.Size[]>("android.scaler.availableProcessedSizes", android.util.Size[].class);
+
+ /**
+ * <p>The mapping of image formats that are supported by this
+ * camera device for input streams, to their corresponding output formats.</p>
+ * <p>All camera devices that support at least 1 input stream
+ * ({@link CameraCharacteristics#REQUEST_MAX_NUM_INPUT_STREAMS android.request.maxNumInputStreams} &gt;= 1) will have at least one
+ * available input format.</p>
+ * <p>The camera device will support the following map of formats,
+ * if its dependent capability ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities}) is supported:</p>
+ * <table>
+ * <thead>
+ * <tr>
+ * <th align="left">Input Format</th>
+ * <th align="left">Output Format</th>
+ * <th align="left">Capability</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td align="left">{@link android.graphics.ImageFormat#PRIVATE }</td>
+ * <td align="left">{@link android.graphics.ImageFormat#JPEG }</td>
+ * <td align="left">PRIVATE_REPROCESSING</td>
+ * </tr>
+ * <tr>
+ * <td align="left">{@link android.graphics.ImageFormat#PRIVATE }</td>
+ * <td align="left">{@link android.graphics.ImageFormat#YUV_420_888 }</td>
+ * <td align="left">PRIVATE_REPROCESSING</td>
+ * </tr>
+ * <tr>
+ * <td align="left">{@link android.graphics.ImageFormat#YUV_420_888 }</td>
+ * <td align="left">{@link android.graphics.ImageFormat#JPEG }</td>
+ * <td align="left">YUV_REPROCESSING</td>
+ * </tr>
+ * <tr>
+ * <td align="left">{@link android.graphics.ImageFormat#YUV_420_888 }</td>
+ * <td align="left">{@link android.graphics.ImageFormat#YUV_420_888 }</td>
+ * <td align="left">YUV_REPROCESSING</td>
+ * </tr>
+ * </tbody>
+ * </table>
+ * <p>PRIVATE refers to a device-internal format that is not directly application-visible. A
+ * PRIVATE input surface can be acquired by {@link android.media.ImageReader#newInstance }
+ * with {@link android.graphics.ImageFormat#PRIVATE } as the format.</p>
+ * <p>For a PRIVATE_REPROCESSING-capable camera device, using the PRIVATE format as either input
+ * or output will never hurt maximum frame rate (i.e. {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration getOutputStallDuration(ImageFormat.PRIVATE, size)} is always 0).</p>
+ * <p>Attempting to configure an input stream with output streams not
+ * listed as available in this map is not valid.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ * @see CameraCharacteristics#REQUEST_MAX_NUM_INPUT_STREAMS
+ * @hide
+ */
+ public static final Key<android.hardware.camera2.params.ReprocessFormatsMap> SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP =
+ new Key<android.hardware.camera2.params.ReprocessFormatsMap>("android.scaler.availableInputOutputFormatsMap", android.hardware.camera2.params.ReprocessFormatsMap.class);
+
+ /**
+ * <p>The available stream configurations that this
+ * camera device supports
+ * (i.e. format, width, height, output/input stream).</p>
+ * <p>The configurations are listed as <code>(format, width, height, input?)</code>
+ * tuples.</p>
+ * <p>For a given use case, the actual maximum supported resolution
+ * may be lower than what is listed here, depending on the destination
+ * Surface for the image data. For example, for recording video,
+ * the video encoder chosen may have a maximum size limit (e.g. 1080p)
+ * smaller than what the camera (e.g. maximum resolution is 3264x2448)
+ * can provide.</p>
+ * <p>Please reference the documentation for the image data destination to
+ * check if it limits the maximum size for image data.</p>
+ * <p>Not all output formats may be supported in a configuration with
+ * an input stream of a particular format. For more details, see
+ * android.scaler.availableInputOutputFormatsMap.</p>
+ * <p>The following table describes the minimum required output stream
+ * configurations based on the hardware level
+ * ({@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel}):</p>
+ * <table>
+ * <thead>
+ * <tr>
+ * <th align="center">Format</th>
+ * <th align="center">Size</th>
+ * <th align="center">Hardware Level</th>
+ * <th align="center">Notes</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td align="center">JPEG</td>
+ * <td align="center">{@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</td>
+ * <td align="center">Any</td>
+ * <td align="center"></td>
+ * </tr>
+ * <tr>
+ * <td align="center">JPEG</td>
+ * <td align="center">1920x1080 (1080p)</td>
+ * <td align="center">Any</td>
+ * <td align="center">if 1080p &lt;= activeArraySize</td>
+ * </tr>
+ * <tr>
+ * <td align="center">JPEG</td>
+ * <td align="center">1280x720 (720p)</td>
+ * <td align="center">Any</td>
+ * <td align="center">if 720p &lt;= activeArraySize</td>
+ * </tr>
+ * <tr>
+ * <td align="center">JPEG</td>
+ * <td align="center">640x480 (480p)</td>
+ * <td align="center">Any</td>
+ * <td align="center">if 480p &lt;= activeArraySize</td>
+ * </tr>
+ * <tr>
+ * <td align="center">JPEG</td>
+ * <td align="center">320x240 (240p)</td>
+ * <td align="center">Any</td>
+ * <td align="center">if 240p &lt;= activeArraySize</td>
+ * </tr>
+ * <tr>
+ * <td align="center">YUV_420_888</td>
+ * <td align="center">all output sizes available for JPEG</td>
+ * <td align="center">FULL</td>
+ * <td align="center"></td>
+ * </tr>
+ * <tr>
+ * <td align="center">YUV_420_888</td>
+ * <td align="center">all output sizes available for JPEG, up to the maximum video size</td>
+ * <td align="center">LIMITED</td>
+ * <td align="center"></td>
+ * </tr>
+ * <tr>
+ * <td align="center">IMPLEMENTATION_DEFINED</td>
+ * <td align="center">same as YUV_420_888</td>
+ * <td align="center">Any</td>
+ * <td align="center"></td>
+ * </tr>
+ * </tbody>
+ * </table>
+ * <p>Refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} for additional
+ * mandatory stream configurations on a per-capability basis.</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ * @hide
+ */
+ public static final Key<android.hardware.camera2.params.StreamConfiguration[]> SCALER_AVAILABLE_STREAM_CONFIGURATIONS =
+ new Key<android.hardware.camera2.params.StreamConfiguration[]>("android.scaler.availableStreamConfigurations", android.hardware.camera2.params.StreamConfiguration[].class);
+
+ /**
+ * <p>This lists the minimum frame duration for each
+ * format/size combination.</p>
+ * <p>This should correspond to the frame duration when only that
+ * stream is active, with all processing (typically in android.*.mode)
+ * set to either OFF or FAST.</p>
+ * <p>When multiple streams are used in a request, the minimum frame
+ * duration will be max(individual stream min durations).</p>
+ * <p>The minimum frame duration of a stream (of a particular format, size)
+ * is the same regardless of whether the stream is input or output.</p>
+ * <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} and
+ * android.scaler.availableStallDurations for more details about
+ * calculating the max frame rate.</p>
+ * <p>(Keep in sync with
+ * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration })</p>
+ * <p><b>Units</b>: (format, width, height, ns) x n</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#SENSOR_FRAME_DURATION
+ * @hide
+ */
+ public static final Key<android.hardware.camera2.params.StreamConfigurationDuration[]> SCALER_AVAILABLE_MIN_FRAME_DURATIONS =
+ new Key<android.hardware.camera2.params.StreamConfigurationDuration[]>("android.scaler.availableMinFrameDurations", android.hardware.camera2.params.StreamConfigurationDuration[].class);
+
+ /**
+ * <p>This lists the maximum stall duration for each
+ * output format/size combination.</p>
+ * <p>A stall duration is how much extra time would get added
+ * to the normal minimum frame duration for a repeating request
+ * that has streams with non-zero stall.</p>
+ * <p>For example, consider JPEG captures which have the following
+ * characteristics:</p>
+ * <ul>
+ * <li>JPEG streams act like processed YUV streams in requests for which
+ * they are not included; in requests in which they are directly
+ * referenced, they act as JPEG streams. This is because supporting a
+ * JPEG stream requires the underlying YUV data to always be ready for
+ * use by a JPEG encoder, but the encoder will only be used (and impact
+ * frame duration) on requests that actually reference a JPEG stream.</li>
+ * <li>The JPEG processor can run concurrently to the rest of the camera
+ * pipeline, but cannot process more than 1 capture at a time.</li>
+ * </ul>
+ * <p>In other words, using a repeating YUV request would result
+ * in a steady frame rate (let's say it's 30 FPS). If a single
+ * JPEG request is submitted periodically, the frame rate will stay
+ * at 30 FPS (as long as we wait for the previous JPEG to return each
+ * time). If we try to submit a repeating YUV + JPEG request, then
+ * the frame rate will drop from 30 FPS.</p>
+ * <p>In general, submitting a new request with a non-0 stall time
+ * stream will <em>not</em> cause a frame rate drop unless there are still
+ * outstanding buffers for that stream from previous requests.</p>
+ * <p>Submitting a repeating request with a set of streams (call this <code>S</code>)
+ * is the same as setting the minimum frame duration to
+ * the normal minimum frame duration corresponding to <code>S</code>, plus
+ * the maximum stall duration for <code>S</code>.</p>
+ * <p>When interleaving requests with and without a stall duration,
+ * a request will stall by the maximum of the remaining times
+ * for each can-stall stream with outstanding buffers.</p>
+ * <p>This means that a stalling request will not have an exposure start
+ * until the stall has completed.</p>
+ * <p>This should correspond to the stall duration when only that stream is
+ * active, with all processing (typically in android.*.mode) set to FAST
+ * or OFF. Setting any of the processing modes to HIGH_QUALITY
+ * effectively results in an indeterminate stall duration for all
+ * streams in a request (the regular stall calculation rules are
+ * ignored).</p>
+ * <p>The following formats may always have a stall duration:</p>
+ * <ul>
+ * <li>{@link android.graphics.ImageFormat#JPEG }</li>
+ * <li>{@link android.graphics.ImageFormat#RAW_SENSOR }</li>
+ * </ul>
+ * <p>The following formats will never have a stall duration:</p>
+ * <ul>
+ * <li>{@link android.graphics.ImageFormat#YUV_420_888 }</li>
+ * <li>{@link android.graphics.ImageFormat#RAW10 }</li>
+ * </ul>
+ * <p>All other formats may or may not have an allowed stall duration on
+ * a per-capability basis; refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities}
+ * for more details.</p>
+ * <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} for more information about
+ * calculating the max frame rate (absent stalls).</p>
+ * <p>(Keep up to date with
+ * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration } )</p>
+ * <p><b>Units</b>: (format, width, height, ns) x n</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ * @see CaptureRequest#SENSOR_FRAME_DURATION
+ * @hide
+ */
+ public static final Key<android.hardware.camera2.params.StreamConfigurationDuration[]> SCALER_AVAILABLE_STALL_DURATIONS =
+ new Key<android.hardware.camera2.params.StreamConfigurationDuration[]>("android.scaler.availableStallDurations", android.hardware.camera2.params.StreamConfigurationDuration[].class);
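+
+ /*
+  * Illustrative sketch (not part of the platform source): estimating the
+  * effective frame duration when a JPEG stream stalls, using the public
+  * StreamConfigurationMap queries. Assumes "map" is this device's
+  * StreamConfigurationMap and "size" is a supported JPEG output size.
+  *
+  *   long minNs = map.getOutputMinFrameDuration(ImageFormat.JPEG, size);
+  *   long stallNs = map.getOutputStallDuration(ImageFormat.JPEG, size);
+  *   long stalledFrameNs = minNs + stallNs; // duration while the JPEG stalls
+  */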
+
+ /**
+ * <p>The available stream configurations that this
+ * camera device supports; also includes the minimum frame durations
+ * and the stall durations for each format/size combination.</p>
+ * <p>All camera devices will support sensor maximum resolution (defined by
+ * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}) for the JPEG format.</p>
+ * <p>For a given use case, the actual maximum supported resolution
+ * may be lower than what is listed here, depending on the destination
+ * Surface for the image data. For example, for recording video,
+ * the video encoder chosen may have a maximum size limit (e.g. 1080p)
+ * smaller than what the camera (e.g. maximum resolution is 3264x2448)
+ * can provide.</p>
+ * <p>Please reference the documentation for the image data destination to
+ * check if it limits the maximum size for image data.</p>
+ * <p>The following table describes the minimum required output stream
+ * configurations based on the hardware level
+ * ({@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel}):</p>
+ * <table>
+ * <thead>
+ * <tr>
+ * <th align="center">Format</th>
+ * <th align="center">Size</th>
+ * <th align="center">Hardware Level</th>
+ * <th align="center">Notes</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td align="center">{@link android.graphics.ImageFormat#JPEG }</td>
+ * <td align="center">{@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize} (*1)</td>
+ * <td align="center">Any</td>
+ * <td align="center"></td>
+ * </tr>
+ * <tr>
+ * <td align="center">{@link android.graphics.ImageFormat#JPEG }</td>
+ * <td align="center">1920x1080 (1080p)</td>
+ * <td align="center">Any</td>
+ * <td align="center">if 1080p &lt;= activeArraySize</td>
+ * </tr>
+ * <tr>
+ * <td align="center">{@link android.graphics.ImageFormat#JPEG }</td>
+ * <td align="center">1280x720 (720p)</td>
+ * <td align="center">Any</td>
+ * <td align="center">if 720p &lt;= activeArraySize</td>
+ * </tr>
+ * <tr>
+ * <td align="center">{@link android.graphics.ImageFormat#JPEG }</td>
+ * <td align="center">640x480 (480p)</td>
+ * <td align="center">Any</td>
+ * <td align="center">if 480p &lt;= activeArraySize</td>
+ * </tr>
+ * <tr>
+ * <td align="center">{@link android.graphics.ImageFormat#JPEG }</td>
+ * <td align="center">320x240 (240p)</td>
+ * <td align="center">Any</td>
+ * <td align="center">if 240p &lt;= activeArraySize</td>
+ * </tr>
+ * <tr>
+ * <td align="center">{@link android.graphics.ImageFormat#YUV_420_888 }</td>
+ * <td align="center">all output sizes available for JPEG</td>
+ * <td align="center">FULL</td>
+ * <td align="center"></td>
+ * </tr>
+ * <tr>
+ * <td align="center">{@link android.graphics.ImageFormat#YUV_420_888 }</td>
+ * <td align="center">all output sizes available for JPEG, up to the maximum video size</td>
+ * <td align="center">LIMITED</td>
+ * <td align="center"></td>
+ * </tr>
+ * <tr>
+ * <td align="center">{@link android.graphics.ImageFormat#PRIVATE }</td>
+ * <td align="center">same as YUV_420_888</td>
+ * <td align="center">Any</td>
+ * <td align="center"></td>
+ * </tr>
+ * </tbody>
+ * </table>
+ * <p>Refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} and {@link android.hardware.camera2.CameraDevice#createCaptureSession } for additional mandatory
+ * stream configurations on a per-capability basis.</p>
+ * <p>*1: For the JPEG format, the sizes may be restricted by the conditions below:</p>
+ * <ul>
+ * <li>The HAL may choose the aspect ratio of each JPEG size to be one of the well-known ones
+ * (e.g. 4:3, 16:9, 3:2, etc.). If the sensor maximum resolution
+ * (defined by {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}) has an aspect ratio other than these,
+ * it does not have to be included in the supported JPEG sizes.</li>
+ * <li>Some hardware JPEG encoders may have pixel boundary alignment requirements, such as
+ * the dimensions being a multiple of 16.
+ * Therefore, the maximum JPEG size may be smaller than the sensor maximum resolution.
+ * However, the largest JPEG size will be as close as possible to the sensor maximum
+ * resolution given the above constraints. After aspect ratio adjustments, any
+ * additional size reduction due to other issues must be less than 3% in area. For example,
+ * if the sensor maximum resolution is 3280x2464, the maximum JPEG size has aspect
+ * ratio 4:3, and the JPEG encoder alignment requirement is 16, then the maximum JPEG size will be
+ * 3264x2448.</li>
+ * </ul>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ */
+ @PublicKey
+ @SyntheticKey
+ public static final Key<android.hardware.camera2.params.StreamConfigurationMap> SCALER_STREAM_CONFIGURATION_MAP =
+ new Key<android.hardware.camera2.params.StreamConfigurationMap>("android.scaler.streamConfigurationMap", android.hardware.camera2.params.StreamConfigurationMap.class);
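+
+ /*
+  * Illustrative sketch (not part of the platform source): listing the
+  * supported JPEG output sizes and querying the minimum frame duration for
+  * one of them. Assumes "chars" is this device's CameraCharacteristics.
+  *
+  *   StreamConfigurationMap map =
+  *           chars.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+  *   if (map != null) {
+  *       Size[] jpegSizes = map.getOutputSizes(ImageFormat.JPEG);
+  *       if (jpegSizes != null && jpegSizes.length > 0) {
+  *           long minFrameNs = map.getOutputMinFrameDuration(
+  *                   ImageFormat.JPEG, jpegSizes[0]);
+  *       }
+  *   }
+  */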
+
+ /**
+ * <p>The crop type that this camera device supports.</p>
+ * <p>When passing a non-centered crop region ({@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}) to a camera
+ * device that only supports CENTER_ONLY cropping, the camera device will move the
+ * crop region to the center of the sensor active array ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize})
+ * and keep the crop region width and height unchanged. The camera device will return the
+ * final used crop region in the capture result metadata {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}.</p>
+ * <p>Camera devices that support FREEFORM cropping will support any crop region that
+ * is inside of the active array. The camera device will apply the same crop region and
+ * return the final used crop region in capture result metadata {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}.</p>
+ * <p>LEGACY capability devices will only support CENTER_ONLY cropping.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #SCALER_CROPPING_TYPE_CENTER_ONLY CENTER_ONLY}</li>
+ * <li>{@link #SCALER_CROPPING_TYPE_FREEFORM FREEFORM}</li>
+ * </ul></p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#SCALER_CROP_REGION
+ * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ * @see #SCALER_CROPPING_TYPE_CENTER_ONLY
+ * @see #SCALER_CROPPING_TYPE_FREEFORM
+ */
+ @PublicKey
+ public static final Key<Integer> SCALER_CROPPING_TYPE =
+ new Key<Integer>("android.scaler.croppingType", int.class);
+
+ /**
+ * <p>The area of the image sensor which corresponds to active pixels after any geometric
+ * distortion correction has been applied.</p>
+ * <p>This is the rectangle representing the size of the active region of the sensor (i.e.
+ * the region that actually receives light from the scene) after any geometric correction
+ * has been applied, and should be treated as the maximum size in pixels of any of the
+ * image output formats aside from the raw formats.</p>
+ * <p>This rectangle is defined relative to the full pixel array; (0,0) is the top-left of
+ * the full pixel array, and the size of the full pixel array is given by
+ * {@link CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE android.sensor.info.pixelArraySize}.</p>
+ * <p>The coordinate system for most other keys that list pixel coordinates, including
+ * {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}, is defined relative to the active array rectangle given in
+ * this field, with <code>(0, 0)</code> being the top-left of this rectangle.</p>
+ * <p>The active array may be smaller than the full pixel array, since the full array may
+ * include black calibration pixels or other inactive regions, and geometric correction
+ * resulting in scaling or cropping may have been applied.</p>
+ * <p><b>Units</b>: Pixel coordinates on the image sensor</p>
+ * <p>This key is available on all devices.</p>
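+ * <p>A sketch (names assumed) of reading this rectangle; its width and height give the
+ * maximum dimensions of processed (non-raw) output images:</p>
+ * <pre><code>android.graphics.Rect activeArray =
+ *         characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+ * int maxProcessedWidth = activeArray.width();
+ * int maxProcessedHeight = activeArray.height();
+ * </code></pre>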
+ *
+ * @see CaptureRequest#SCALER_CROP_REGION
+ * @see CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE
+ */
+ @PublicKey
+ public static final Key<android.graphics.Rect> SENSOR_INFO_ACTIVE_ARRAY_SIZE =
+ new Key<android.graphics.Rect>("android.sensor.info.activeArraySize", android.graphics.Rect.class);
+
+ /**
+ * <p>Range of sensitivities for {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity} supported by this
+ * camera device.</p>
+ * <p>The values are the standard ISO sensitivity values,
+ * as defined in ISO 12232:2006.</p>
+ * <p><b>Range of valid values:</b><br>
+ * Min &lt;= 100, Max &gt;= 800</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
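+ * <p>A minimal sketch (names assumed) of clamping a desired ISO value to this range
+ * before setting it on a {@code CaptureRequest.Builder}:</p>
+ * <pre><code>android.util.Range&lt;Integer&gt; range =
+ *         characteristics.get(CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE);
+ * if (range != null) {
+ *     // Clamp the requested sensitivity to what the device supports.
+ *     builder.set(CaptureRequest.SENSOR_SENSITIVITY, range.clamp(desiredIso));
+ * }
+ * </code></pre>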
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CaptureRequest#SENSOR_SENSITIVITY
+ */
+ @PublicKey
+ public static final Key<android.util.Range<Integer>> SENSOR_INFO_SENSITIVITY_RANGE =
+ new Key<android.util.Range<Integer>>("android.sensor.info.sensitivityRange", new TypeReference<android.util.Range<Integer>>() {{ }});
+
+ /**
+ * <p>The arrangement of color filters on the sensor;
+ * represents the colors in the top-left 2x2 section of
+ * the sensor, in reading order.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB RGGB}</li>
+ * <li>{@link #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG GRBG}</li>
+ * <li>{@link #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG GBRG}</li>
+ * <li>{@link #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR BGGR}</li>
+ * <li>{@link #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGB RGB}</li>
+ * </ul></p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB
+ * @see #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG
+ * @see #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG
+ * @see #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR
+ * @see #SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGB
+ */
+ @PublicKey
+ public static final Key<Integer> SENSOR_INFO_COLOR_FILTER_ARRANGEMENT =
+ new Key<Integer>("android.sensor.info.colorFilterArrangement", int.class);
+
+ /**
+ * <p>The range of image exposure times for {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime} supported
+ * by this camera device.</p>
+ * <p><b>Units</b>: Nanoseconds</p>
+ * <p><b>Range of valid values:</b><br>
+ * The minimum exposure time will be less than 100 us. For FULL
+ * capability devices ({@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} == FULL),
+ * the maximum exposure time will be greater than 100 ms.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CaptureRequest#SENSOR_EXPOSURE_TIME
+ */
+ @PublicKey
+ public static final Key<android.util.Range<Long>> SENSOR_INFO_EXPOSURE_TIME_RANGE =
+ new Key<android.util.Range<Long>>("android.sensor.info.exposureTimeRange", new TypeReference<android.util.Range<Long>>() {{ }});
+
+ /**
+ * <p>The maximum possible frame duration (minimum frame rate) for
+ * {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} that is supported by this camera device.</p>
+ * <p>Attempting to use frame durations beyond the maximum will result in the frame
+ * duration being clipped to the maximum. See that control for a full definition of frame
+ * durations.</p>
+ * <p>Refer to {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration }
+ * for the minimum frame duration values.</p>
+ * <p><b>Units</b>: Nanoseconds</p>
+ * <p><b>Range of valid values:</b><br>
+ * For FULL capability devices
+ * ({@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} == FULL), at least 100 ms.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
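+ * <p>For example (a sketch, names assumed), the lowest supported frame rate follows
+ * directly from this value:</p>
+ * <pre><code>Long maxFrameDurationNs =
+ *         characteristics.get(CameraCharacteristics.SENSOR_INFO_MAX_FRAME_DURATION);
+ * // A 100 ms maximum frame duration corresponds to a 10 fps minimum frame rate.
+ * double minFps = 1e9 / maxFrameDurationNs;
+ * </code></pre>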
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CaptureRequest#SENSOR_FRAME_DURATION
+ */
+ @PublicKey
+ public static final Key<Long> SENSOR_INFO_MAX_FRAME_DURATION =
+ new Key<Long>("android.sensor.info.maxFrameDuration", long.class);
+
+ /**
+ * <p>The physical dimensions of the full pixel
+ * array.</p>
+ * <p>This is the physical size of the sensor pixel
+ * array defined by {@link CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE android.sensor.info.pixelArraySize}.</p>
+ * <p><b>Units</b>: Millimeters</p>
+ * <p>This key is available on all devices.</p>
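+ * <p>For example (a sketch, names assumed), the horizontal pixel pitch in millimeters
+ * follows from this key and {@link CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE android.sensor.info.pixelArraySize}:</p>
+ * <pre><code>android.util.SizeF physicalSize =
+ *         characteristics.get(CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE);
+ * android.util.Size pixelArray =
+ *         characteristics.get(CameraCharacteristics.SENSOR_INFO_PIXEL_ARRAY_SIZE);
+ * // Physical width in mm divided by pixel count gives the pixel pitch in mm.
+ * float pixelPitchMm = physicalSize.getWidth() / pixelArray.getWidth();
+ * </code></pre>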
+ *
+ * @see CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE
+ */
+ @PublicKey
+ public static final Key<android.util.SizeF> SENSOR_INFO_PHYSICAL_SIZE =
+ new Key<android.util.SizeF>("android.sensor.info.physicalSize", android.util.SizeF.class);
+
+ /**
+ * <p>Dimensions of the full pixel array, possibly
+ * including black calibration pixels.</p>
+ * <p>The pixel count of the full pixel array of the image sensor, which covers the
+ * {@link CameraCharacteristics#SENSOR_INFO_PHYSICAL_SIZE android.sensor.info.physicalSize} area. This represents the full pixel dimensions of
+ * the raw buffers produced by this sensor.</p>
+ * <p>If a camera device supports raw sensor formats, either this or
+ * {@link CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE android.sensor.info.preCorrectionActiveArraySize} defines the maximum dimensions of the raw
+ * output formats listed in {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} (this depends on
+ * whether or not the image sensor returns buffers containing pixels that are not
+ * part of the active array region, for black level calibration or other purposes).</p>
+ * <p>Some parts of the full pixel array may not receive light from the scene,
+ * or be otherwise inactive. The {@link CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE android.sensor.info.preCorrectionActiveArraySize} key
+ * defines the rectangle of active pixels that will be included in processed image
+ * formats.</p>
+ * <p><b>Units</b>: Pixels</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
+ * @see CameraCharacteristics#SENSOR_INFO_PHYSICAL_SIZE
+ * @see CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
+ */
+ @PublicKey
+ public static final Key<android.util.Size> SENSOR_INFO_PIXEL_ARRAY_SIZE =
+ new Key<android.util.Size>("android.sensor.info.pixelArraySize", android.util.Size.class);
+
+ /**
+ * <p>Maximum raw value output by the sensor.</p>
+ * <p>This specifies the fully-saturated encoding level for the raw
+ * sample values from the sensor. Saturation at this level is typically caused by the
+ * sensor becoming highly non-linear or by clipping. The minimum for
+ * each channel is specified by the offset in the
+ * {@link CameraCharacteristics#SENSOR_BLACK_LEVEL_PATTERN android.sensor.blackLevelPattern} key.</p>
+ * <p>The white level is typically determined either by sensor bit depth
+ * (8-14 bits is expected), or by the point where the sensor response
+ * becomes too non-linear to be useful. The default value for this is the
+ * maximum representable value for a 16-bit raw sample (2^16 - 1).</p>
+ * <p>The white level values of captured images may vary for different
+ * capture settings (e.g., {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}). This key
+ * represents a coarse approximation for such cases. It is recommended
+ * to use {@link CaptureResult#SENSOR_DYNAMIC_WHITE_LEVEL android.sensor.dynamicWhiteLevel} for captures when supported
+ * by the camera device, which provides more accurate white level values.</p>
+ * <p><b>Range of valid values:</b><br>
+ * &gt; 255 (8-bit output)</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CameraCharacteristics#SENSOR_BLACK_LEVEL_PATTERN
+ * @see CaptureResult#SENSOR_DYNAMIC_WHITE_LEVEL
+ * @see CaptureRequest#SENSOR_SENSITIVITY
+ */
+ @PublicKey
+ public static final Key<Integer> SENSOR_INFO_WHITE_LEVEL =
+ new Key<Integer>("android.sensor.info.whiteLevel", int.class);
+
+ /**
+ * <p>The time base source for sensor capture start timestamps.</p>
+ * <p>The timestamps provided for captures are always in nanoseconds and monotonic, but
+ * may not be based on a time source that can be compared to other system time sources.</p>
+ * <p>This characteristic defines the source for the timestamps, and therefore whether they
+ * can be compared against other system time sources/timestamps.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN UNKNOWN}</li>
+ * <li>{@link #SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME REALTIME}</li>
+ * </ul></p>
+ * <p>This key is available on all devices.</p>
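+ * <p>For example (a sketch, names assumed), deciding whether capture timestamps can be
+ * compared against {@code SystemClock.elapsedRealtimeNanos()}:</p>
+ * <pre><code>Integer source =
+ *         characteristics.get(CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE);
+ * // Only REALTIME timestamps share a time base with other system timestamps.
+ * boolean comparable = (source != null
+ *         &amp;&amp; source == CameraMetadata.SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME);
+ * </code></pre>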
+ * @see #SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN
+ * @see #SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME
+ */
+ @PublicKey
+ public static final Key<Integer> SENSOR_INFO_TIMESTAMP_SOURCE =
+ new Key<Integer>("android.sensor.info.timestampSource", int.class);
+
+ /**
+ * <p>Whether the RAW images output from this camera device are subject to
+ * lens shading correction.</p>
+ * <p>If TRUE, all images produced by the camera device in the RAW image formats will
+ * have lens shading correction already applied to them. If FALSE, the images will
+ * not be adjusted for lens shading correction.
+ * See {@link CameraCharacteristics#REQUEST_MAX_NUM_OUTPUT_RAW android.request.maxNumOutputRaw} for a list of RAW image formats.</p>
+ * <p>This key will be <code>null</code> for all devices that do not report this information.
+ * Devices with RAW capability will always report this information in this key.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CameraCharacteristics#REQUEST_MAX_NUM_OUTPUT_RAW
+ */
+ @PublicKey
+ public static final Key<Boolean> SENSOR_INFO_LENS_SHADING_APPLIED =
+ new Key<Boolean>("android.sensor.info.lensShadingApplied", boolean.class);
+
+ /**
+ * <p>The area of the image sensor which corresponds to active pixels prior to the
+ * application of any geometric distortion correction.</p>
+ * <p>This is the rectangle representing the size of the active region of the sensor (i.e.
+ * the region that actually receives light from the scene) before any geometric correction
+ * has been applied, and should be treated as the active region rectangle for any of the
+ * raw formats. All metadata associated with raw processing (e.g. the lens shading
+ * correction map and radial distortion fields) treats the top-left of this rectangle as
+ * the origin, (0,0).</p>
+ * <p>The size of this region determines the maximum field of view and the maximum number of
+ * pixels that an image from this sensor can contain, prior to the application of
+ * geometric distortion correction. The effective maximum pixel dimensions of a
+ * post-distortion-corrected image are given by the {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}
+ * field, and the effective maximum field of view for a post-distortion-corrected image
+ * can be calculated by applying the geometric distortion correction fields to this
+ * rectangle, and cropping to the rectangle given in {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.</p>
+ * <p>E.g., to calculate the position of a pixel, (x,y), in a processed YUV output image with
+ * the dimensions in {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}, given the position of a pixel,
+ * (x', y'), in the raw pixel array with dimensions given in
+ * {@link CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE android.sensor.info.pixelArraySize}:</p>
+ * <ol>
+ * <li>Choose a pixel (x', y') within the active array region of the raw buffer given in
+ * {@link CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE android.sensor.info.preCorrectionActiveArraySize}; otherwise this pixel is considered
+ * to be outside of the FOV, and will not be shown in the processed output image.</li>
+ * <li>Apply geometric distortion correction to get the post-distortion pixel coordinate,
+ * (x_i, y_i). When applying geometric correction metadata, note that metadata for raw
+ * buffers is defined relative to the top-left of the
+ * {@link CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE android.sensor.info.preCorrectionActiveArraySize} rectangle.</li>
+ * <li>If the resulting corrected pixel coordinate is within the region given in
+ * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}, then the position of this pixel in the
+ * processed output image buffer is <code>(x_i - activeArray.left, y_i - activeArray.top)</code>,
+ * when the top-left coordinate of that buffer is treated as (0, 0).</li>
+ * </ol>
+ * <p>Thus, for pixel (x', y') = (25, 25) on a sensor where {@link CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE android.sensor.info.pixelArraySize}
+ * is (100,100), {@link CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE android.sensor.info.preCorrectionActiveArraySize} is (10, 10, 100, 100),
+ * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize} is (20, 20, 80, 80), and the geometric distortion
+ * correction doesn't change the pixel coordinate, the resulting pixel selected in
+ * pixel coordinates would be (x, y) = (25, 25) relative to the top-left of the raw buffer
+ * with dimensions given in {@link CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE android.sensor.info.pixelArraySize}, and would be (5, 5)
+ * relative to the top-left of the post-processed YUV output buffer with dimensions given in
+ * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.</p>
+ * <p>The currently supported fields that correct for geometric distortion are:</p>
+ * <ol>
+ * <li>{@link CameraCharacteristics#LENS_RADIAL_DISTORTION android.lens.radialDistortion}.</li>
+ * </ol>
+ * <p>If all of the geometric distortion fields are no-ops, this rectangle will be the same
+ * as the post-distortion-corrected rectangle given in
+ * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.</p>
+ * <p>This rectangle is defined relative to the full pixel array; (0,0) is the top-left of
+ * the full pixel array, and the size of the full pixel array is given by
+ * {@link CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE android.sensor.info.pixelArraySize}.</p>
+ * <p>The pre-correction active array may be smaller than the full pixel array, since the
+ * full array may include black calibration pixels or other inactive regions.</p>
+ * <p><b>Units</b>: Pixel coordinates on the image sensor</p>
+ * <p>This key is available on all devices.</p>
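+ * <p>A minimal sketch of the final translation step above (assuming the distortion
+ * correction is an identity, and that {@code (xi, yi)} is an assumed post-distortion
+ * pixel position in full-pixel-array coordinates):</p>
+ * <pre><code>android.graphics.Rect active =
+ *         characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+ * // Position of the pixel in the processed output buffer, per step 3 above.
+ * int outX = xi - active.left;
+ * int outY = yi - active.top;
+ * </code></pre>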
+ *
+ * @see CameraCharacteristics#LENS_RADIAL_DISTORTION
+ * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ * @see CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE
+ * @see CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
+ */
+ @PublicKey
+ public static final Key<android.graphics.Rect> SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE =
+ new Key<android.graphics.Rect>("android.sensor.info.preCorrectionActiveArraySize", android.graphics.Rect.class);
+
+ /**
+ * <p>The standard reference illuminant used as the scene light source when
+ * calculating the {@link CameraCharacteristics#SENSOR_COLOR_TRANSFORM1 android.sensor.colorTransform1},
+ * {@link CameraCharacteristics#SENSOR_CALIBRATION_TRANSFORM1 android.sensor.calibrationTransform1}, and
+ * {@link CameraCharacteristics#SENSOR_FORWARD_MATRIX1 android.sensor.forwardMatrix1} matrices.</p>
+ * <p>The values in this key correspond to the values defined for the
+ * EXIF LightSource tag. These illuminants are standard light sources
+ * that are often used when calibrating camera devices.</p>
+ * <p>If this key is present, then {@link CameraCharacteristics#SENSOR_COLOR_TRANSFORM1 android.sensor.colorTransform1},
+ * {@link CameraCharacteristics#SENSOR_CALIBRATION_TRANSFORM1 android.sensor.calibrationTransform1}, and
+ * {@link CameraCharacteristics#SENSOR_FORWARD_MATRIX1 android.sensor.forwardMatrix1} will also be present.</p>
+ * <p>Some devices may choose to provide a second set of calibration
+ * information for improved quality, including
+ * {@link CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT2 android.sensor.referenceIlluminant2} and its corresponding matrices.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT DAYLIGHT}</li>
+ * <li>{@link #SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT FLUORESCENT}</li>
+ * <li>{@link #SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN TUNGSTEN}</li>
+ * <li>{@link #SENSOR_REFERENCE_ILLUMINANT1_FLASH FLASH}</li>
+ * <li>{@link #SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER FINE_WEATHER}</li>
+ * <li>{@link #SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER CLOUDY_WEATHER}</li>
+ * <li>{@link #SENSOR_REFERENCE_ILLUMINANT1_SHADE SHADE}</li>
+ * <li>{@link #SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT DAYLIGHT_FLUORESCENT}</li>
+ * <li>{@link #SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT DAY_WHITE_FLUORESCENT}</li>
+ * <li>{@link #SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT COOL_WHITE_FLUORESCENT}</li>
+ * <li>{@link #SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT WHITE_FLUORESCENT}</li>
+ * <li>{@link #SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A STANDARD_A}</li>
+ * <li>{@link #SENSOR_REFERENCE_ILLUMINANT1_STANDARD_B STANDARD_B}</li>
+ * <li>{@link #SENSOR_REFERENCE_ILLUMINANT1_STANDARD_C STANDARD_C}</li>
+ * <li>{@link #SENSOR_REFERENCE_ILLUMINANT1_D55 D55}</li>
+ * <li>{@link #SENSOR_REFERENCE_ILLUMINANT1_D65 D65}</li>
+ * <li>{@link #SENSOR_REFERENCE_ILLUMINANT1_D75 D75}</li>
+ * <li>{@link #SENSOR_REFERENCE_ILLUMINANT1_D50 D50}</li>
+ * <li>{@link #SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN ISO_STUDIO_TUNGSTEN}</li>
+ * </ul></p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CameraCharacteristics#SENSOR_CALIBRATION_TRANSFORM1
+ * @see CameraCharacteristics#SENSOR_COLOR_TRANSFORM1
+ * @see CameraCharacteristics#SENSOR_FORWARD_MATRIX1
+ * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT2
+ * @see #SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT
+ * @see #SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT
+ * @see #SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN
+ * @see #SENSOR_REFERENCE_ILLUMINANT1_FLASH
+ * @see #SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER
+ * @see #SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER
+ * @see #SENSOR_REFERENCE_ILLUMINANT1_SHADE
+ * @see #SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT
+ * @see #SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT
+ * @see #SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT
+ * @see #SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT
+ * @see #SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A
+ * @see #SENSOR_REFERENCE_ILLUMINANT1_STANDARD_B
+ * @see #SENSOR_REFERENCE_ILLUMINANT1_STANDARD_C
+ * @see #SENSOR_REFERENCE_ILLUMINANT1_D55
+ * @see #SENSOR_REFERENCE_ILLUMINANT1_D65
+ * @see #SENSOR_REFERENCE_ILLUMINANT1_D75
+ * @see #SENSOR_REFERENCE_ILLUMINANT1_D50
+ * @see #SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN
+ */
+ @PublicKey
+ public static final Key<Integer> SENSOR_REFERENCE_ILLUMINANT1 =
+ new Key<Integer>("android.sensor.referenceIlluminant1", int.class);
+
+ /**
+ * <p>The standard reference illuminant used as the scene light source when
+ * calculating the {@link CameraCharacteristics#SENSOR_COLOR_TRANSFORM2 android.sensor.colorTransform2},
+ * {@link CameraCharacteristics#SENSOR_CALIBRATION_TRANSFORM2 android.sensor.calibrationTransform2}, and
+ * {@link CameraCharacteristics#SENSOR_FORWARD_MATRIX2 android.sensor.forwardMatrix2} matrices.</p>
+ * <p>See {@link CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 android.sensor.referenceIlluminant1} for more details.</p>
+ * <p>If this key is present, then {@link CameraCharacteristics#SENSOR_COLOR_TRANSFORM2 android.sensor.colorTransform2},
+ * {@link CameraCharacteristics#SENSOR_CALIBRATION_TRANSFORM2 android.sensor.calibrationTransform2}, and
+ * {@link CameraCharacteristics#SENSOR_FORWARD_MATRIX2 android.sensor.forwardMatrix2} will also be present.</p>
+ * <p><b>Range of valid values:</b><br>
+ * Any value listed in {@link CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 android.sensor.referenceIlluminant1}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CameraCharacteristics#SENSOR_CALIBRATION_TRANSFORM2
+ * @see CameraCharacteristics#SENSOR_COLOR_TRANSFORM2
+ * @see CameraCharacteristics#SENSOR_FORWARD_MATRIX2
+ * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1
+ */
+ @PublicKey
+ public static final Key<Byte> SENSOR_REFERENCE_ILLUMINANT2 =
+ new Key<Byte>("android.sensor.referenceIlluminant2", byte.class);
+
+ /**
+ * <p>A per-device calibration transform matrix that maps from the
+ * reference sensor colorspace to the actual device sensor colorspace.</p>
+ * <p>This matrix is used to correct for per-device variations in the
+ * sensor colorspace, and is used for processing raw buffer data.</p>
+ * <p>The matrix is expressed as a 3x3 matrix in row-major-order, and
+ * contains a per-device calibration transform that maps colors
+ * from reference sensor color space (i.e. the "golden module"
+ * colorspace) into this camera device's native sensor color
+ * space under the first reference illuminant
+ * ({@link CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 android.sensor.referenceIlluminant1}).</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1
+ */
+ @PublicKey
+ public static final Key<android.hardware.camera2.params.ColorSpaceTransform> SENSOR_CALIBRATION_TRANSFORM1 =
+ new Key<android.hardware.camera2.params.ColorSpaceTransform>("android.sensor.calibrationTransform1", android.hardware.camera2.params.ColorSpaceTransform.class);
+
+ /**
+ * <p>A per-device calibration transform matrix that maps from the
+ * reference sensor colorspace to the actual device sensor colorspace
+ * (this is the colorspace of the raw buffer data).</p>
+ * <p>This matrix is used to correct for per-device variations in the
+ * sensor colorspace, and is used for processing raw buffer data.</p>
+ * <p>The matrix is expressed as a 3x3 matrix in row-major-order, and
+ * contains a per-device calibration transform that maps colors
+ * from reference sensor color space (i.e. the "golden module"
+ * colorspace) into this camera device's native sensor color
+ * space under the second reference illuminant
+ * ({@link CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT2 android.sensor.referenceIlluminant2}).</p>
+ * <p>This matrix will only be present if the second reference
+ * illuminant is present.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT2
+ */
+ @PublicKey
+ public static final Key<android.hardware.camera2.params.ColorSpaceTransform> SENSOR_CALIBRATION_TRANSFORM2 =
+ new Key<android.hardware.camera2.params.ColorSpaceTransform>("android.sensor.calibrationTransform2", android.hardware.camera2.params.ColorSpaceTransform.class);
+
+ /**
+ * <p>A matrix that transforms color values from CIE XYZ color space to
+ * reference sensor color space.</p>
+ * <p>This matrix is used to convert from the standard CIE XYZ color
+ * space to the reference sensor colorspace, and is used when processing
+ * raw buffer data.</p>
+ * <p>The matrix is expressed as a 3x3 matrix in row-major-order, and
+ * contains a color transform matrix that maps colors from the CIE
+ * XYZ color space to the reference sensor color space (i.e. the
+ * "golden module" colorspace) under the first reference illuminant
+ * ({@link CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 android.sensor.referenceIlluminant1}).</p>
+ * <p>The white points chosen in both the reference sensor color space
+ * and the CIE XYZ colorspace when calculating this transform will
+ * match the standard white point for the first reference illuminant
+ * (i.e. no chromatic adaptation will be applied by this transform).</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1
+ */
+ @PublicKey
+ public static final Key<android.hardware.camera2.params.ColorSpaceTransform> SENSOR_COLOR_TRANSFORM1 =
+ new Key<android.hardware.camera2.params.ColorSpaceTransform>("android.sensor.colorTransform1", android.hardware.camera2.params.ColorSpaceTransform.class);
+
+ /**
+ * <p>A matrix that transforms color values from CIE XYZ color space to
+ * reference sensor color space.</p>
+ * <p>This matrix is used to convert from the standard CIE XYZ color
+ * space to the reference sensor colorspace, and is used when processing
+ * raw buffer data.</p>
+ * <p>The matrix is expressed as a 3x3 matrix in row-major-order, and
+ * contains a color transform matrix that maps colors from the CIE
+ * XYZ color space to the reference sensor color space (i.e. the
+ * "golden module" colorspace) under the second reference illuminant
+ * ({@link CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT2 android.sensor.referenceIlluminant2}).</p>
+ * <p>The white points chosen in both the reference sensor color space
+ * and the CIE XYZ colorspace when calculating this transform will
+ * match the standard white point for the second reference illuminant
+ * (i.e. no chromatic adaptation will be applied by this transform).</p>
+ * <p>This matrix will only be present if the second reference
+ * illuminant is present.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT2
+ */
+ @PublicKey
+ public static final Key<android.hardware.camera2.params.ColorSpaceTransform> SENSOR_COLOR_TRANSFORM2 =
+ new Key<android.hardware.camera2.params.ColorSpaceTransform>("android.sensor.colorTransform2", android.hardware.camera2.params.ColorSpaceTransform.class);
+
+ /**
+ * <p>A matrix that transforms white balanced camera colors from the reference
+ * sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.</p>
+ * <p>This matrix is used to convert to the standard CIE XYZ colorspace, and
+ * is used when processing raw buffer data.</p>
+ * <p>This matrix is expressed as a 3x3 matrix in row-major-order, and contains
+ * a color transform matrix that maps white balanced colors from the
+ * reference sensor color space to the CIE XYZ color space with a D50 white
+ * point.</p>
+ * <p>Under the first reference illuminant ({@link CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 android.sensor.referenceIlluminant1})
+ * this matrix is chosen so that the standard white point for this reference
+ * illuminant in the reference sensor colorspace is mapped to D50 in the
+ * CIE XYZ colorspace.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1
+ */
+ @PublicKey
+ public static final Key<android.hardware.camera2.params.ColorSpaceTransform> SENSOR_FORWARD_MATRIX1 =
+ new Key<android.hardware.camera2.params.ColorSpaceTransform>("android.sensor.forwardMatrix1", android.hardware.camera2.params.ColorSpaceTransform.class);
+
+ /**
+ * <p>A matrix that transforms white balanced camera colors from the reference
+ * sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.</p>
+ * <p>This matrix is used to convert to the standard CIE XYZ colorspace, and
+ * is used when processing raw buffer data.</p>
+ * <p>This matrix is expressed as a 3x3 matrix in row-major-order, and contains
+ * a color transform matrix that maps white balanced colors from the
+ * reference sensor color space to the CIE XYZ color space with a D50 white
+ * point.</p>
+ * <p>Under the second reference illuminant ({@link CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT2 android.sensor.referenceIlluminant2})
+ * this matrix is chosen so that the standard white point for this reference
+ * illuminant in the reference sensor colorspace is mapped to D50 in the
+ * CIE XYZ colorspace.</p>
+ * <p>This matrix will only be present if the second reference
+ * illuminant is present.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT2
+ */
+ @PublicKey
+ public static final Key<android.hardware.camera2.params.ColorSpaceTransform> SENSOR_FORWARD_MATRIX2 =
+ new Key<android.hardware.camera2.params.ColorSpaceTransform>("android.sensor.forwardMatrix2", android.hardware.camera2.params.ColorSpaceTransform.class);
+
+ /**
+ * <p>A fixed black level offset for each of the color filter arrangement
+ * (CFA) mosaic channels.</p>
+ * <p>This key specifies the zero light value for each of the CFA mosaic
+ * channels in the camera sensor. The maximal value output by the
+ * sensor is represented by the value in {@link CameraCharacteristics#SENSOR_INFO_WHITE_LEVEL android.sensor.info.whiteLevel}.</p>
+ * <p>The values are given in the same order as channels listed for the CFA
+ * layout key (see {@link CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT android.sensor.info.colorFilterArrangement}), i.e. the
+ * nth value given corresponds to the black level offset for the nth
+ * color channel listed in the CFA.</p>
+ * <p>The black level values of captured images may vary for different
+ * capture settings (e.g., {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}). This key
+ * represents a coarse approximation for such cases. It is recommended to
+ * use {@link CaptureResult#SENSOR_DYNAMIC_BLACK_LEVEL android.sensor.dynamicBlackLevel} or use pixels from
+ * {@link CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS android.sensor.opticalBlackRegions} directly for captures when
+ * supported by the camera device, which provides more accurate black
+ * level values. For raw capture in particular, it is recommended to use
+ * pixels from {@link CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS android.sensor.opticalBlackRegions} to calculate black
+ * level values for each frame.</p>
+ * <p><b>Range of valid values:</b><br>
+ * &gt;= 0 for each.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
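+ * <p>A sketch (names assumed) of reading the per-channel offsets from this key:</p>
+ * <pre><code>android.hardware.camera2.params.BlackLevelPattern pattern =
+ *         characteristics.get(CameraCharacteristics.SENSOR_BLACK_LEVEL_PATTERN);
+ * if (pattern != null) {
+ *     // Offset for the top-left channel of the 2x2 CFA layout.
+ *     int topLeftOffset = pattern.getOffsetForIndex(0, 0);
+ * }
+ * </code></pre>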
+ *
+ * @see CaptureResult#SENSOR_DYNAMIC_BLACK_LEVEL
+ * @see CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
+ * @see CameraCharacteristics#SENSOR_INFO_WHITE_LEVEL
+ * @see CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS
+ * @see CaptureRequest#SENSOR_SENSITIVITY
+ */
+ @PublicKey
+ public static final Key<android.hardware.camera2.params.BlackLevelPattern> SENSOR_BLACK_LEVEL_PATTERN =
+ new Key<android.hardware.camera2.params.BlackLevelPattern>("android.sensor.blackLevelPattern", android.hardware.camera2.params.BlackLevelPattern.class);
+
+ /**
+ * <p>Maximum sensitivity that is implemented
+ * purely through analog gain.</p>
+ * <p>For {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity} values less than or
+ * equal to this, all applied gain must be analog. For
+ * values above this, the gain applied can be a mix of analog and
+ * digital.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CaptureRequest#SENSOR_SENSITIVITY
+ */
+ @PublicKey
+ public static final Key<Integer> SENSOR_MAX_ANALOG_SENSITIVITY =
+ new Key<Integer>("android.sensor.maxAnalogSensitivity", int.class);
+
+ /**
+ * <p>Clockwise angle through which the output image needs to be rotated to be
+ * upright on the device screen in its native orientation.</p>
+ * <p>Also defines the direction of rolling shutter readout, which is from top to bottom in
+ * the sensor's coordinate system.</p>
+ * <p><b>Units</b>: Degrees of clockwise rotation; always a multiple of
+ * 90</p>
+ * <p><b>Range of valid values:</b><br>
+ * 0, 90, 180, 270</p>
+ * <p>This key is available on all devices.</p>
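+ * <p>A sketch (back-facing case only; {@code deviceRotationDegrees} is an assumed
+ * clockwise device rotation, e.g. from {@code OrientationEventListener}) of the JPEG
+ * rotation needed for an upright image:</p>
+ * <pre><code>int sensorOrientation =
+ *         characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
+ * // Combine the sensor and device rotations, normalized to [0, 360).
+ * int jpegRotation = (sensorOrientation + deviceRotationDegrees + 360) % 360;
+ * </code></pre>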
+ */
+ @PublicKey
+ public static final Key<Integer> SENSOR_ORIENTATION =
+ new Key<Integer>("android.sensor.orientation", int.class);
+
+ /**
+ * <p>List of sensor test pattern modes for {@link CaptureRequest#SENSOR_TEST_PATTERN_MODE android.sensor.testPatternMode}
+ * supported by this camera device.</p>
+ * <p>Defaults to OFF, and always includes OFF if defined.</p>
+ * <p><b>Range of valid values:</b><br>
+ * Any value listed in {@link CaptureRequest#SENSOR_TEST_PATTERN_MODE android.sensor.testPatternMode}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CaptureRequest#SENSOR_TEST_PATTERN_MODE
+ */
+ @PublicKey
+ public static final Key<int[]> SENSOR_AVAILABLE_TEST_PATTERN_MODES =
+ new Key<int[]>("android.sensor.availableTestPatternModes", int[].class);
+
+ /**
+ * <p>List of disjoint rectangles indicating the sensor
+ * optically shielded black pixel regions.</p>
+ * <p>In most camera sensors, the active array is surrounded by some
+ * optically shielded pixel areas. By blocking light, these pixels
+ * provide a reliable black reference for black level compensation
+ * in the active array region.</p>
+ * <p>This key provides a list of disjoint rectangles specifying the
+ * optically shielded (with a metal shield) black pixel regions, if the
+ * camera device is capable of reading out these black
+ * pixels in the output raw images. In comparison to the fixed black
+ * level values reported by {@link CameraCharacteristics#SENSOR_BLACK_LEVEL_PATTERN android.sensor.blackLevelPattern}, this key
+ * may provide a more accurate way for the application to calculate the
+ * black level of each captured raw image.</p>
+ * <p>When this key is reported, the {@link CaptureResult#SENSOR_DYNAMIC_BLACK_LEVEL android.sensor.dynamicBlackLevel} and
+ * {@link CaptureResult#SENSOR_DYNAMIC_WHITE_LEVEL android.sensor.dynamicWhiteLevel} keys will also be reported.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CameraCharacteristics#SENSOR_BLACK_LEVEL_PATTERN
+ * @see CaptureResult#SENSOR_DYNAMIC_BLACK_LEVEL
+ * @see CaptureResult#SENSOR_DYNAMIC_WHITE_LEVEL
+ */
+ @PublicKey
+ public static final Key<android.graphics.Rect[]> SENSOR_OPTICAL_BLACK_REGIONS =
+ new Key<android.graphics.Rect[]>("android.sensor.opticalBlackRegions", android.graphics.Rect[].class);
+
+ /**
+ * <p>List of lens shading modes for {@link CaptureRequest#SHADING_MODE android.shading.mode} that are supported by this camera device.</p>
+ * <p>This list contains lens shading modes that can be set for the camera device.
+ * Camera devices that support the MANUAL_POST_PROCESSING capability will always
+ * list OFF and FAST mode. This includes all FULL level devices.
+ * LEGACY devices will always only support FAST mode.</p>
+ * <p><b>Range of valid values:</b><br>
+ * Any value listed in {@link CaptureRequest#SHADING_MODE android.shading.mode}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#SHADING_MODE
+ */
+ @PublicKey
+ public static final Key<int[]> SHADING_AVAILABLE_MODES =
+ new Key<int[]>("android.shading.availableModes", int[].class);
+
+ /**
+ * <p>List of face detection modes for {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode} that are
+ * supported by this camera device.</p>
+ * <p>OFF is always supported.</p>
+ * <p><b>Range of valid values:</b><br>
+ * Any value listed in {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE
+ */
+ @PublicKey
+ public static final Key<int[]> STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES =
+ new Key<int[]>("android.statistics.info.availableFaceDetectModes", int[].class);
+
+ /**
+ * <p>The maximum number of simultaneously detectable
+ * faces.</p>
+ * <p><b>Range of valid values:</b><br>
+ * 0 for cameras without available face detection; otherwise:
+ * <code>&gt;=4</code> for LIMITED or FULL hardware level devices, or
+ * <code>&gt;0</code> for LEGACY devices.</p>
+ * <p>This key is available on all devices.</p>
+ */
+ @PublicKey
+ public static final Key<Integer> STATISTICS_INFO_MAX_FACE_COUNT =
+ new Key<Integer>("android.statistics.info.maxFaceCount", int.class);
+
+ /**
+ * <p>List of hot pixel map output modes for {@link CaptureRequest#STATISTICS_HOT_PIXEL_MAP_MODE android.statistics.hotPixelMapMode} that are
+ * supported by this camera device.</p>
+ * <p>If no hot pixel map output is available for this camera device, this will contain only
+ * <code>false</code>.</p>
+ * <p><code>true</code> (i.e. ON) is always supported on devices with the RAW capability.</p>
+ * <p><b>Range of valid values:</b><br>
+ * Any value listed in {@link CaptureRequest#STATISTICS_HOT_PIXEL_MAP_MODE android.statistics.hotPixelMapMode}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CaptureRequest#STATISTICS_HOT_PIXEL_MAP_MODE
+ */
+ @PublicKey
+ public static final Key<boolean[]> STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES =
+ new Key<boolean[]>("android.statistics.info.availableHotPixelMapModes", boolean[].class);
+
+ /**
+ * <p>List of lens shading map output modes for {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE android.statistics.lensShadingMapMode} that
+ * are supported by this camera device.</p>
+ * <p>If no lens shading map output is available for this camera device, this key will
+ * contain only OFF.</p>
+ * <p>ON is always supported on devices with the RAW capability.
+ * LEGACY mode devices will always only support OFF.</p>
+ * <p><b>Range of valid values:</b><br>
+ * Any value listed in {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE android.statistics.lensShadingMapMode}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE
+ */
+ @PublicKey
+ public static final Key<int[]> STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES =
+ new Key<int[]>("android.statistics.info.availableLensShadingMapModes", int[].class);
+
+ /**
+ * <p>Maximum number of supported points in the
+ * tonemap curve that can be used for {@link CaptureRequest#TONEMAP_CURVE android.tonemap.curve}.</p>
+ * <p>If the actual number of points provided by the application (in {@link CaptureRequest#TONEMAP_CURVE android.tonemap.curve}) is
+ * less than this maximum, the camera device will resample the curve to its internal
+ * representation, using linear interpolation.</p>
+ * <p>The output curves in the result metadata may have a different number
+ * of points than the input curves, and will represent the actual
+ * hardware curves used as closely as possible when linearly interpolated.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
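+ * <p>A sketch (names assumed) of setting a simple linear curve, which stays well under
+ * any reported maximum:</p>
+ * <pre><code>float[] linear = {0.0f, 0.0f, 1.0f, 1.0f}; // two (Pin, Pout) control points
+ * builder.set(CaptureRequest.TONEMAP_MODE, CameraMetadata.TONEMAP_MODE_CONTRAST_CURVE);
+ * builder.set(CaptureRequest.TONEMAP_CURVE,
+ *         new android.hardware.camera2.params.TonemapCurve(linear, linear, linear));
+ * </code></pre>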
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CaptureRequest#TONEMAP_CURVE
+ */
+ @PublicKey
+ public static final Key<Integer> TONEMAP_MAX_CURVE_POINTS =
+ new Key<Integer>("android.tonemap.maxCurvePoints", int.class);
+
+ /**
+ * <p>List of tonemapping modes for {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} that are supported by this camera
+ * device.</p>
+ * <p>Camera devices that support the MANUAL_POST_PROCESSING capability will always contain
+ * at least one of the following mode combinations:</p>
+ * <ul>
+ * <li>CONTRAST_CURVE, FAST and HIGH_QUALITY</li>
+ * <li>GAMMA_VALUE, PRESET_CURVE, FAST and HIGH_QUALITY</li>
+ * </ul>
+ * <p>This includes all FULL level devices.</p>
+ * <p><b>Range of valid values:</b><br>
+ * Any value listed in {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CaptureRequest#TONEMAP_MODE
+ */
+ @PublicKey
+ public static final Key<int[]> TONEMAP_AVAILABLE_TONE_MAP_MODES =
+ new Key<int[]>("android.tonemap.availableToneMapModes", int[].class);
+
+ /**
+ * <p>A list of camera LEDs that are available on this system.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #LED_AVAILABLE_LEDS_TRANSMIT TRANSMIT}</li>
+ * </ul></p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * @see #LED_AVAILABLE_LEDS_TRANSMIT
+ * @hide
+ */
+ public static final Key<int[]> LED_AVAILABLE_LEDS =
+ new Key<int[]>("android.led.availableLeds", int[].class);
+
+ /**
+ * <p>Generally classifies the overall set of the camera device functionality.</p>
+ * <p>The supported hardware level is a high-level description of the camera device's
+ * capabilities, summarizing several capabilities into one field. Each level adds additional
+ * features to the previous one, and is always a strict superset of the previous level.
+ * The ordering is <code>LEGACY &lt; LIMITED &lt; FULL &lt; LEVEL_3</code>.</p>
+ * <p>Starting from <code>LEVEL_3</code>, the level enumerations are guaranteed to be in increasing
+ * numerical value as well. To check if a given device is at least at a given hardware level,
+ * the following code snippet can be used:</p>
+ * <pre><code>// Returns true if the device supports the required hardware level, or better.
+ * boolean isHardwareLevelSupported(CameraCharacteristics c, int requiredLevel) {
+ * int deviceLevel = c.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
+ * if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
+ * return requiredLevel == deviceLevel;
+ * }
+ * // deviceLevel is not LEGACY, can use numerical sort
+ * return requiredLevel &lt;= deviceLevel;
+ * }
+ * </code></pre>
+ * <p>At a high level, the levels are:</p>
+ * <ul>
+ * <li><code>LEGACY</code> devices operate in a backwards-compatibility mode for older
+ * Android devices, and have very limited capabilities.</li>
+ * <li><code>LIMITED</code> devices represent the
+ * baseline feature set, and may also include additional capabilities that are
+ * subsets of <code>FULL</code>.</li>
+ * <li><code>FULL</code> devices additionally support per-frame manual control of sensor, flash, lens and
+ * post-processing settings, and image capture at a high rate.</li>
+ * <li><code>LEVEL_3</code> devices additionally support YUV reprocessing and RAW image capture, along
+ * with additional output stream configurations.</li>
+ * </ul>
+ * <p>See the individual level enums for full descriptions of the supported capabilities. The
+ * {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} entry describes the device's capabilities at a
+ * finer-grain level, if needed. In addition, many controls have their available settings or
+ * ranges defined in individual {@link android.hardware.camera2.CameraCharacteristics } entries.</p>
+ * <p>Some features are not part of any particular hardware level or capability and must be
+ * queried separately. These include:</p>
+ * <ul>
+ * <li>Calibrated timestamps ({@link CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE android.sensor.info.timestampSource} <code>==</code> REALTIME)</li>
+ * <li>Precision lens control ({@link CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION android.lens.info.focusDistanceCalibration} <code>==</code> CALIBRATED)</li>
+ * <li>Face detection ({@link CameraCharacteristics#STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES android.statistics.info.availableFaceDetectModes})</li>
+ * <li>Optical or electrical image stabilization
+ * ({@link CameraCharacteristics#LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION android.lens.info.availableOpticalStabilization},
+ * {@link CameraCharacteristics#CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES android.control.availableVideoStabilizationModes})</li>
+ * </ul>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED LIMITED}</li>
+ * <li>{@link #INFO_SUPPORTED_HARDWARE_LEVEL_FULL FULL}</li>
+ * <li>{@link #INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY LEGACY}</li>
+ * <li>{@link #INFO_SUPPORTED_HARDWARE_LEVEL_3 3}</li>
+ * </ul></p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES
+ * @see CameraCharacteristics#LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION
+ * @see CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ * @see CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE
+ * @see CameraCharacteristics#STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES
+ * @see #INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED
+ * @see #INFO_SUPPORTED_HARDWARE_LEVEL_FULL
+ * @see #INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY
+ * @see #INFO_SUPPORTED_HARDWARE_LEVEL_3
+ */
+ @PublicKey
+ public static final Key<Integer> INFO_SUPPORTED_HARDWARE_LEVEL =
+ new Key<Integer>("android.info.supportedHardwareLevel", int.class);
+
+ /**
+ * <p>The maximum number of frames that can occur after a request
+ * (different from the previous one) has been submitted, and before the
+ * result's state becomes synchronized.</p>
+ * <p>This defines the maximum distance (in number of metadata results)
+ * between the frame number of the request that has new controls to apply
+ * and the frame number of the result that has all the controls applied.</p>
+ * <p>In other words, this acts as an upper bound on how many frames
+ * must occur before the camera device knows for a fact that the newly
+ * submitted camera settings have been applied in outgoing frames.</p>
+ * <p><b>Units</b>: Frame counts</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #SYNC_MAX_LATENCY_PER_FRAME_CONTROL PER_FRAME_CONTROL}</li>
+ * <li>{@link #SYNC_MAX_LATENCY_UNKNOWN UNKNOWN}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * A positive value, PER_FRAME_CONTROL, or UNKNOWN.</p>
+ * <p>This key is available on all devices.</p>
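+ * <p>A sketch (names assumed) of interpreting this value:</p>
+ * <pre><code>Integer latency = characteristics.get(CameraCharacteristics.SYNC_MAX_LATENCY);
+ * // PER_FRAME_CONTROL means new settings take effect on the very next frame.
+ * boolean perFrameControl = (latency != null
+ *         &amp;&amp; latency == CameraMetadata.SYNC_MAX_LATENCY_PER_FRAME_CONTROL);
+ * </code></pre>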
+ * @see #SYNC_MAX_LATENCY_PER_FRAME_CONTROL
+ * @see #SYNC_MAX_LATENCY_UNKNOWN
+ */
+ @PublicKey
+ public static final Key<Integer> SYNC_MAX_LATENCY =
+ new Key<Integer>("android.sync.maxLatency", int.class);
+
+ /**
+ * <p>The maximal camera capture pipeline stall (in units of frames) introduced by a
+ * reprocess capture request.</p>
+ * <p>The key describes the maximal interference that one reprocess (input) request
+ * can introduce to the camera's simultaneous streaming of regular (output) capture
+ * requests, including repeating requests.</p>
+ * <p>When a reprocessing capture request is submitted while a camera output repeating request
+ * (e.g. preview) is being served by the camera device, it may preempt the camera capture
+ * pipeline for at least one frame duration so that the camera device is unable to process
+ * the following capture request in time for the next sensor start-of-exposure boundary.
+ * When this happens, the application may observe a capture time gap (longer than one frame
+ * duration) between adjacent capture output frames, which usually shows up as a preview
+ * glitch if the repeating request output targets include a preview surface. This key gives
+ * the worst-case number of frame stalls introduced by one reprocess request with any
+ * format/size combination.</p>
+ * <p>If this key reports 0, a reprocess request doesn't introduce any glitch to the
+ * ongoing camera repeating request outputs, as if the reprocess request were never issued.</p>
+ * <p>This key is supported if the camera device supports PRIVATE or YUV reprocessing (
+ * i.e. {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains PRIVATE_REPROCESSING or
+ * YUV_REPROCESSING).</p>
+ * <p><b>Units</b>: Number of frames.</p>
+ * <p><b>Range of valid values:</b><br>
+ * &lt;= 4</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Limited capability</b> -
+ * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ */
+ @PublicKey
+ public static final Key<Integer> REPROCESS_MAX_CAPTURE_STALL =
+ new Key<Integer>("android.reprocess.maxCaptureStall", int.class);
+
+ /**
+ * <p>The available depth dataspace stream
+ * configurations that this camera device supports
+ * (i.e. format, width, height, output/input stream).</p>
+ * <p>These are output stream configurations for use with
+ * dataSpace HAL_DATASPACE_DEPTH. The configurations are
+ * listed as <code>(format, width, height, input?)</code> tuples.</p>
+ * <p>Only devices that support depth output for at least
+ * the HAL_PIXEL_FORMAT_Y16 dense depth map may include
+ * this entry.</p>
+ * <p>A device that also supports the HAL_PIXEL_FORMAT_BLOB
+ * sparse depth point cloud must report a single entry for
+ * the format in this list as <code>(HAL_PIXEL_FORMAT_BLOB,
+ * android.depth.maxDepthSamples, 1, OUTPUT)</code> in addition to
+ * the entries for HAL_PIXEL_FORMAT_Y16.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Limited capability</b> -
+ * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @hide
+ */
+ public static final Key<android.hardware.camera2.params.StreamConfiguration[]> DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS =
+ new Key<android.hardware.camera2.params.StreamConfiguration[]>("android.depth.availableDepthStreamConfigurations", android.hardware.camera2.params.StreamConfiguration[].class);
+
+ /**
+ * <p>This lists the minimum frame duration for each
+ * format/size combination for depth output formats.</p>
+ * <p>This should correspond to the frame duration when only that
+ * stream is active, with all processing (typically in android.*.mode)
+ * set to either OFF or FAST.</p>
+ * <p>When multiple streams are used in a request, the minimum frame
+ * duration will be max(individual stream min durations).</p>
+ * <p>The minimum frame duration of a stream (of a particular format, size)
+ * is the same regardless of whether the stream is input or output.</p>
+ * <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} and
+ * android.scaler.availableStallDurations for more details about
+ * calculating the max frame rate.</p>
+ * <p>(Keep in sync with {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration })</p>
+ * <p><b>Units</b>: (format, width, height, ns) x n</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Limited capability</b> -
+ * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CaptureRequest#SENSOR_FRAME_DURATION
+ * @hide
+ */
+ public static final Key<android.hardware.camera2.params.StreamConfigurationDuration[]> DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS =
+ new Key<android.hardware.camera2.params.StreamConfigurationDuration[]>("android.depth.availableDepthMinFrameDurations", android.hardware.camera2.params.StreamConfigurationDuration[].class);
+
+ /**
+ * <p>This lists the maximum stall duration for each
+ * output format/size combination for depth streams.</p>
+ * <p>A stall duration is how much extra time would get added
+ * to the normal minimum frame duration for a repeating request
+ * that has streams with non-zero stall.</p>
+ * <p>This functions similarly to
+ * android.scaler.availableStallDurations for depth
+ * streams.</p>
+ * <p>All depth output stream formats may have a nonzero stall
+ * duration.</p>
+ * <p><b>Units</b>: (format, width, height, ns) x n</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Limited capability</b> -
+ * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @hide
+ */
+ public static final Key<android.hardware.camera2.params.StreamConfigurationDuration[]> DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS =
+ new Key<android.hardware.camera2.params.StreamConfigurationDuration[]>("android.depth.availableDepthStallDurations", android.hardware.camera2.params.StreamConfigurationDuration[].class);
+
+ /**
+ * <p>Indicates whether a capture request may target both a
+ * DEPTH16 / DEPTH_POINT_CLOUD output, and normal color outputs (such as
+ * YUV_420_888, JPEG, or RAW) simultaneously.</p>
+ * <p>If TRUE, including both depth and color outputs in a single
+ * capture request is not supported. An application must interleave color
+ * and depth requests. If FALSE, a single request can target both types
+ * of output.</p>
+ * <p>Typically, this restriction exists on camera devices that
+ * need to emit a specific pattern or wavelength of light to
+ * measure depth values, which causes the color image to be
+ * corrupted during depth measurement.</p>
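+ * <p>For illustration only, a minimal sketch of checking this key before deciding whether
+ * depth and color targets can share one request ({@code characteristics} is an assumed
+ * local variable, not part of this API):</p>
+ * <pre>{@code
+ * Boolean depthIsExclusive =
+ *         characteristics.get(CameraCharacteristics.DEPTH_DEPTH_IS_EXCLUSIVE);
+ * if (Boolean.TRUE.equals(depthIsExclusive)) {
+ *     // Depth and color outputs must be requested in separate, interleaved requests.
+ * } else {
+ *     // A single request may target both depth and color outputs.
+ * }
+ * }</pre>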
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Limited capability</b> -
+ * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ */
+ @PublicKey
+ public static final Key<Boolean> DEPTH_DEPTH_IS_EXCLUSIVE =
+ new Key<Boolean>("android.depth.depthIsExclusive", boolean.class);
+
+ /*~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
+ * End generated code
+ *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~O@*/
+
+
+
+}
diff --git a/android/hardware/camera2/CameraConstrainedHighSpeedCaptureSession.java b/android/hardware/camera2/CameraConstrainedHighSpeedCaptureSession.java
new file mode 100644
index 00000000..07d2443f
--- /dev/null
+++ b/android/hardware/camera2/CameraConstrainedHighSpeedCaptureSession.java
@@ -0,0 +1,131 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2;
+
+import android.annotation.NonNull;
+import android.hardware.camera2.params.StreamConfigurationMap;
+
+import java.util.List;
+
+/**
+ * A constrained high speed capture session for a {@link CameraDevice}, used for capturing high
+ * speed images from the {@link CameraDevice} for the high speed video recording use case.
+ * <p>
+ * A CameraConstrainedHighSpeedCaptureSession is created by providing a set of target output
+ * surfaces to {@link CameraDevice#createConstrainedHighSpeedCaptureSession}. Once created, the
+ * session is active until a new session is created by the camera device, or the camera device
+ * is closed.
+ * </p>
+ * <p>
+ * An active high speed capture session is a specialized capture session that is only targeted at
+ * high speed video recording (>=120fps) use case if the camera device supports high speed video
+ * capability (i.e., {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES} contains
+ * {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO}). It only
+ * accepts request lists created via {@link #createHighSpeedRequestList}, and the request list can
+ * only be submitted to this session via {@link CameraCaptureSession#captureBurst captureBurst}, or
+ * {@link CameraCaptureSession#setRepeatingBurst setRepeatingBurst}. See
+ * {@link CameraDevice#createConstrainedHighSpeedCaptureSession} for more details of the
+ * limitations.
+ * </p>
+ * <p>
+ * Creating a session is an expensive operation and can take several hundred milliseconds, since it
+ * requires configuring the camera device's internal pipelines and allocating memory buffers for
+ * sending images to the desired targets. Therefore the setup is done asynchronously, and
+ * {@link CameraDevice#createConstrainedHighSpeedCaptureSession} will send the ready-to-use
+ * CameraCaptureSession to the provided listener's
+ * {@link CameraCaptureSession.StateCallback#onConfigured} callback. If configuration cannot be
+ * completed, then the {@link CameraCaptureSession.StateCallback#onConfigureFailed} is called, and
+ * the session will not become active.
+ * </p>
+ * <!--
+ * <p>
+ * Any capture requests (repeating or non-repeating) submitted before the session is ready will be
+ * queued up and will begin capture once the session becomes ready. In case the session cannot be
+ * configured and {@link CameraCaptureSession.StateCallback#onConfigureFailed onConfigureFailed} is
+ * called, all queued capture requests are discarded. </p>
+ * -->
+ * <p>
+ * If a new session is created by the camera device, then the previous session is closed, and its
+ * associated {@link CameraCaptureSession.StateCallback#onClosed onClosed} callback will be
+ * invoked. All of the session methods will throw an IllegalStateException if called once the
+ * session is closed.
+ * </p>
+ * <p>
+ * A closed session clears any repeating requests (as if {@link #stopRepeating} had been called),
+ * but will still complete all of its in-progress capture requests as normal, before a newly created
+ * session takes over and reconfigures the camera device.
+ * </p>
+ */
+public abstract class CameraConstrainedHighSpeedCaptureSession extends CameraCaptureSession {
+
+ /**
+ * <p>Create an unmodifiable list of requests that is suitable for constrained high speed capture
+ * session streaming.</p>
+ *
+ * <p>High speed video streaming creates significant performance pressure on the camera device,
+ * so to achieve efficient high speed streaming, the camera device may have to aggregate
+ * multiple frames together. This means requests must be sent in batched groups, with all
+ * requests sharing the same settings. This method takes the list of output target
+ * Surfaces (subject to the output Surface requirements specified by the constrained high speed
+ * session) and a {@link CaptureRequest request}, and generates a request list that has the same
+ * controls for each request. The input {@link CaptureRequest request} must contain the target
+ * output Surfaces and a target high speed FPS range that is one of the ranges returned by
+ * {@link StreamConfigurationMap#getHighSpeedVideoFpsRangesFor} for the Surface size.</p>
+ *
+ * <p>If both preview and recording Surfaces are specified in the {@code request}, the
+ * {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE target FPS range} in the input
+ * {@link CaptureRequest request} must be a fixed frame rate FPS range, where the
+ * {@link android.util.Range#getLower minimal FPS} ==
+ * {@link android.util.Range#getUpper() maximum FPS}. The created request list will contain
+ * an interleaved request pattern such that the preview output FPS is at least 30fps, the
+ * recording output FPS is {@link android.util.Range#getUpper() maximum FPS} of the requested
+ * FPS range. The application can submit this request list directly to an active high speed
+ * capture session to achieve high speed video recording. When only a preview or a recording
+ * Surface is specified, this method will return a list of requests that have the same controls
+ * and output targets for all requests.</p>
+ *
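+ * <p>As an illustrative, non-normative sketch, assuming an active constrained high speed
+ * session {@code session} and hypothetical {@code previewSurface} and {@code recorderSurface}
+ * targets (these names are placeholders, not part of this API):</p>
+ * <pre>{@code
+ * CaptureRequest.Builder builder =
+ *         session.getDevice().createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
+ * builder.addTarget(previewSurface);
+ * builder.addTarget(recorderSurface);
+ * // Both preview and recording are targeted, so request a fixed FPS range.
+ * builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, Range.create(120, 120));
+ * List<CaptureRequest> requests = session.createHighSpeedRequestList(builder.build());
+ * // Null callback and handler: no per-frame callbacks, current thread's looper.
+ * session.setRepeatingBurst(requests, null, null);
+ * }</pre>
+ *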
+ * <p>Submitting a request list created by this method to a normal capture session will result
+ * in an {@link IllegalArgumentException} if the high speed
+ * {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS range} is not supported by
+ * {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES}.</p>
+ *
+ * @param request The high speed capture request that will be used to generate the high speed
+ * request list.
+ * @return An unmodifiable CaptureRequest list that is suitable for constrained high speed
+ * capture.
+ *
+ * @throws IllegalArgumentException if the set of output Surfaces in the request do not meet the
+ * high speed video capability requirements, or the camera
+ * device doesn't support high speed video capability, or the
+ * request doesn't meet the high speed video capability
+ * requirements, or the request doesn't contain the required
+ * controls for high speed capture.
+ * @throws CameraAccessException if the camera device is no longer connected or has
+ * encountered a fatal error
+ * @throws IllegalStateException if the camera device has been closed
+ *
+ * @see CameraDevice#createConstrainedHighSpeedCaptureSession
+ * @see CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE
+ * @see android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoSizes
+ * @see android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRangesFor
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ * @see CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO
+ */
+ @NonNull
+ public abstract List<CaptureRequest> createHighSpeedRequestList(
+ @NonNull CaptureRequest request) throws CameraAccessException;
+
+}
diff --git a/android/hardware/camera2/CameraDevice.java b/android/hardware/camera2/CameraDevice.java
new file mode 100644
index 00000000..55343a29
--- /dev/null
+++ b/android/hardware/camera2/CameraDevice.java
@@ -0,0 +1,1081 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2;
+
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.annotation.IntDef;
+import android.annotation.SystemApi;
+import android.annotation.TestApi;
+import static android.hardware.camera2.ICameraDeviceUser.NORMAL_MODE;
+import static android.hardware.camera2.ICameraDeviceUser.CONSTRAINED_HIGH_SPEED_MODE;
+import android.hardware.camera2.params.InputConfiguration;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.hardware.camera2.params.OutputConfiguration;
+import android.os.Handler;
+import android.view.Surface;
+
+import java.util.List;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+/**
+ * <p>The CameraDevice class is a representation of a single camera connected to an
+ * Android device, allowing for fine-grain control of image capture and
+ * post-processing at high frame rates.</p>
+ *
+ * <p>Your application must declare the
+ * {@link android.Manifest.permission#CAMERA Camera} permission in its manifest
+ * in order to access camera devices.</p>
+ *
+ * <p>A given camera device may provide support at one of two levels: limited or
+ * full. If a device only supports the limited level, then Camera2 exposes a
+ * feature set that is roughly equivalent to the older
+ * {@link android.hardware.Camera Camera} API, although with a cleaner and more
+ * efficient interface. Devices that implement the full level of support
+ * provide substantially improved capabilities over the older camera
+ * API. Applications that target the limited level devices will run unchanged on
+ * the full-level devices; if your application requires a full-level device for
+ * proper operation, declare the "android.hardware.camera.level.full" feature in your
+ * manifest.</p>
+ *
+ * @see CameraManager#openCamera
+ * @see android.Manifest.permission#CAMERA
+ */
+public abstract class CameraDevice implements AutoCloseable {
+
+ /**
+ * Create a request suitable for a camera preview window. Specifically, this
+ * means that high frame rate is given priority over the highest-quality
+ * post-processing. These requests would normally be used with the
+ * {@link CameraCaptureSession#setRepeatingRequest} method.
+ * This template is guaranteed to be supported on all camera devices.
+ *
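+ * <p>A minimal, illustrative sketch (assuming an open {@code CameraDevice} named
+ * {@code device}, a configured {@code session}, and a {@code previewSurface} target;
+ * these names are placeholders):</p>
+ * <pre>{@code
+ * CaptureRequest.Builder previewBuilder =
+ *         device.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
+ * previewBuilder.addTarget(previewSurface);
+ * session.setRepeatingRequest(previewBuilder.build(), null, null);
+ * }</pre>
+ *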
+ * @see #createCaptureRequest
+ */
+ public static final int TEMPLATE_PREVIEW = 1;
+
+ /**
+ * Create a request suitable for still image capture. Specifically, this
+ * means prioritizing image quality over frame rate. These requests would
+ * commonly be used with the {@link CameraCaptureSession#capture} method.
+ * This template is guaranteed to be supported on all camera devices except
+ * {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT DEPTH_OUTPUT} devices
+ * that are not {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE
+ * BACKWARD_COMPATIBLE}.
+ * @see #createCaptureRequest
+ */
+ public static final int TEMPLATE_STILL_CAPTURE = 2;
+
+ /**
+ * Create a request suitable for video recording. Specifically, this means
+ * that a stable frame rate is used, and post-processing is set for
+ * recording quality. These requests would commonly be used with the
+ * {@link CameraCaptureSession#setRepeatingRequest} method.
+ * This template is guaranteed to be supported on all camera devices except
+ * {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT DEPTH_OUTPUT} devices
+ * that are not {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE
+ * BACKWARD_COMPATIBLE}.
+ *
+ * @see #createCaptureRequest
+ */
+ public static final int TEMPLATE_RECORD = 3;
+
+ /**
+ * Create a request suitable for still image capture while recording
+ * video. Specifically, this means maximizing image quality without
+ * disrupting the ongoing recording. These requests would commonly be used
+ * with the {@link CameraCaptureSession#capture} method while a request based on
+ * {@link #TEMPLATE_RECORD} is in use with {@link CameraCaptureSession#setRepeatingRequest}.
+ * This template is guaranteed to be supported on all camera devices except
+ * legacy devices ({@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL}
+ * {@code == }{@link CameraMetadata#INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY LEGACY}) and
+ * {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT DEPTH_OUTPUT} devices
+ * that are not {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE
+ * BACKWARD_COMPATIBLE}.
+ *
+ * @see #createCaptureRequest
+ */
+ public static final int TEMPLATE_VIDEO_SNAPSHOT = 4;
+
+ /**
+ * Create a request suitable for zero shutter lag still capture. This means
+ * maximizing image quality without compromising preview frame rate.
+ * AE/AWB/AF should be in auto mode. This is intended for application-operated ZSL. For
+ * device-operated ZSL, use {@link CaptureRequest#CONTROL_ENABLE_ZSL} if available.
+ * This template is guaranteed to be supported on camera devices that support the
+ * {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING PRIVATE_REPROCESSING}
+ * capability or the
+ * {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING YUV_REPROCESSING}
+ * capability.
+ *
+ * @see #createCaptureRequest
+ * @see CaptureRequest#CONTROL_ENABLE_ZSL
+ */
+ public static final int TEMPLATE_ZERO_SHUTTER_LAG = 5;
+
+ /**
+ * A basic template for direct application control of capture
+ * parameters. All automatic control is disabled (auto-exposure, auto-white
+ * balance, auto-focus), and post-processing parameters are set to preview
+ * quality. The manual capture parameters (exposure, sensitivity, and so on)
+ * are set to reasonable defaults, but should be overridden by the
+ * application depending on the intended use case.
+ * This template is guaranteed to be supported on camera devices that support the
+ * {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR MANUAL_SENSOR}
+ * capability.
+ *
+ * @see #createCaptureRequest
+ */
+ public static final int TEMPLATE_MANUAL = 6;
+
+ /** @hide */
+ @Retention(RetentionPolicy.SOURCE)
+ @IntDef(prefix = {"TEMPLATE_"}, value =
+ {TEMPLATE_PREVIEW,
+ TEMPLATE_STILL_CAPTURE,
+ TEMPLATE_RECORD,
+ TEMPLATE_VIDEO_SNAPSHOT,
+ TEMPLATE_ZERO_SHUTTER_LAG,
+ TEMPLATE_MANUAL })
+ public @interface RequestTemplate {};
+
+ /**
+ * Get the ID of this camera device.
+ *
+ * <p>This matches the ID given to {@link CameraManager#openCamera} to instantiate
+ * this camera device.</p>
+ *
+ * <p>This ID can be used to query the camera device's {@link
+ * CameraCharacteristics fixed properties} with {@link
+ * CameraManager#getCameraCharacteristics}.</p>
+ *
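+ * <p>For example (a sketch only; {@code context} and {@code device} are assumed to be
+ * available in the surrounding code):</p>
+ * <pre>{@code
+ * CameraManager manager =
+ *         (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
+ * CameraCharacteristics props = manager.getCameraCharacteristics(device.getId());
+ * }</pre>
+ *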
+ * <p>This method can be called even if the device has been closed or has encountered
+ * a serious error.</p>
+ *
+ * @return the ID for this camera device
+ *
+ * @see CameraManager#getCameraCharacteristics
+ * @see CameraManager#getCameraIdList
+ */
+ @NonNull
+ public abstract String getId();
+
+ /**
+ * <p>Create a new camera capture session by providing the target output set of Surfaces to the
+ * camera device.</p>
+ *
+ * <p>The active capture session determines the set of potential output Surfaces for
+ * the camera device for each capture request. A given request may use all
+ * or only some of the outputs. Once the CameraCaptureSession is created, requests can be
+ * submitted with {@link CameraCaptureSession#capture capture},
+ * {@link CameraCaptureSession#captureBurst captureBurst},
+ * {@link CameraCaptureSession#setRepeatingRequest setRepeatingRequest}, or
+ * {@link CameraCaptureSession#setRepeatingBurst setRepeatingBurst}.</p>
+ *
+ * <p>Surfaces suitable for inclusion as a camera output can be created for
+ * various use cases and targets:</p>
+ *
+ * <ul>
+ *
+ * <li>For drawing to a {@link android.view.SurfaceView SurfaceView}: Once the SurfaceView's
+ * Surface is {@link android.view.SurfaceHolder.Callback#surfaceCreated created}, set the size
+ * of the Surface with {@link android.view.SurfaceHolder#setFixedSize} to be one of the sizes
+ * returned by {@link StreamConfigurationMap#getOutputSizes(Class)
+ * getOutputSizes(SurfaceHolder.class)} and then obtain the Surface by calling {@link
+ * android.view.SurfaceHolder#getSurface}. If the size is not set by the application, it will
+ * be rounded by the camera device to the nearest supported size less than 1080p.</li>
+ *
+ * <li>For accessing through an OpenGL texture via a {@link android.graphics.SurfaceTexture
+ * SurfaceTexture}: Set the size of the SurfaceTexture with {@link
+ * android.graphics.SurfaceTexture#setDefaultBufferSize} to be one of the sizes returned by
+ * {@link StreamConfigurationMap#getOutputSizes(Class) getOutputSizes(SurfaceTexture.class)}
+ * before creating a Surface from the SurfaceTexture with {@link Surface#Surface}. If the size
+ * is not set by the application, it will be set by the camera device to the smallest supported
+ * size less than 1080p.</li>
+ *
+ * <li>For recording with {@link android.media.MediaCodec}: Call
+ * {@link android.media.MediaCodec#createInputSurface} after configuring
+ * the media codec to use one of the sizes returned by
+ * {@link StreamConfigurationMap#getOutputSizes(Class) getOutputSizes(MediaCodec.class)}
+ * </li>
+ *
+ * <li>For recording with {@link android.media.MediaRecorder}: Call
+ * {@link android.media.MediaRecorder#getSurface} after configuring the media recorder to use
+ * one of the sizes returned by
+ * {@link StreamConfigurationMap#getOutputSizes(Class) getOutputSizes(MediaRecorder.class)},
+ * or configuring it to use one of the supported
+ * {@link android.media.CamcorderProfile CamcorderProfiles}.</li>
+ *
+ * <li>For efficient YUV processing with {@link android.renderscript}:
+ * Create a RenderScript
+ * {@link android.renderscript.Allocation Allocation} with a supported YUV
+ * type, the IO_INPUT flag, and one of the sizes returned by
+ * {@link StreamConfigurationMap#getOutputSizes(Class) getOutputSizes(Allocation.class)}.
+ * Then obtain the Surface with
+ * {@link android.renderscript.Allocation#getSurface}.</li>
+ *
+ * <li>For access to RAW, uncompressed YUV, or compressed JPEG data in the application: Create an
+ * {@link android.media.ImageReader} object with one of the supported output formats given by
+ * {@link StreamConfigurationMap#getOutputFormats()}, setting its size to one of the
+ * corresponding supported sizes by passing the chosen output format into
+ * {@link StreamConfigurationMap#getOutputSizes(int)}. Then obtain a
+ * {@link android.view.Surface} from it with {@link android.media.ImageReader#getSurface()}.
+ * If the ImageReader size is not set to a supported size, it will be rounded to a supported
+ * size less than 1080p by the camera device.
+ * </li>
+ *
+ * </ul>
+ *
+ * <p>The camera device will query each Surface's size and formats upon this
+ * call, so they must be set to a valid setting at this time.</p>
+ *
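+ * <p>For illustration only, a sketch of configuring a session with a preview target and a
+ * JPEG {@link android.media.ImageReader} target ({@code previewSurface}, {@code width},
+ * {@code height}, and {@code handler} are assumed placeholders):</p>
+ * <pre>{@code
+ * ImageReader jpegReader =
+ *         ImageReader.newInstance(width, height, ImageFormat.JPEG, 2);
+ * List<Surface> outputs = Arrays.asList(previewSurface, jpegReader.getSurface());
+ * device.createCaptureSession(outputs, new CameraCaptureSession.StateCallback() {
+ *     public void onConfigured(CameraCaptureSession session) {
+ *         // Session is ready; submit capture requests here.
+ *     }
+ *     public void onConfigureFailed(CameraCaptureSession session) {
+ *         // Configuration failed; release resources or retry with other targets.
+ *     }
+ * }, handler);
+ * }</pre>
+ *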
+ * <p>It can take several hundred milliseconds for the session's configuration to complete,
+ * since camera hardware may need to be powered on or reconfigured. Once the configuration is
+ * complete and the session is ready to actually capture data, the provided
+ * {@link CameraCaptureSession.StateCallback}'s
+ * {@link CameraCaptureSession.StateCallback#onConfigured} callback will be called.</p>
+ *
+ * <p>If a prior CameraCaptureSession already exists when this method is called, the previous
+ * session will no longer be able to accept new capture requests and will be closed. Any
+ * in-progress capture requests made on the prior session will be completed before it's closed.
+ * {@link CameraCaptureSession.StateCallback#onConfigured} for the new session may be invoked
+ * before {@link CameraCaptureSession.StateCallback#onClosed} is invoked for the prior
+ * session. Once the new session is {@link CameraCaptureSession.StateCallback#onConfigured
+ * configured}, it is able to start capturing its own requests. To minimize the transition time,
+ * the {@link CameraCaptureSession#abortCaptures} call can be used to discard the remaining
+ * requests for the prior capture session before a new one is created. Note that once the new
+ * session is created, the old one can no longer have its captures aborted.</p>
+ *
+ * <p>Using larger resolution outputs, or more outputs, can result in a slower
+ * output rate from the device.</p>
+ *
+ * <p>Configuring a session with an empty or null list will close the current session, if
+ * any. This can be used to release the current session's target surfaces for another use.</p>
+ *
+ * <p>While any of the sizes from {@link StreamConfigurationMap#getOutputSizes} can be used when
+ * a single output stream is configured, a given camera device may not be able to support all
+ * combination of sizes, formats, and targets when multiple outputs are configured at once. The
+ * tables below list the maximum guaranteed resolutions for combinations of streams and targets,
+ * given the capabilities of the camera device.</p>
+ *
+ * <p>If an application tries to create a session using a set of targets that exceed the limits
+ * described in the below tables, one of three possibilities may occur. First, the session may
+ * be successfully created and work normally. Second, the session may be successfully created,
+ * but the camera device won't meet the frame rate guarantees as described in
+ * {@link StreamConfigurationMap#getOutputMinFrameDuration}. Or third, if the output set
+ * cannot be used at all, session creation will fail entirely, with
+ * {@link CameraCaptureSession.StateCallback#onConfigureFailed} being invoked.</p>
+ *
+ * <p>For the type column, {@code PRIV} refers to any target whose available sizes are found
+ * using {@link StreamConfigurationMap#getOutputSizes(Class)} with no direct application-visible
+ * format, {@code YUV} refers to a target Surface using the
+ * {@link android.graphics.ImageFormat#YUV_420_888} format, {@code JPEG} refers to the
+ * {@link android.graphics.ImageFormat#JPEG} format, and {@code RAW} refers to the
+ * {@link android.graphics.ImageFormat#RAW_SENSOR} format.</p>
+ *
+ * <p>For the maximum size column, {@code PREVIEW} refers to the best size match to the
+ * device's screen resolution, or to 1080p ({@code 1920x1080}), whichever is
+ * smaller. {@code RECORD} refers to the camera device's maximum supported recording resolution,
+ * as determined by {@link android.media.CamcorderProfile}. And {@code MAXIMUM} refers to the
+ * camera device's maximum output resolution for that format or target from
+ * {@link StreamConfigurationMap#getOutputSizes}.</p>
+ *
+ * <p>To use these tables, determine the number and the formats/targets of outputs needed, and
+ * find the row(s) of the table with those targets. The sizes indicate the maximum set of sizes
+ * that can be used; it is guaranteed that for those targets, the listed sizes and anything
+ * smaller from the list given by {@link StreamConfigurationMap#getOutputSizes} can be
+ * successfully used to create a session. For example, if a row indicates that an 8 megapixel
+ * (MP) YUV_420_888 output can be used together with a 2 MP {@code PRIV} output, then a session
+ * can be created with targets {@code [8 MP YUV, 2 MP PRIV]} or targets {@code [2 MP YUV, 2 MP
+ * PRIV]}; but a session with targets {@code [8 MP YUV, 4 MP PRIV]}, targets {@code [4 MP YUV, 4
+ * MP PRIV]}, or targets {@code [8 MP PRIV, 2 MP YUV]} would not be guaranteed to work, unless
+ * some other row of the table lists such a combination.</p>
+ *
+ * <style scoped>
+ * #rb { border-right-width: thick; }
+ * </style>
+ * <p>Legacy devices ({@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL}
+ * {@code == }{@link CameraMetadata#INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY LEGACY}) support at
+ * least the following stream combinations:
+ *
+ * <table>
+ * <tr><th colspan="7">LEGACY-level guaranteed configurations</th></tr>
+ * <tr> <th colspan="2" id="rb">Target 1</th> <th colspan="2" id="rb">Target 2</th> <th colspan="2" id="rb">Target 3</th> <th rowspan="2">Sample use case(s)</th> </tr>
+ * <tr> <th>Type</th><th id="rb">Max size</th> <th>Type</th><th id="rb">Max size</th> <th>Type</th><th id="rb">Max size</th></tr>
+ * <tr> <td>{@code PRIV}</td><td id="rb">{@code MAXIMUM}</td> <td colspan="2" id="rb"></td> <td colspan="2" id="rb"></td> <td>Simple preview, GPU video processing, or no-preview video recording.</td> </tr>
+ * <tr> <td>{@code JPEG}</td><td id="rb">{@code MAXIMUM}</td> <td colspan="2" id="rb"></td> <td colspan="2" id="rb"></td> <td>No-viewfinder still image capture.</td> </tr>
+ * <tr> <td>{@code YUV }</td><td id="rb">{@code MAXIMUM}</td> <td colspan="2" id="rb"></td> <td colspan="2" id="rb"></td> <td>In-application video/image processing.</td> </tr>
+ * <tr> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code JPEG}</td><td id="rb">{@code MAXIMUM}</td> <td colspan="2" id="rb"></td> <td>Standard still imaging.</td> </tr>
+ * <tr> <td>{@code YUV }</td><td id="rb">{@code PREVIEW}</td> <td>{@code JPEG}</td><td id="rb">{@code MAXIMUM}</td> <td colspan="2" id="rb"></td> <td>In-app processing plus still capture.</td> </tr>
+ * <tr> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td colspan="2" id="rb"></td> <td>Standard recording.</td> </tr>
+ * <tr> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code YUV }</td><td id="rb">{@code PREVIEW}</td> <td colspan="2" id="rb"></td> <td>Preview plus in-app processing.</td> </tr>
+ * <tr> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code YUV }</td><td id="rb">{@code PREVIEW}</td> <td>{@code JPEG}</td><td id="rb">{@code MAXIMUM}</td> <td>Still capture plus in-app processing.</td> </tr>
+ * </table><br>
+ * </p>
+ *
+ * <p>Limited-level ({@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL}
+ * {@code == }{@link CameraMetadata#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED LIMITED}) devices
+ * support at least the following stream combinations in addition to those for
+ * {@link CameraMetadata#INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY LEGACY} devices:
+ *
+ * <table>
+ * <tr><th colspan="7">LIMITED-level additional guaranteed configurations</th></tr>
+ * <tr><th colspan="2" id="rb">Target 1</th><th colspan="2" id="rb">Target 2</th><th colspan="2" id="rb">Target 3</th> <th rowspan="2">Sample use case(s)</th> </tr>
+ * <tr><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th></tr>
+ * <tr> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code PRIV}</td><td id="rb">{@code RECORD }</td> <td colspan="2" id="rb"></td> <td>High-resolution video recording with preview.</td> </tr>
+ * <tr> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code YUV }</td><td id="rb">{@code RECORD }</td> <td colspan="2" id="rb"></td> <td>High-resolution in-app video processing with preview.</td> </tr>
+ * <tr> <td>{@code YUV }</td><td id="rb">{@code PREVIEW}</td> <td>{@code YUV }</td><td id="rb">{@code RECORD }</td> <td colspan="2" id="rb"></td> <td>Two-input in-app video processing.</td> </tr>
+ * <tr> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code PRIV}</td><td id="rb">{@code RECORD }</td> <td>{@code JPEG}</td><td id="rb">{@code RECORD }</td> <td>High-resolution recording with video snapshot.</td> </tr>
+ * <tr> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code YUV }</td><td id="rb">{@code RECORD }</td> <td>{@code JPEG}</td><td id="rb">{@code RECORD }</td> <td>High-resolution in-app processing with video snapshot.</td> </tr>
+ * <tr> <td>{@code YUV }</td><td id="rb">{@code PREVIEW}</td> <td>{@code YUV }</td><td id="rb">{@code PREVIEW}</td> <td>{@code JPEG}</td><td id="rb">{@code MAXIMUM}</td> <td>Two-input in-app processing with still capture.</td> </tr>
+ * </table><br>
+ * </p>
+ *
+ * <p>FULL-level ({@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL}
+ * {@code == }{@link CameraMetadata#INFO_SUPPORTED_HARDWARE_LEVEL_FULL FULL}) devices
+ * support at least the following stream combinations in addition to those for
+ * {@link CameraMetadata#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED LIMITED} devices:
+ *
+ * <table>
+ * <tr><th colspan="7">FULL-level additional guaranteed configurations</th></tr>
+ * <tr><th colspan="2" id="rb">Target 1</th><th colspan="2" id="rb">Target 2</th><th colspan="2" id="rb">Target 3</th> <th rowspan="2">Sample use case(s)</th> </tr>
+ * <tr><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th> </tr>
+ * <tr> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code PRIV}</td><td id="rb">{@code MAXIMUM}</td> <td colspan="2" id="rb"></td> <td>Maximum-resolution GPU processing with preview.</td> </tr>
+ * <tr> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code YUV }</td><td id="rb">{@code MAXIMUM}</td> <td colspan="2" id="rb"></td> <td>Maximum-resolution in-app processing with preview.</td> </tr>
+ * <tr> <td>{@code YUV }</td><td id="rb">{@code PREVIEW}</td> <td>{@code YUV }</td><td id="rb">{@code MAXIMUM}</td> <td colspan="2" id="rb"></td> <td>Maximum-resolution two-input in-app processing.</td> </tr>
+ * <tr> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code JPEG}</td><td id="rb">{@code MAXIMUM}</td> <td>Video recording with maximum-size video snapshot</td> </tr>
+ * <tr> <td>{@code YUV }</td><td id="rb">{@code 640x480}</td> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code YUV }</td><td id="rb">{@code MAXIMUM}</td> <td>Standard video recording plus maximum-resolution in-app processing.</td> </tr>
+ * <tr> <td>{@code YUV }</td><td id="rb">{@code 640x480}</td> <td>{@code YUV }</td><td id="rb">{@code PREVIEW}</td> <td>{@code YUV }</td><td id="rb">{@code MAXIMUM}</td> <td>Preview plus two-input maximum-resolution in-app processing.</td> </tr>
+ * </table><br>
+ * </p>
+ *
+ * <p>RAW-capability ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES} includes
+ * {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_RAW RAW}) devices additionally support
+ * at least the following stream combinations on both
+ * {@link CameraMetadata#INFO_SUPPORTED_HARDWARE_LEVEL_FULL FULL} and
+ * {@link CameraMetadata#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED LIMITED} devices:
+ *
+ * <table>
+ * <tr><th colspan="7">RAW-capability additional guaranteed configurations</th></tr>
+ * <tr><th colspan="2" id="rb">Target 1</th><th colspan="2" id="rb">Target 2</th><th colspan="2" id="rb">Target 3</th> <th rowspan="2">Sample use case(s)</th> </tr>
+ * <tr><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th> </tr>
+ * <tr> <td>{@code RAW }</td><td id="rb">{@code MAXIMUM}</td> <td colspan="2" id="rb"></td> <td colspan="2" id="rb"></td> <td>No-preview DNG capture.</td> </tr>
+ * <tr> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code RAW }</td><td id="rb">{@code MAXIMUM}</td> <td colspan="2" id="rb"></td> <td>Standard DNG capture.</td> </tr>
+ * <tr> <td>{@code YUV }</td><td id="rb">{@code PREVIEW}</td> <td>{@code RAW }</td><td id="rb">{@code MAXIMUM}</td> <td colspan="2" id="rb"></td> <td>In-app processing plus DNG capture.</td> </tr>
+ * <tr> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code RAW }</td><td id="rb">{@code MAXIMUM}</td> <td>Video recording with DNG capture.</td> </tr>
+ * <tr> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code YUV }</td><td id="rb">{@code PREVIEW}</td> <td>{@code RAW }</td><td id="rb">{@code MAXIMUM}</td> <td>Preview with in-app processing and DNG capture.</td> </tr>
+ * <tr> <td>{@code YUV }</td><td id="rb">{@code PREVIEW}</td> <td>{@code YUV }</td><td id="rb">{@code PREVIEW}</td> <td>{@code RAW }</td><td id="rb">{@code MAXIMUM}</td> <td>Two-input in-app processing plus DNG capture.</td> </tr>
+ * <tr> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code JPEG}</td><td id="rb">{@code MAXIMUM}</td> <td>{@code RAW }</td><td id="rb">{@code MAXIMUM}</td> <td>Still capture with simultaneous JPEG and DNG.</td> </tr>
+ * <tr> <td>{@code YUV }</td><td id="rb">{@code PREVIEW}</td> <td>{@code JPEG}</td><td id="rb">{@code MAXIMUM}</td> <td>{@code RAW }</td><td id="rb">{@code MAXIMUM}</td> <td>In-app processing with simultaneous JPEG and DNG.</td> </tr>
+ * </table><br>
+ * </p>
+ *
+ * <p>BURST-capability ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES} includes
+ * {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE BURST_CAPTURE}) devices
+ * support at least the below stream combinations in addition to those for
+ * {@link CameraMetadata#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED LIMITED} devices. Note that all
+ * FULL-level devices support the BURST capability, and the below list is a strict subset of the
+ * list for FULL-level devices, so this table is only relevant for LIMITED-level devices that
+ * support the BURST_CAPTURE capability.
+ *
+ * <table>
+ * <tr><th colspan="5">BURST-capability additional guaranteed configurations</th></tr>
+ * <tr><th colspan="2" id="rb">Target 1</th><th colspan="2" id="rb">Target 2</th><th rowspan="2">Sample use case(s)</th> </tr>
+ * <tr><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th> </tr>
+ * <tr> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code PRIV}</td><td id="rb">{@code MAXIMUM}</td> <td>Maximum-resolution GPU processing with preview.</td> </tr>
+ * <tr> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code YUV }</td><td id="rb">{@code MAXIMUM}</td> <td>Maximum-resolution in-app processing with preview.</td> </tr>
+ * <tr> <td>{@code YUV }</td><td id="rb">{@code PREVIEW}</td> <td>{@code YUV }</td><td id="rb">{@code MAXIMUM}</td> <td>Maximum-resolution two-input in-app processing.</td> </tr>
+ * </table><br>
+ * </p>
+ *
+ * <p>LEVEL-3 ({@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL}
+ * {@code == }{@link CameraMetadata#INFO_SUPPORTED_HARDWARE_LEVEL_3 LEVEL_3}) devices
+ * support at least the following stream combinations in addition to the combinations for
+ * {@link CameraMetadata#INFO_SUPPORTED_HARDWARE_LEVEL_FULL FULL} and for
+ * RAW capability ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES} includes
+ * {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_RAW RAW}):
+ *
+ * <table>
+ * <tr><th colspan="11">LEVEL-3 additional guaranteed configurations</th></tr>
+ * <tr><th colspan="2" id="rb">Target 1</th><th colspan="2" id="rb">Target 2</th><th colspan="2" id="rb">Target 3</th><th colspan="2" id="rb">Target 4</th><th rowspan="2">Sample use case(s)</th> </tr>
+ * <tr><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th> </tr>
+ * <tr> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code PRIV}</td><td id="rb">{@code 640x480}</td> <td>{@code YUV}</td><td id="rb">{@code MAXIMUM}</td> <td>{@code RAW}</td><td id="rb">{@code MAXIMUM}</td> <td>In-app viewfinder analysis with dynamic selection of output format.</td> </tr>
+ * <tr> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code PRIV}</td><td id="rb">{@code 640x480}</td> <td>{@code JPEG}</td><td id="rb">{@code MAXIMUM}</td> <td>{@code RAW}</td><td id="rb">{@code MAXIMUM}</td> <td>In-app viewfinder analysis with dynamic selection of output format.</td> </tr>
+ * </table><br>
+ * </p>
+ *
+ * <p>Since the capabilities of camera devices vary greatly, a given camera device may support
+ * target combinations with sizes outside of these guarantees, but this can only be tested for
+ * by attempting to create a session with such targets.</p>
+ *
+ * @param outputs The new set of Surfaces that should be made available as
+ * targets for captured image data.
+ * @param callback The callback to notify about the status of the new capture session.
+ * @param handler The handler on which the callback should be invoked, or {@code null} to use
+ * the current thread's {@link android.os.Looper looper}.
+ *
+ * @throws IllegalArgumentException if the set of output Surfaces do not meet the requirements,
+ * the callback is null, or the handler is null but the current
+ * thread has no looper.
+ * @throws CameraAccessException if the camera device is no longer connected or has
+ * encountered a fatal error
+ * @throws IllegalStateException if the camera device has been closed
+ *
+ * @see CameraCaptureSession
+ * @see StreamConfigurationMap#getOutputFormats()
+ * @see StreamConfigurationMap#getOutputSizes(int)
+ * @see StreamConfigurationMap#getOutputSizes(Class)
+ */
+ public abstract void createCaptureSession(@NonNull List<Surface> outputs,
+ @NonNull CameraCaptureSession.StateCallback callback, @Nullable Handler handler)
+ throws CameraAccessException;
+
+ /**
+ * <p>Create a new camera capture session by providing the target output set of Surfaces and
+ * their corresponding surface configurations to the camera device.</p>
+ *
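+ * <p>For illustration, a hedged sketch ({@code previewSurface}, {@code recordSurface},
+ * {@code stateCallback}, and {@code handler} are assumed placeholders):</p>
+ * <pre>{@code
+ * List<OutputConfiguration> configs = Arrays.asList(
+ *         new OutputConfiguration(previewSurface),
+ *         new OutputConfiguration(recordSurface));
+ * device.createCaptureSessionByOutputConfigurations(configs, stateCallback, handler);
+ * }</pre>
+ *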
+ * @see #createCaptureSession
+ * @see OutputConfiguration
+ */
+ public abstract void createCaptureSessionByOutputConfigurations(
+ List<OutputConfiguration> outputConfigurations,
+ CameraCaptureSession.StateCallback callback, @Nullable Handler handler)
+ throws CameraAccessException;
+
+ /**
+ * Create a new reprocessable camera capture session by providing the desired reprocessing
+ * input Surface configuration and the target output set of Surfaces to the camera device.
+ *
+ * <p>If a camera device supports YUV reprocessing
+ * ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING}) or PRIVATE
+ * reprocessing
+ * ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING}), besides
+ * the capture session created via {@link #createCaptureSession createCaptureSession}, the
+ * application can also create a reprocessable capture session to submit reprocess capture
+ * requests in addition to regular capture requests. A reprocess capture request takes the next
+ * available buffer from the session's input Surface, and sends it through the camera device's
+ * processing pipeline again, to produce buffers for the request's target output Surfaces. No
+ * new image data is captured for a reprocess request. However, the input buffer provided by
+ * the application must be captured previously by the same camera device in the same session
+ * directly (e.g. for Zero-Shutter-Lag use case) or indirectly (e.g. combining multiple output
+ * images).</p>
+ *
+ * <p>The active reprocessable capture session determines an input {@link Surface} and the set
+ * of potential output Surfaces for the camera devices for each capture request. The application
+ * can use {@link #createCaptureRequest createCaptureRequest} to create regular capture requests
+ * to capture new images from the camera device, and use {@link #createReprocessCaptureRequest
+ * createReprocessCaptureRequest} to create reprocess capture requests to process buffers from
+ * the input {@link Surface}. Some combinations of output Surfaces in a session may not be used
+ * in a request simultaneously. The guaranteed combinations of output Surfaces that can be used
+ * in a request simultaneously are listed in the tables under {@link #createCaptureSession
+ * createCaptureSession}. All the output Surfaces in one capture request will come from the
+ * same source, either from a new capture by the camera device, or from the input Surface
+ * depending on if the request is a reprocess capture request.</p>
+ *
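+ * <p>For illustration only, a sketch of creating a PRIVATE-format reprocessable session
+ * ({@code width}, {@code height}, {@code zslSurface}, {@code jpegSurface},
+ * {@code stateCallback}, and {@code handler} are assumed placeholders, and PRIVATE
+ * reprocessing support is assumed):</p>
+ * <pre>{@code
+ * InputConfiguration inputConfig =
+ *         new InputConfiguration(width, height, ImageFormat.PRIVATE);
+ * device.createReprocessableCaptureSession(inputConfig,
+ *         Arrays.asList(zslSurface, jpegSurface), stateCallback, handler);
+ * }</pre>
+ *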
+ * <p>Input formats and sizes supported by the camera device can be queried via
+ * {@link StreamConfigurationMap#getInputFormats} and
+ * {@link StreamConfigurationMap#getInputSizes}. For each supported input format, the camera
+ * device supports a set of output formats and sizes for reprocessing that can be queried via
+ * {@link StreamConfigurationMap#getValidOutputFormatsForInput} and
+ * {@link StreamConfigurationMap#getOutputSizes}. While output Surfaces with formats that
+ * aren't valid reprocess output targets for the input configuration can be part of a session,
+ * they cannot be used as targets for a reprocessing request.</p>
+ *
+ * <p>Since the application cannot access {@link android.graphics.ImageFormat#PRIVATE} images
+ * directly, an output Surface created by {@link android.media.ImageReader#newInstance} with
+ * {@link android.graphics.ImageFormat#PRIVATE} as the format will be considered as intended to
+ * be used for reprocessing input and thus the {@link android.media.ImageReader} size must
+ * match one of the supported input sizes for {@link android.graphics.ImageFormat#PRIVATE}
+ * format. Otherwise, creating a reprocessable capture session will fail.</p>
+ *
+ * <p>The guaranteed stream configurations listed in
+ * {@link #createCaptureSession createCaptureSession} are also guaranteed to work for
+ * {@link #createReprocessableCaptureSession createReprocessableCaptureSession}. In addition,
+ * the configurations in the tables below are also guaranteed for creating a reprocessable
+ * capture session if the camera device supports YUV reprocessing or PRIVATE reprocessing.
+ * However, not all output targets used to create a reprocessable session may be used in a
+ * {@link CaptureRequest} simultaneously. For devices that support only 1 output target in a
+ * reprocess {@link CaptureRequest}, submitting a reprocess {@link CaptureRequest} with multiple
+ * output targets will result in a {@link CaptureFailure}. For devices that support multiple
+ * output targets in a reprocess {@link CaptureRequest}, the guaranteed output targets that can
+ * be included in a {@link CaptureRequest} simultaneously are listed in the tables under
+ * {@link #createCaptureSession createCaptureSession}. For example, with a FULL-capability
+ * ({@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL} {@code == }
+ * {@link CameraMetadata#INFO_SUPPORTED_HARDWARE_LEVEL_FULL FULL}) device that supports PRIVATE
+ * reprocessing, an application can create a reprocessable capture session with 1 input,
+ * ({@code PRIV}, {@code MAXIMUM}), and 3 outputs, ({@code PRIV}, {@code MAXIMUM}),
+ * ({@code PRIV}, {@code PREVIEW}), and ({@code YUV}, {@code MAXIMUM}). However, it's not
+ * guaranteed that an application can submit a regular or reprocess capture with ({@code PRIV},
+ * {@code MAXIMUM}) and ({@code YUV}, {@code MAXIMUM}) outputs based on the table listed under
+ * {@link #createCaptureSession createCaptureSession}. In other words, use the tables below to
+ * determine the guaranteed stream configurations for creating a reprocessable capture session,
+ * and use the tables under {@link #createCaptureSession createCaptureSession} to determine the
+ * guaranteed output targets that can be submitted in a regular or reprocess
+ * {@link CaptureRequest} simultaneously.</p>
+ *
+ * <style scoped>
+ * #rb { border-right-width: thick; }
+ * </style>
+ *
+ * <p>LIMITED-level ({@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL}
+ * {@code == }{@link CameraMetadata#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED LIMITED}) devices
+ * support at least the following stream combinations for creating a reprocessable capture
+ * session in addition to those listed in {@link #createCaptureSession createCaptureSession} for
+ * {@link CameraMetadata#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED LIMITED} devices:
+ *
+ * <table>
+ * <tr><th colspan="11">LIMITED-level additional guaranteed configurations for creating a reprocessable capture session<br>({@code PRIV} input is guaranteed only if PRIVATE reprocessing is supported. {@code YUV} input is guaranteed only if YUV reprocessing is supported)</th></tr>
+ * <tr><th colspan="2" id="rb">Input</th><th colspan="2" id="rb">Target 1</th><th colspan="2" id="rb">Target 2</th><th colspan="2" id="rb">Target 3</th><th colspan="2" id="rb">Target 4</th><th rowspan="2">Sample use case(s)</th> </tr>
+ * <tr><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th></tr>
+ * <tr> <td>{@code PRIV}/{@code YUV}</td><td id="rb">{@code MAXIMUM}</td> <td>Same as input</td><td id="rb">{@code MAXIMUM}</td> <td>{@code JPEG}</td><td id="rb">{@code MAXIMUM}</td> <td></td><td id="rb"></td> <td></td><td id="rb"></td> <td>No-viewfinder still image reprocessing.</td> </tr>
+ * <tr> <td>{@code PRIV}/{@code YUV}</td><td id="rb">{@code MAXIMUM}</td> <td>Same as input</td><td id="rb">{@code MAXIMUM}</td> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code JPEG}</td><td id="rb">{@code MAXIMUM}</td> <td></td><td id="rb"></td> <td>ZSL(Zero-Shutter-Lag) still imaging.</td> </tr>
+ * <tr> <td>{@code PRIV}/{@code YUV}</td><td id="rb">{@code MAXIMUM}</td> <td>Same as input</td><td id="rb">{@code MAXIMUM}</td> <td>{@code YUV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code JPEG}</td><td id="rb">{@code MAXIMUM}</td> <td></td><td id="rb"></td> <td>ZSL still and in-app processing imaging.</td> </tr>
+ * <tr> <td>{@code PRIV}/{@code YUV}</td><td id="rb">{@code MAXIMUM}</td> <td>Same as input</td><td id="rb">{@code MAXIMUM}</td> <td>{@code YUV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code YUV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code JPEG}</td><td id="rb">{@code MAXIMUM}</td> <td>ZSL in-app processing with still capture.</td> </tr>
+ * </table><br>
+ * </p>
+ *
+ * <p>FULL-level ({@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL}
+ * {@code == }{@link CameraMetadata#INFO_SUPPORTED_HARDWARE_LEVEL_FULL FULL}) devices
+ * support at least the following stream combinations for creating a reprocessable capture
+ * session in addition to those for
+ * {@link CameraMetadata#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED LIMITED} devices:
+ *
+ * <table>
+ * <tr><th colspan="11">FULL-level additional guaranteed configurations for creating a reprocessable capture session<br>({@code PRIV} input is guaranteed only if PRIVATE reprocessing is supported. {@code YUV} input is guaranteed only if YUV reprocessing is supported)</th></tr>
+ * <tr><th colspan="2" id="rb">Input</th><th colspan="2" id="rb">Target 1</th><th colspan="2" id="rb">Target 2</th><th colspan="2" id="rb">Target 3</th><th colspan="2" id="rb">Target 4</th><th rowspan="2">Sample use case(s)</th> </tr>
+ * <tr><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th></tr>
+ * <tr> <td>{@code YUV}</td><td id="rb">{@code MAXIMUM}</td> <td>{@code YUV}</td><td id="rb">{@code MAXIMUM}</td> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td></td><td id="rb"></td> <td></td><td id="rb"></td> <td>Maximum-resolution multi-frame image fusion in-app processing with regular preview.</td> </tr>
+ * <tr> <td>{@code YUV}</td><td id="rb">{@code MAXIMUM}</td> <td>{@code YUV}</td><td id="rb">{@code MAXIMUM}</td> <td>{@code YUV}</td><td id="rb">{@code PREVIEW}</td> <td></td><td id="rb"></td> <td></td><td id="rb"></td> <td>Maximum-resolution multi-frame image fusion two-input in-app processing.</td> </tr>
+ * <tr> <td>{@code PRIV}/{@code YUV}</td><td id="rb">{@code MAXIMUM}</td> <td>Same as input</td><td id="rb">{@code MAXIMUM}</td> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code YUV}</td><td id="rb">{@code RECORD}</td> <td></td><td id="rb"></td> <td>High-resolution ZSL in-app video processing with regular preview.</td> </tr>
+ * <tr> <td>{@code PRIV}</td><td id="rb">{@code MAXIMUM}</td> <td>{@code PRIV}</td><td id="rb">{@code MAXIMUM}</td> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code YUV}</td><td id="rb">{@code MAXIMUM}</td> <td></td><td id="rb"></td> <td>Maximum-resolution ZSL in-app processing with regular preview.</td> </tr>
+ * <tr> <td>{@code PRIV}</td><td id="rb">{@code MAXIMUM}</td> <td>{@code PRIV}</td><td id="rb">{@code MAXIMUM}</td> <td>{@code YUV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code YUV}</td><td id="rb">{@code MAXIMUM}</td> <td></td><td id="rb"></td> <td>Maximum-resolution two-input ZSL in-app processing.</td> </tr>
+ * <tr> <td>{@code PRIV}/{@code YUV}</td><td id="rb">{@code MAXIMUM}</td> <td>Same as input</td><td id="rb">{@code MAXIMUM}</td> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code YUV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code JPEG}</td><td id="rb">{@code MAXIMUM}</td> <td>ZSL still capture and in-app processing.</td> </tr>
+ * </table><br>
+ * </p>
+ *
+ * <p>RAW-capability ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES} includes
+ * {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_RAW RAW}) devices additionally support
+ * at least the following stream combinations for creating a reprocessable capture session
+ * on both {@link CameraMetadata#INFO_SUPPORTED_HARDWARE_LEVEL_FULL FULL} and
+ * {@link CameraMetadata#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED LIMITED} devices
+ *
+ * <table>
+ * <tr><th colspan="11">RAW-capability additional guaranteed configurations for creating a reprocessable capture session<br>({@code PRIV} input is guaranteed only if PRIVATE reprocessing is supported. {@code YUV} input is guaranteed only if YUV reprocessing is supported)</th></tr>
+ * <tr><th colspan="2" id="rb">Input</th><th colspan="2" id="rb">Target 1</th><th colspan="2" id="rb">Target 2</th><th colspan="2" id="rb">Target 3</th><th colspan="2" id="rb">Target 4</th><th rowspan="2">Sample use case(s)</th> </tr>
+ * <tr><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th></tr>
+ * <tr> <td>{@code PRIV}/{@code YUV}</td><td id="rb">{@code MAXIMUM}</td> <td>Same as input</td><td id="rb">{@code MAXIMUM}</td> <td>{@code YUV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code RAW}</td><td id="rb">{@code MAXIMUM}</td> <td></td><td id="rb"></td> <td>Mutually exclusive ZSL in-app processing and DNG capture.</td> </tr>
+ * <tr> <td>{@code PRIV}/{@code YUV}</td><td id="rb">{@code MAXIMUM}</td> <td>Same as input</td><td id="rb">{@code MAXIMUM}</td> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code YUV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code RAW}</td><td id="rb">{@code MAXIMUM}</td> <td>Mutually exclusive ZSL in-app processing and preview with DNG capture.</td> </tr>
+ * <tr> <td>{@code PRIV}/{@code YUV}</td><td id="rb">{@code MAXIMUM}</td> <td>Same as input</td><td id="rb">{@code MAXIMUM}</td> <td>{@code YUV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code YUV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code RAW}</td><td id="rb">{@code MAXIMUM}</td> <td>Mutually exclusive ZSL two-input in-app processing and DNG capture.</td> </tr>
+ * <tr> <td>{@code PRIV}/{@code YUV}</td><td id="rb">{@code MAXIMUM}</td> <td>Same as input</td><td id="rb">{@code MAXIMUM}</td> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code JPEG}</td><td id="rb">{@code MAXIMUM}</td> <td>{@code RAW}</td><td id="rb">{@code MAXIMUM}</td> <td>Mutually exclusive ZSL still capture and preview with DNG capture.</td> </tr>
+ * <tr> <td>{@code PRIV}/{@code YUV}</td><td id="rb">{@code MAXIMUM}</td> <td>Same as input</td><td id="rb">{@code MAXIMUM}</td> <td>{@code YUV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code JPEG}</td><td id="rb">{@code MAXIMUM}</td> <td>{@code RAW}</td><td id="rb">{@code MAXIMUM}</td> <td>Mutually exclusive ZSL in-app processing with still capture and DNG capture.</td> </tr>
+ * </table><br>
+ * </p>
+ *
+ * <p>LEVEL-3 ({@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL}
+ * {@code == }{@link CameraMetadata#INFO_SUPPORTED_HARDWARE_LEVEL_3 LEVEL_3}) devices
+ * support at least the following stream combinations for creating a reprocessable capture
+ * session in addition to those for
+ * {@link CameraMetadata#INFO_SUPPORTED_HARDWARE_LEVEL_FULL FULL} devices. Note that while
+ * the second configuration allows for configuring {@code MAXIMUM} {@code YUV} and {@code JPEG}
+ * outputs at the same time, that configuration is not listed for regular capture sessions, and
+ * therefore simultaneous output to both targets is not allowed.
+ *
+ * <table>
+ * <tr><th colspan="13">LEVEL-3 additional guaranteed configurations for creating a reprocessable capture session<br>({@code PRIV} input is guaranteed only if PRIVATE reprocessing is supported. {@code YUV} input is always guaranteed.</th></tr>
+ * <tr><th colspan="2" id="rb">Input</th><th colspan="2" id="rb">Target 1</th><th colspan="2" id="rb">Target 2</th><th colspan="2" id="rb">Target 3</th><th colspan="2" id="rb">Target 4</th><th colspan="2" id="rb">Target 5</th><th rowspan="2">Sample use case(s)</th> </tr>
+ * <tr><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th><th>Type</th><th id="rb">Max size</th></tr>
+ * <tr> <td>{@code YUV}</td><td id="rb">{@code MAXIMUM}</td> <td>{@code YUV}</td><td id="rb">{@code MAXIMUM}</td> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code PRIV}</td><td id="rb">{@code 640x480}</td> <td>{@code RAW}</td><td id="rb">{@code MAXIMUM}</td> <td></td><td id="rb"></td> <td>In-app viewfinder analysis with ZSL and RAW.</td> </tr>
+ * <tr> <td>{@code PRIV}/{@code YUV}</td><td id="rb">{@code MAXIMUM}</td> <td>Same as input</td><td id="rb">{@code MAXIMUM}</td> <td>{@code PRIV}</td><td id="rb">{@code PREVIEW}</td> <td>{@code PRIV}</td><td id="rb">{@code 640x480}</td> <td>{@code RAW}</td><td id="rb">{@code MAXIMUM}</td> <td>{@code JPEG}</td><td id="rb">{@code MAXIMUM}</td><td>In-app viewfinder analysis with ZSL, RAW, and JPEG reprocessing output.</td> </tr>
+ * </table><br>
+ * </p>
+ *
+ * @param inputConfig The configuration for the input {@link Surface}
+ * @param outputs The new set of Surfaces that should be made available as
+ * targets for captured image data.
+ * @param callback The callback to notify about the status of the new capture session.
+ * @param handler The handler on which the callback should be invoked, or {@code null} to use
+ * the current thread's {@link android.os.Looper looper}.
+ *
+ * @throws IllegalArgumentException if the input configuration is null or not supported, the set
+ * of output Surfaces do not meet the requirements, the
+ * callback is null, or the handler is null but the current
+ * thread has no looper.
+ * @throws CameraAccessException if the camera device is no longer connected or has
+ * encountered a fatal error
+ * @throws IllegalStateException if the camera device has been closed
+ *
+ * @see #createCaptureSession
+ * @see CameraCaptureSession
+ * @see StreamConfigurationMap#getInputFormats
+ * @see StreamConfigurationMap#getInputSizes
+ * @see StreamConfigurationMap#getValidOutputFormatsForInput
+ * @see StreamConfigurationMap#getOutputSizes
+ * @see android.media.ImageWriter
+ * @see android.media.ImageReader
+ */
+ public abstract void createReprocessableCaptureSession(@NonNull InputConfiguration inputConfig,
+ @NonNull List<Surface> outputs, @NonNull CameraCaptureSession.StateCallback callback,
+ @Nullable Handler handler)
+ throws CameraAccessException;
+
+ /**
+ * Create a new reprocessable camera capture session by providing the desired reprocessing
+ * input configuration and a set of output {@link OutputConfiguration OutputConfigurations}
+ * to the camera device.
+ *
+ * @see #createReprocessableCaptureSession
+ * @see OutputConfiguration
+ *
+ */
+ public abstract void createReprocessableCaptureSessionByConfigurations(
+ @NonNull InputConfiguration inputConfig,
+ @NonNull List<OutputConfiguration> outputs,
+ @NonNull CameraCaptureSession.StateCallback callback,
+ @Nullable Handler handler)
+ throws CameraAccessException;
+
+ /**
+ * <p>Create a new constrained high speed capture session.</p>
+ *
+ * <p>The application can use a normal capture session (created via
+ * {@link #createCaptureSession}) for high speed capture if the desired high speed FPS ranges
+ * are advertised by {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES}, in
+ * which case all API semantics associated with normal capture sessions apply.</p>
+ *
+ * <p>This method creates a specialized capture session that is only targeted at the high speed
+ * video recording (>=120fps) use case, if the camera device supports the high speed video
+ * capability (i.e., {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES} contains
+ * {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO}).
+ * Therefore, it has special characteristics compared with a normal capture session:</p>
+ *
+ * <ul>
+ *
+ * <li>In addition to the output target Surface requirements specified by the
+ * {@link #createCaptureSession} method, an active high speed capture session will support up
+ * to 2 output Surfaces, though the application might choose to configure just one Surface
+ * (e.g., preview only). All Surfaces must be either video encoder surfaces (acquired by
+ * {@link android.media.MediaRecorder#getSurface} or
+ * {@link android.media.MediaCodec#createInputSurface}) or preview surfaces (obtained from
+ * {@link android.view.SurfaceView}, {@link android.graphics.SurfaceTexture} via
+ * {@link android.view.Surface#Surface(android.graphics.SurfaceTexture)}). The Surface sizes
+ * must be one of the sizes reported by {@link StreamConfigurationMap#getHighSpeedVideoSizes}.
+ * When multiple Surfaces are configured, their sizes must be the same.</li>
+ *
+ * <li>An active high speed capture session only accepts request lists created via
+ * {@link CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}, and the
+ * request list can only be submitted to this session via
+ * {@link CameraCaptureSession#captureBurst captureBurst}, or
+ * {@link CameraCaptureSession#setRepeatingBurst setRepeatingBurst}.</li>
+ *
+ * <li>The FPS ranges being requested to this session must be selected from
+ * {@link StreamConfigurationMap#getHighSpeedVideoFpsRangesFor}. The application can still use
+ * {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE} to control the desired FPS range.
+ * Switching to an FPS range that has different
+ * {@link android.util.Range#getUpper() maximum FPS} may trigger some camera device
+ * reconfigurations, which may introduce extra latency. It is therefore recommended that the
+ * application avoid unnecessary maximum target FPS changes during high
+ * speed streaming.</li>
+ *
+ * <li>For the request lists submitted to this session, the camera device will override the
+ * {@link CaptureRequest#CONTROL_MODE control mode}, auto-exposure (AE), auto-white balance
+ * (AWB) and auto-focus (AF) to {@link CameraMetadata#CONTROL_MODE_AUTO},
+ * {@link CameraMetadata#CONTROL_AE_MODE_ON}, {@link CameraMetadata#CONTROL_AWB_MODE_AUTO}
+ * and {@link CameraMetadata#CONTROL_AF_MODE_CONTINUOUS_VIDEO}, respectively. All
+ * post-processing block mode controls will be overridden to be FAST. Therefore, no manual
+ * control of capture and post-processing parameters is possible. Besides these, only a subset
+ * of controls will work, see
+ * {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO} for
+ * more details.</li>
+ *
+ * </ul>
+ *
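+ * <p>A minimal sketch of setting up a high speed session, with assumed names:
+ * {@code mCameraDevice} is an open device, and {@code previewSurface} and
+ * {@code recorderSurface} are Surfaces whose (identical) size was validated against
+ * {@link StreamConfigurationMap#getHighSpeedVideoSizes}:</p>
+ * <pre>{@code
+ * List<Surface> outputs = Arrays.asList(previewSurface, recorderSurface);
+ * mCameraDevice.createConstrainedHighSpeedCaptureSession(outputs,
+ *         new CameraCaptureSession.StateCallback() {
+ *             public void onConfigured(CameraCaptureSession session) {
+ *                 CameraConstrainedHighSpeedCaptureSession highSpeedSession =
+ *                         (CameraConstrainedHighSpeedCaptureSession) session;
+ *                 // Build a request targeting both Surfaces, expand it with
+ *                 // createHighSpeedRequestList(), then submit via setRepeatingBurst().
+ *             }
+ *             public void onConfigureFailed(CameraCaptureSession session) { }
+ *         }, null);
+ * }</pre>
+ *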
+ * @param outputs The new set of Surfaces that should be made available as
+ * targets for captured high speed image data.
+ * @param callback The callback to notify about the status of the new capture session.
+ * @param handler The handler on which the callback should be invoked, or {@code null} to use
+ * the current thread's {@link android.os.Looper looper}.
+ *
+ * @throws IllegalArgumentException if the set of output Surfaces does not meet the requirements,
+ * the callback is null, the handler is null but the
+ * current thread has no looper, or the camera device
+ * doesn't support high speed video capability.
+ * @throws CameraAccessException if the camera device is no longer connected or has
+ * encountered a fatal error
+ * @throws IllegalStateException if the camera device has been closed
+ *
+ * @see #createCaptureSession
+ * @see CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE
+ * @see StreamConfigurationMap#getHighSpeedVideoSizes
+ * @see StreamConfigurationMap#getHighSpeedVideoFpsRangesFor
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ * @see CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO
+ * @see CameraCaptureSession#captureBurst
+ * @see CameraCaptureSession#setRepeatingBurst
+ * @see CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList
+ */
+ public abstract void createConstrainedHighSpeedCaptureSession(@NonNull List<Surface> outputs,
+ @NonNull CameraCaptureSession.StateCallback callback,
+ @Nullable Handler handler)
+ throws CameraAccessException;
+
+ /**
+ * Standard camera operation mode.
+ *
+ * @see #createCustomCaptureSession
+ * @hide
+ */
+ @SystemApi
+ @TestApi
+ public static final int SESSION_OPERATION_MODE_NORMAL =
+ 0; // ICameraDeviceUser.NORMAL_MODE;
+
+ /**
+ * Constrained high-speed operation mode.
+ *
+ * @see #createCustomCaptureSession
+ * @hide
+ */
+ @SystemApi
+ @TestApi
+ public static final int SESSION_OPERATION_MODE_CONSTRAINED_HIGH_SPEED =
+ 1; // ICameraDeviceUser.CONSTRAINED_HIGH_SPEED_MODE;
+
+ /**
+ * First vendor-specific operating mode
+ *
+ * @see #createCustomCaptureSession
+ * @hide
+ */
+ @SystemApi
+ @TestApi
+ public static final int SESSION_OPERATION_MODE_VENDOR_START =
+ 0x8000; // ICameraDeviceUser.VENDOR_MODE_START;
+
+ /** @hide */
+ @Retention(RetentionPolicy.SOURCE)
+ @IntDef(prefix = {"SESSION_OPERATION_MODE"}, value =
+ {SESSION_OPERATION_MODE_NORMAL,
+ SESSION_OPERATION_MODE_CONSTRAINED_HIGH_SPEED,
+ SESSION_OPERATION_MODE_VENDOR_START})
+ public @interface SessionOperatingMode {};
+
+ /**
+ * Create a new camera capture session with a custom operating mode.
+ *
+ * @param inputConfig The configuration for the input {@link Surface} if a reprocessing session
+ * is desired, or {@code null} otherwise.
+ * @param outputs The new set of {@link OutputConfiguration OutputConfigurations} that should be
+ * made available as targets for captured image data.
+ * @param operatingMode The custom operating mode to use; a nonnegative value, either a custom
+ * vendor value or one of the SESSION_OPERATION_MODE_* values.
+ * @param callback The callback to notify about the status of the new capture session.
+ * @param handler The handler on which the callback should be invoked, or {@code null} to use
+ * the current thread's {@link android.os.Looper looper}.
+ *
+ * @throws IllegalArgumentException if the input configuration is null or not supported, the set
+ * of output Surfaces does not meet the requirements, the
+ * callback is null, or the handler is null but the current
+ * thread has no looper.
+ * @throws CameraAccessException if the camera device is no longer connected or has
+ * encountered a fatal error
+ * @throws IllegalStateException if the camera device has been closed
+ *
+ * @see #createCaptureSession
+ * @see #createReprocessableCaptureSession
+ * @see CameraCaptureSession
+ * @see OutputConfiguration
+ * @hide
+ */
+ @SystemApi
+ @TestApi
+ public abstract void createCustomCaptureSession(
+ InputConfiguration inputConfig,
+ @NonNull List<OutputConfiguration> outputs,
+ @SessionOperatingMode int operatingMode,
+ @NonNull CameraCaptureSession.StateCallback callback,
+ @Nullable Handler handler)
+ throws CameraAccessException;
+
+ /**
+ * <p>Create a {@link CaptureRequest.Builder} for new capture requests,
+ * initialized with a template for a target use case. The settings are chosen
+ * to be the best options for the specific camera device, so it is not
+ * recommended to reuse the same request for a different camera device;
+ * instead, create a builder specific to that device and template and override the
+ * settings as desired.</p>
+ *
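+ * <p>For example, a preview request targeting a single output might be built as follows
+ * (a sketch only; {@code previewSurface} is an assumed, session-configured output):</p>
+ * <pre>{@code
+ * CaptureRequest.Builder builder =
+ *         mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
+ * builder.addTarget(previewSurface); // must be one of the session's configured Surfaces
+ * CaptureRequest previewRequest = builder.build();
+ * // Typically submitted with CameraCaptureSession#setRepeatingRequest.
+ * }</pre>
+ *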
+ * @param templateType An enumeration selecting the use case for this request. Not all template
+ * types are supported on every device. See the documentation for each template type for
+ * details.
+ * @return a builder for a capture request, initialized with default
+ * settings for that template, and no output streams
+ *
+ * @throws IllegalArgumentException if the templateType is not supported by
+ * this device.
+ * @throws CameraAccessException if the camera device is no longer connected or has
+ * encountered a fatal error
+ * @throws IllegalStateException if the camera device has been closed
+ *
+ * @see #TEMPLATE_PREVIEW
+ * @see #TEMPLATE_RECORD
+ * @see #TEMPLATE_STILL_CAPTURE
+ * @see #TEMPLATE_VIDEO_SNAPSHOT
+ * @see #TEMPLATE_MANUAL
+ */
+ @NonNull
+ public abstract CaptureRequest.Builder createCaptureRequest(@RequestTemplate int templateType)
+ throws CameraAccessException;
+
+ /**
+ * <p>Create a {@link CaptureRequest.Builder} for a new reprocess {@link CaptureRequest} from a
+ * {@link TotalCaptureResult}.
+ *
+ * <p>Each reprocess {@link CaptureRequest} processes one buffer from
+ * {@link CameraCaptureSession}'s input {@link Surface} to all output {@link Surface Surfaces}
+ * included in the reprocess capture request. The reprocess input images must be generated from
+ * one or multiple output images captured from the same camera device. The application can
+ * provide input images to camera device via {@link android.media.ImageWriter#queueInputImage}.
+ * The application must use the capture result of one of those output images to create a
+ * reprocess capture request so that the camera device can use the information to achieve
+ * optimal reprocess image quality. For camera devices that support only one output
+ * {@link Surface}, submitting a reprocess {@link CaptureRequest} with multiple
+ * output targets will result in a {@link CaptureFailure}.
+ *
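+ * <p>A sketch of a typical reprocessing flow, under the assumption that a reprocessable
+ * session was created with a {@code YUV_420_888} input and a JPEG output,
+ * {@code mImageWriter} wraps the session's input Surface, and {@code totalResult} and
+ * {@code yuvImage} belong to the ZSL frame chosen for reprocessing:</p>
+ * <pre>{@code
+ * CaptureRequest.Builder builder = mCameraDevice.createReprocessCaptureRequest(totalResult);
+ * builder.addTarget(jpegReader.getSurface()); // assumed JPEG output
+ * mImageWriter.queueInputImage(yuvImage); // hand the input frame to the camera device
+ * session.capture(builder.build(), captureCallback, handler);
+ * }</pre>
+ *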
+ * @param inputResult The capture result of the output image or one of the output images used
+ * to generate the reprocess input image for this capture request.
+ * @return a builder for a reprocess capture request, initialized with settings
+ * derived from the input result
+ *
+ * @throws IllegalArgumentException if inputResult is null.
+ * @throws CameraAccessException if the camera device is no longer connected or has
+ * encountered a fatal error
+ * @throws IllegalStateException if the camera device has been closed
+ *
+ * @see CaptureRequest.Builder
+ * @see TotalCaptureResult
+ * @see CameraDevice#createReprocessableCaptureSession
+ * @see android.media.ImageWriter
+ */
+ @NonNull
+ public abstract CaptureRequest.Builder createReprocessCaptureRequest(
+ @NonNull TotalCaptureResult inputResult) throws CameraAccessException;
+
+ /**
+ * Close the connection to this camera device as quickly as possible.
+ *
+ * <p>Immediately after this call, all calls to the camera device or active session interface
+ * will throw an {@link IllegalStateException}, except for calls to close(). Once the device has
+ * fully shut down, the {@link StateCallback#onClosed} callback will be called, and the camera
+ * is free to be re-opened.</p>
+ *
+ * <p>Immediately after this call, besides the final {@link StateCallback#onClosed} calls, no
+ * further callbacks from the device or the active session will occur, and any remaining
+ * submitted capture requests will be discarded, as if
+ * {@link CameraCaptureSession#abortCaptures} had been called, except that no success or failure
+ * callbacks will be invoked.</p>
+ *
+ */
+ @Override
+ public abstract void close();
+
+ /**
+ * A callback object for receiving updates about the state of a camera device.
+ *
+ * <p>A callback instance must be provided to the {@link CameraManager#openCamera} method to
+ * open a camera device.</p>
+ *
+ * <p>These state updates include notifications about the device completing startup
+ * (allowing for {@link #createCaptureSession} to be called), about device
+ * disconnection or closure, and about unexpected device errors.</p>
+ *
+ * <p>Events about the progress of specific {@link CaptureRequest CaptureRequests} are provided
+ * through a {@link CameraCaptureSession.CaptureCallback} given to the
+ * {@link CameraCaptureSession#capture}, {@link CameraCaptureSession#captureBurst},
+ * {@link CameraCaptureSession#setRepeatingRequest}, or
+ * {@link CameraCaptureSession#setRepeatingBurst} methods.</p>
+ *
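+ * <p>A skeletal implementation, as a sketch only:</p>
+ * <pre>{@code
+ * CameraDevice.StateCallback callback = new CameraDevice.StateCallback() {
+ *     public void onOpened(CameraDevice camera) {
+ *         // Camera is ready; e.g. call createCaptureSession here.
+ *     }
+ *     public void onDisconnected(CameraDevice camera) {
+ *         camera.close(); // not usable until it can be opened again
+ *     }
+ *     public void onError(CameraDevice camera, int error) {
+ *         camera.close(); // further recovery is error-code specific
+ *     }
+ * };
+ * }</pre>
+ *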
+ * @see CameraManager#openCamera
+ */
+ public static abstract class StateCallback {
+ /**
+ * An error code that can be reported by {@link #onError}
+ * indicating that the camera device is in use already.
+ *
+ * <p>
+ * This error can be produced when opening the camera fails due to the camera
+ * being used by a higher-priority camera API client.
+ * </p>
+ *
+ * @see #onError
+ */
+ public static final int ERROR_CAMERA_IN_USE = 1;
+
+ /**
+ * An error code that can be reported by {@link #onError}
+ * indicating that the camera device could not be opened
+ * because there are too many other open camera devices.
+ *
+ * <p>
+ * The system-wide limit for number of open cameras has been reached,
+ * and more camera devices cannot be opened until previous instances are
+ * closed.
+ * </p>
+ *
+ * <p>
+ * This error can be produced when opening the camera fails.
+ * </p>
+ *
+ * @see #onError
+ */
+ public static final int ERROR_MAX_CAMERAS_IN_USE = 2;
+
+ /**
+ * An error code that can be reported by {@link #onError}
+ * indicating that the camera device could not be opened due to a device
+ * policy.
+ *
+ * @see android.app.admin.DevicePolicyManager#setCameraDisabled(android.content.ComponentName, boolean)
+ * @see #onError
+ */
+ public static final int ERROR_CAMERA_DISABLED = 3;
+
+ /**
+ * An error code that can be reported by {@link #onError}
+ * indicating that the camera device has encountered a fatal error.
+ *
+ * <p>The camera device needs to be re-opened to be used again.</p>
+ *
+ * @see #onError
+ */
+ public static final int ERROR_CAMERA_DEVICE = 4;
+
+ /**
+ * An error code that can be reported by {@link #onError}
+ * indicating that the camera service has encountered a fatal error.
+ *
+ * <p>The Android device may need to be shut down and restarted to restore
+ * camera function, or there may be a persistent hardware problem.</p>
+ *
+ * <p>An attempt at recovery <i>may</i> be possible by closing the
+ * CameraDevice and the CameraManager, and trying to acquire all resources
+ * again from scratch.</p>
+ *
+ * @see #onError
+ */
+ public static final int ERROR_CAMERA_SERVICE = 5;
+
+ /** @hide */
+ @Retention(RetentionPolicy.SOURCE)
+ @IntDef(prefix = {"ERROR_"}, value =
+ {ERROR_CAMERA_IN_USE,
+ ERROR_MAX_CAMERAS_IN_USE,
+ ERROR_CAMERA_DISABLED,
+ ERROR_CAMERA_DEVICE,
+ ERROR_CAMERA_SERVICE })
+ public @interface ErrorCode {};
+
+ /**
+ * The method called when a camera device has finished opening.
+ *
+ * <p>At this point, the camera device is ready to use, and
+ * {@link CameraDevice#createCaptureSession} can be called to set up the first capture
+ * session.</p>
+ *
+ * @param camera the camera device that has become opened
+ */
+ public abstract void onOpened(@NonNull CameraDevice camera); // Must implement
+
+ /**
+ * The method called when a camera device has been closed with
+ * {@link CameraDevice#close}.
+ *
+ * <p>Any attempt to call methods on this CameraDevice in the
+ * future will throw an {@link IllegalStateException}.</p>
+ *
+ * <p>The default implementation of this method does nothing.</p>
+ *
+ * @param camera the camera device that has become closed
+ */
+ public void onClosed(@NonNull CameraDevice camera) {
+ // Default empty implementation
+ }
+
+ /**
+ * The method called when a camera device is no longer available for
+ * use.
+ *
+ * <p>This callback may be called instead of {@link #onOpened}
+ * if opening the camera fails.</p>
+ *
+ * <p>Any attempt to call methods on this CameraDevice will throw a
+ * {@link CameraAccessException}. The disconnection could be due to a
+ * change in security policy or permissions; the physical disconnection
+ * of a removable camera device; or the camera being needed for a
+ * higher-priority camera API client.</p>
+ *
+ * <p>There may still be capture callbacks that are invoked
+ * after this method is called, or new image buffers that are delivered
+ * to active outputs.</p>
+ *
+ * <p>The default implementation logs a notice to the system log
+ * about the disconnection.</p>
+ *
+ * <p>You should clean up the camera with {@link CameraDevice#close} after
+ * this happens, as it is not recoverable until the camera can be opened
+ * again. For most use cases, this will be when the camera again becomes
+ * {@link CameraManager.AvailabilityCallback#onCameraAvailable available}.
+ * </p>
+ *
+ * @param camera the device that has been disconnected
+ */
+ public abstract void onDisconnected(@NonNull CameraDevice camera); // Must implement
+
+ /**
+ * The method called when a camera device has encountered a serious error.
+ *
+ * <p>This callback may be called instead of {@link #onOpened}
+ * if opening the camera fails.</p>
+ *
+ * <p>This indicates a failure of the camera device or camera service in
+ * some way. Any attempt to call methods on this CameraDevice in the
+ * future will throw a {@link CameraAccessException} with the
+ * {@link CameraAccessException#CAMERA_ERROR CAMERA_ERROR} reason.
+ * </p>
+ *
+ * <p>There may still be capture completion or camera stream callbacks
+ * that will be called after this error is received.</p>
+ *
+ * <p>You should clean up the camera with {@link CameraDevice#close} after
+ * this happens. Further attempts at recovery are error-code specific.</p>
+ *
+ * @param camera The device reporting the error
+ * @param error The error code.
+ *
+ * @see #ERROR_CAMERA_IN_USE
+ * @see #ERROR_MAX_CAMERAS_IN_USE
+ * @see #ERROR_CAMERA_DISABLED
+ * @see #ERROR_CAMERA_DEVICE
+ * @see #ERROR_CAMERA_SERVICE
+ */
+ public abstract void onError(@NonNull CameraDevice camera,
+ @ErrorCode int error); // Must implement
+ }
+
+ /**
+ * To be inherited by android.hardware.camera2.* code only.
+ * @hide
+ */
+ public CameraDevice() {}
+}
diff --git a/android/hardware/camera2/CameraManager.java b/android/hardware/camera2/CameraManager.java
new file mode 100644
index 00000000..90bf896c
--- /dev/null
+++ b/android/hardware/camera2/CameraManager.java
@@ -0,0 +1,1230 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2;
+
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.annotation.RequiresPermission;
+import android.annotation.SystemService;
+import android.content.Context;
+import android.hardware.CameraInfo;
+import android.hardware.CameraStatus;
+import android.hardware.ICameraService;
+import android.hardware.ICameraServiceListener;
+import android.hardware.camera2.impl.CameraMetadataNative;
+import android.hardware.camera2.legacy.CameraDeviceUserShim;
+import android.hardware.camera2.legacy.LegacyMetadataMapper;
+import android.os.Binder;
+import android.os.DeadObjectException;
+import android.os.Handler;
+import android.os.IBinder;
+import android.os.Looper;
+import android.os.RemoteException;
+import android.os.ServiceManager;
+import android.os.ServiceSpecificException;
+import android.os.SystemProperties;
+import android.util.ArrayMap;
+import android.util.Log;
+
+import java.util.ArrayList;
+
+/**
+ * <p>A system service manager for detecting, characterizing, and connecting to
+ * {@link CameraDevice CameraDevices}.</p>
+ *
+ * <p>For more details about communicating with camera devices, read the Camera
+ * developer guide or the {@link android.hardware.camera2 camera2}
+ * package documentation.</p>
+ */
+@SystemService(Context.CAMERA_SERVICE)
+public final class CameraManager {
+
+ private static final String TAG = "CameraManager";
+ private final boolean DEBUG = false;
+
+ private static final int USE_CALLING_UID = -1;
+
+ @SuppressWarnings("unused")
+ private static final int API_VERSION_1 = 1;
+ private static final int API_VERSION_2 = 2;
+
+ private static final int CAMERA_TYPE_BACKWARD_COMPATIBLE = 0;
+ private static final int CAMERA_TYPE_ALL = 1;
+
+ private ArrayList<String> mDeviceIdList;
+
+ private final Context mContext;
+ private final Object mLock = new Object();
+
+ /**
+ * @hide
+ */
+ public CameraManager(Context context) {
+ synchronized(mLock) {
+ mContext = context;
+ }
+ }
+
+ /**
+ * Return the list of currently connected camera devices by identifier, including
+ * cameras that may be in use by other camera API clients.
+ *
+ * <p>Non-removable cameras use integers starting at 0 for their
+ * identifiers, while removable cameras have a unique identifier for each
+ * individual device, even if they are the same model.</p>
+ *
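+ * <p>For example, to inspect every camera (a sketch; {@code manager} is assumed to be this
+ * CameraManager instance):</p>
+ * <pre>{@code
+ * for (String id : manager.getCameraIdList()) {
+ *     CameraCharacteristics chars = manager.getCameraCharacteristics(id);
+ *     Integer facing = chars.get(CameraCharacteristics.LENS_FACING);
+ *     // e.g. pick the first camera with LENS_FACING_BACK
+ * }
+ * }</pre>
+ *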
+ * @return The list of currently connected camera devices.
+ */
+ @NonNull
+ public String[] getCameraIdList() throws CameraAccessException {
+ return CameraManagerGlobal.get().getCameraIdList();
+ }
+
+ /**
+ * Register a callback to be notified about camera device availability.
+ *
+ * <p>Registering the same callback again will replace the handler with the
+ * new one provided.</p>
+ *
+ * <p>The first time a callback is registered, it is immediately called
+ * with the availability status of all currently known camera devices.</p>
+ *
+ * <p>{@link AvailabilityCallback#onCameraUnavailable(String)} will be called whenever a camera
+ * device is opened by any camera API client. As of API level 23, other camera API clients may
+ * still be able to open such a camera device, evicting the existing client if they have a
+ * higher priority. See {@link #openCamera} for more details.</p>
+ *
+ * <p>Since this callback will be registered with the camera service, remember to unregister it
+ * once it is no longer needed; otherwise the callback will continue to receive events
+ * indefinitely and it may prevent other resources from being released. Specifically, the
+ * callbacks will be invoked independently of the general activity lifecycle and independently
+ * of the state of individual CameraManager instances.</p>
+ *
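+ * <p>A minimal registration sketch ({@code manager} is assumed to be this CameraManager
+ * instance):</p>
+ * <pre>{@code
+ * CameraManager.AvailabilityCallback availabilityCallback =
+ *         new CameraManager.AvailabilityCallback() {
+ *             public void onCameraAvailable(String cameraId) {
+ *                 // camera is free to be opened
+ *             }
+ *             public void onCameraUnavailable(String cameraId) {
+ *                 // camera is now in use by some client
+ *             }
+ *         };
+ * manager.registerAvailabilityCallback(availabilityCallback, null); // current looper
+ * // ... later, once updates are no longer needed:
+ * manager.unregisterAvailabilityCallback(availabilityCallback);
+ * }</pre>
+ *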
+ * @param callback the new callback to send camera availability notices to
+ * @param handler The handler on which the callback should be invoked, or {@code null} to use
+ * the current thread's {@link android.os.Looper looper}.
+ *
+ * @throws IllegalArgumentException if the handler is {@code null} but the current thread has
+ * no looper.
+ */
+ public void registerAvailabilityCallback(@NonNull AvailabilityCallback callback,
+ @Nullable Handler handler) {
+ if (handler == null) {
+ Looper looper = Looper.myLooper();
+ if (looper == null) {
+ throw new IllegalArgumentException(
+ "No handler given, and current thread has no looper!");
+ }
+ handler = new Handler(looper);
+ }
+
+ CameraManagerGlobal.get().registerAvailabilityCallback(callback, handler);
+ }
+
+ /**
+ * Remove a previously-added callback; the callback will no longer receive connection and
+ * disconnection callbacks.
+ *
+ * <p>Removing a callback that isn't registered has no effect.</p>
+ *
+ * @param callback The callback to remove from the notification list
+ */
+ public void unregisterAvailabilityCallback(@NonNull AvailabilityCallback callback) {
+ CameraManagerGlobal.get().unregisterAvailabilityCallback(callback);
+ }
+
+ /**
+ * Register a callback to be notified about torch mode status.
+ *
+ * <p>Registering the same callback again will replace the handler with the
+ * new one provided.</p>
+ *
+ * <p>The first time a callback is registered, it is immediately called
+ * with the torch mode status of all currently known camera devices with a flash unit.</p>
+ *
+ * <p>Since this callback will be registered with the camera service, remember to unregister it
+ * once it is no longer needed; otherwise the callback will continue to receive events
+ * indefinitely and it may prevent other resources from being released. Specifically, the
+ * callbacks will be invoked independently of the general activity lifecycle and independently
+ * of the state of individual CameraManager instances.</p>
+ *
+ * @param callback The new callback to send torch mode status to
+ * @param handler The handler on which the callback should be invoked, or {@code null} to use
+ * the current thread's {@link android.os.Looper looper}.
+ *
+ * @throws IllegalArgumentException if the handler is {@code null} but the current thread has
+ * no looper.
+ */
+ public void registerTorchCallback(@NonNull TorchCallback callback, @Nullable Handler handler) {
+ if (handler == null) {
+ Looper looper = Looper.myLooper();
+ if (looper == null) {
+ throw new IllegalArgumentException(
+ "No handler given, and current thread has no looper!");
+ }
+ handler = new Handler(looper);
+ }
+ CameraManagerGlobal.get().registerTorchCallback(callback, handler);
+ }
+
+ /**
+ * Remove a previously-added callback; the callback will no longer receive torch mode status
+ * callbacks.
+ *
+ * <p>Removing a callback that isn't registered has no effect.</p>
+ *
+ * @param callback The callback to remove from the notification list
+ */
+ public void unregisterTorchCallback(@NonNull TorchCallback callback) {
+ CameraManagerGlobal.get().unregisterTorchCallback(callback);
+ }
+
+ /**
+ * <p>Query the capabilities of a camera device. These capabilities are
+ * immutable for a given camera.</p>
+ *
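+ * <p>For example, to check whether a camera has a flash unit (a sketch only; {@code manager}
+ * is assumed to be this CameraManager instance):</p>
+ * <pre>{@code
+ * CameraCharacteristics chars = manager.getCameraCharacteristics(cameraId);
+ * Boolean hasFlash = chars.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
+ * boolean flashSupported = (hasFlash != null && hasFlash); // defensively handle null
+ * }</pre>
+ *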
+ * @param cameraId The id of the camera device to query
+ * @return The properties of the given camera
+ *
+ * @throws IllegalArgumentException if the cameraId does not match any
+ * known camera device.
+ * @throws CameraAccessException if the camera device has been disconnected.
+ *
+ * @see #getCameraIdList
+ * @see android.app.admin.DevicePolicyManager#setCameraDisabled
+ */
+ @NonNull
+ public CameraCharacteristics getCameraCharacteristics(@NonNull String cameraId)
+ throws CameraAccessException {
+ CameraCharacteristics characteristics = null;
+ if (CameraManagerGlobal.sCameraServiceDisabled) {
+ throw new IllegalArgumentException("No cameras available on device");
+ }
+ synchronized (mLock) {
+ /*
+ * Get the camera characteristics from the camera service directly if it supports it,
+ * otherwise get them from the legacy shim instead.
+ */
+ ICameraService cameraService = CameraManagerGlobal.get().getCameraService();
+ if (cameraService == null) {
+ throw new CameraAccessException(CameraAccessException.CAMERA_DISCONNECTED,
+ "Camera service is currently unavailable");
+ }
+ try {
+ if (!supportsCamera2ApiLocked(cameraId)) {
+ // Legacy backwards compatibility path; build static info from the camera
+ // parameters
+ int id = Integer.parseInt(cameraId);
+
+ String parameters = cameraService.getLegacyParameters(id);
+
+ CameraInfo info = cameraService.getCameraInfo(id);
+
+ characteristics = LegacyMetadataMapper.createCharacteristics(parameters, info);
+ } else {
+ // Normal path: Get the camera characteristics directly from the camera service
+ CameraMetadataNative info = cameraService.getCameraCharacteristics(cameraId);
+
+ characteristics = new CameraCharacteristics(info);
+ }
+ } catch (ServiceSpecificException e) {
+ throwAsPublicException(e);
+ } catch (RemoteException e) {
+ // Camera service died - act as if the camera was disconnected
+ throw new CameraAccessException(CameraAccessException.CAMERA_DISCONNECTED,
+ "Camera service is currently unavailable", e);
+ }
+ }
+ return characteristics;
+ }
+
+ /**
+ * Helper for opening a connection to a camera with the given ID.
+ *
+ * @param cameraId The unique identifier of the camera device to open
+ * @param callback The callback for the camera. Must not be null.
+ * @param handler The handler to invoke the callback on. Must not be null.
+ * @param uid The UID of the application actually opening the camera.
+ * Must be USE_CALLING_UID unless the caller is a service
+ * that is trusted to open the device on behalf of an
+ * application and to forward the real UID.
+ *
+ * @throws CameraAccessException if the camera is disabled by device policy,
+ * too many camera devices are already open, or the cameraId does not match
+ * any currently available camera device.
+ *
+ * @throws SecurityException if the application does not have permission to
+ * access the camera
+ * @throws IllegalArgumentException if callback or handler is null.
+ * @return A handle to the newly-created camera device.
+ *
+ * @see #getCameraIdList
+ * @see android.app.admin.DevicePolicyManager#setCameraDisabled
+ */
+ private CameraDevice openCameraDeviceUserAsync(String cameraId,
+ CameraDevice.StateCallback callback, Handler handler, final int uid)
+ throws CameraAccessException {
+ CameraCharacteristics characteristics = getCameraCharacteristics(cameraId);
+ CameraDevice device = null;
+
+ synchronized (mLock) {
+
+ ICameraDeviceUser cameraUser = null;
+
+ android.hardware.camera2.impl.CameraDeviceImpl deviceImpl =
+ new android.hardware.camera2.impl.CameraDeviceImpl(
+ cameraId,
+ callback,
+ handler,
+ characteristics,
+ mContext.getApplicationInfo().targetSdkVersion);
+
+ ICameraDeviceCallbacks callbacks = deviceImpl.getCallbacks();
+
+ try {
+ if (supportsCamera2ApiLocked(cameraId)) {
+ // Use cameraservice's cameradeviceclient implementation for HAL3.2+ devices
+ ICameraService cameraService = CameraManagerGlobal.get().getCameraService();
+ if (cameraService == null) {
+ throw new ServiceSpecificException(
+ ICameraService.ERROR_DISCONNECTED,
+ "Camera service is currently unavailable");
+ }
+ cameraUser = cameraService.connectDevice(callbacks, cameraId,
+ mContext.getOpPackageName(), uid);
+ } else {
+ // Use legacy camera implementation for HAL1 devices
+ int id;
+ try {
+ id = Integer.parseInt(cameraId);
+ } catch (NumberFormatException e) {
+ throw new IllegalArgumentException("Expected cameraId to be numeric, but it was: "
+ + cameraId);
+ }
+
+ Log.i(TAG, "Using legacy camera HAL.");
+ cameraUser = CameraDeviceUserShim.connectBinderShim(callbacks, id);
+ }
+ } catch (ServiceSpecificException e) {
+ if (e.errorCode == ICameraService.ERROR_DEPRECATED_HAL) {
+ throw new AssertionError("Should've gone down the shim path");
+ } else if (e.errorCode == ICameraService.ERROR_CAMERA_IN_USE ||
+ e.errorCode == ICameraService.ERROR_MAX_CAMERAS_IN_USE ||
+ e.errorCode == ICameraService.ERROR_DISABLED ||
+ e.errorCode == ICameraService.ERROR_DISCONNECTED ||
+ e.errorCode == ICameraService.ERROR_INVALID_OPERATION) {
+ // Received one of the known connection errors
+ // The remote camera device cannot be connected to, so
+ // set the local camera to the startup error state
+ deviceImpl.setRemoteFailure(e);
+
+ if (e.errorCode == ICameraService.ERROR_DISABLED ||
+ e.errorCode == ICameraService.ERROR_DISCONNECTED ||
+ e.errorCode == ICameraService.ERROR_CAMERA_IN_USE) {
+ // Per API docs, these failures call onError and throw
+ throwAsPublicException(e);
+ }
+ } else {
+ // Unexpected failure - rethrow
+ throwAsPublicException(e);
+ }
+ } catch (RemoteException e) {
+ // Camera service died - act as if it's a CAMERA_DISCONNECTED case
+ ServiceSpecificException sse = new ServiceSpecificException(
+ ICameraService.ERROR_DISCONNECTED,
+ "Camera service is currently unavailable");
+ deviceImpl.setRemoteFailure(sse);
+ throwAsPublicException(sse);
+ }
+
+ // TODO: factor out callback to be non-nested, then move setter to constructor
+ // For now, calling setRemoteDevice will fire initial
+ // onOpened/onUnconfigured callbacks.
+ // This function call may post onDisconnected and throw CAMERA_DISCONNECTED if
+ // cameraUser dies during setup.
+ deviceImpl.setRemoteDevice(cameraUser);
+ device = deviceImpl;
+ }
+
+ return device;
+ }
+
+ /**
+ * Open a connection to a camera with the given ID.
+ *
+ * <p>Use {@link #getCameraIdList} to get the list of available camera
+ * devices. Note that even if an id is listed, open may fail if the device
+ * is disconnected between the calls to {@link #getCameraIdList} and
+ * {@link #openCamera}, or if a higher-priority camera API client begins using the
+ * camera device.</p>
+ *
+ * <p>As of API level 23, devices for which the
+ * {@link AvailabilityCallback#onCameraUnavailable(String)} callback has been called due to the
+ * device being in use by a lower-priority, background camera API client can still potentially
+ * be opened by calling this method when the calling camera API client has a higher priority
+ * than the current camera API client using this device. In general, if the top, foreground
+ * activity is running within your application process, your process will be given the highest
+ * priority when accessing the camera, and this method will succeed even if the camera device is
+ * in use by another camera API client. Any lower-priority application that loses control of the
+ * camera in this way will receive an
+ * {@link android.hardware.camera2.CameraDevice.StateCallback#onDisconnected} callback.</p>
+ *
+ * <p>Once the camera is successfully opened, {@link CameraDevice.StateCallback#onOpened} will
+ * be invoked with the newly opened {@link CameraDevice}. The camera device can then be set up
+ * for operation by calling {@link CameraDevice#createCaptureSession} and
+ * {@link CameraDevice#createCaptureRequest}.</p>
+ *
+ * <!--
+ * <p>Since the camera device will be opened asynchronously, any asynchronous operations done
+ * on the returned CameraDevice instance will be queued up until the device startup has
+ * completed and the callback's {@link CameraDevice.StateCallback#onOpened onOpened} method is
+ * called. The pending operations are then processed in order.</p>
+ * -->
+ * <p>If the camera becomes disconnected during initialization
+ * after this function call returns,
+ * {@link CameraDevice.StateCallback#onDisconnected} will be invoked with a
+ * {@link CameraDevice} in the disconnected state (and
+ * {@link CameraDevice.StateCallback#onOpened} will be skipped).</p>
+ *
+ * <p>If opening the camera device fails, then the device callback's
+ * {@link CameraDevice.StateCallback#onError onError} method will be called, and subsequent
+ * calls on the camera device will throw a {@link CameraAccessException}.</p>
+ *
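+ * <p>Putting it together, as a sketch with error handling elided ({@code manager} is this
+ * CameraManager instance):</p>
+ * <pre>{@code
+ * manager.openCamera(cameraId, new CameraDevice.StateCallback() {
+ *     public void onOpened(CameraDevice camera) {
+ *         mCameraDevice = camera; // ready for createCaptureSession/createCaptureRequest
+ *     }
+ *     public void onDisconnected(CameraDevice camera) {
+ *         camera.close();
+ *     }
+ *     public void onError(CameraDevice camera, int error) {
+ *         camera.close();
+ *     }
+ * }, null); // null handler: deliver callbacks on the current thread's looper
+ * }</pre>
+ *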
+ * @param cameraId
+ * The unique identifier of the camera device to open
+ * @param callback
+ * The callback which is invoked once the camera is opened
+ * @param handler
+ * The handler on which the callback should be invoked, or
+ * {@code null} to use the current thread's {@link android.os.Looper looper}.
+ *
+ * @throws CameraAccessException if the camera is disabled by device policy,
+ * has been disconnected, or is being used by a higher-priority camera API client.
+ *
+ * @throws IllegalArgumentException if cameraId or the callback was null,
+ * or the cameraId does not match any currently or previously available
+ * camera device.
+ *
+ * @throws SecurityException if the application does not have permission to
+ * access the camera
+ *
+ * @see #getCameraIdList
+ * @see android.app.admin.DevicePolicyManager#setCameraDisabled
+ */
+ @RequiresPermission(android.Manifest.permission.CAMERA)
+ public void openCamera(@NonNull String cameraId,
+ @NonNull final CameraDevice.StateCallback callback, @Nullable Handler handler)
+ throws CameraAccessException {
+
+ openCameraForUid(cameraId, callback, handler, USE_CALLING_UID);
+ }
+
+ /**
+ * Open a connection to a camera with the given ID, on behalf of another application
+ * specified by clientUid.
+ *
+ * <p>The behavior of this method matches that of {@link #openCamera}, except that it allows
+ * the caller to specify the UID to use for permission/etc verification. This can only be
+ * done by services trusted by the camera subsystem to act on behalf of applications and
+ * to forward the real UID.</p>
+ *
+ * @param clientUid
+ * The UID of the application on whose behalf the camera is being opened.
+ * Must be USE_CALLING_UID unless the caller is a trusted service.
+ *
+ * @hide
+ */
+ public void openCameraForUid(@NonNull String cameraId,
+ @NonNull final CameraDevice.StateCallback callback, @Nullable Handler handler,
+ int clientUid)
+ throws CameraAccessException {
+
+ if (cameraId == null) {
+ throw new IllegalArgumentException("cameraId was null");
+ } else if (callback == null) {
+ throw new IllegalArgumentException("callback was null");
+ } else if (handler == null) {
+ if (Looper.myLooper() != null) {
+ handler = new Handler();
+ } else {
+ throw new IllegalArgumentException(
+ "Handler argument is null, but no looper exists in the calling thread");
+ }
+ }
+ if (CameraManagerGlobal.sCameraServiceDisabled) {
+ throw new IllegalArgumentException("No cameras available on device");
+ }
+
+ openCameraDeviceUserAsync(cameraId, callback, handler, clientUid);
+ }
+
+ /**
+ * Set the flash unit's torch mode of the camera of the given ID without opening the camera
+ * device.
+ *
+ * <p>Use {@link #getCameraIdList} to get the list of available camera devices and use
+ * {@link #getCameraCharacteristics} to check whether the camera device has a flash unit.
+ * Note that even if a camera device has a flash unit, turning on the torch mode may fail
+ * if the camera device or other camera resources needed to turn on the torch mode are in use.
+ * </p>
+ *
+ * <p> If {@link #setTorchMode} is called to turn on or off the torch mode successfully,
+ * {@link CameraManager.TorchCallback#onTorchModeChanged} will be invoked.
+ * However, even if turning on the torch mode is successful, the application does not have the
+ * exclusive ownership of the flash unit or the camera device. The torch mode will be turned
+ * off and become unavailable when the camera device that the flash unit belongs to becomes
+ * unavailable or when other camera resources needed to keep the torch on become unavailable
+ * ({@link CameraManager.TorchCallback#onTorchModeUnavailable} will be invoked). Also,
+ * other applications are free to call {@link #setTorchMode} to turn off the torch mode
+ * ({@link CameraManager.TorchCallback#onTorchModeChanged} will be invoked). If the latest
+ * application that turned on the torch mode exits, the torch mode will be turned off.</p>
+ *
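+ * <p>For example, to turn on the torch of the first camera with a flash unit (a sketch;
+ * exception handling elided, and {@code manager} is this CameraManager instance):</p>
+ * <pre>{@code
+ * for (String id : manager.getCameraIdList()) {
+ *     Boolean hasFlash = manager.getCameraCharacteristics(id)
+ *             .get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
+ *     if (hasFlash != null && hasFlash) {
+ *         manager.setTorchMode(id, true); // turn the torch on
+ *         break;
+ *     }
+ * }
+ * }</pre>
+ *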
+ * @param cameraId
+ * The unique identifier of the camera device that the flash unit belongs to.
+ * @param enabled
+ * The desired state of the torch mode for the target camera device. Set to
+ * {@code true} to turn on the torch mode. Set to {@code false} to turn off the
+ * torch mode.
+ *
+ * @throws CameraAccessException if it failed to access the flash unit.
+ * {@link CameraAccessException#CAMERA_IN_USE} will be thrown if the camera device
+ * is in use. {@link CameraAccessException#MAX_CAMERAS_IN_USE} will be thrown if
+ * other camera resources needed to turn on the torch mode are in use.
+ * {@link CameraAccessException#CAMERA_DISCONNECTED} will be thrown if camera
+ * service is not available.
+ *
+ * @throws IllegalArgumentException if cameraId was null, cameraId doesn't match any currently
+ * or previously available camera device, or the camera device doesn't have a
+ * flash unit.
+ */
+ public void setTorchMode(@NonNull String cameraId, boolean enabled)
+ throws CameraAccessException {
+ if (CameraManagerGlobal.sCameraServiceDisabled) {
+ throw new IllegalArgumentException("No cameras available on device");
+ }
+ CameraManagerGlobal.get().setTorchMode(cameraId, enabled);
+ }
+
+ /**
+ * A callback for camera devices becoming available or unavailable to open.
+ *
+ * <p>Cameras become available when they are no longer in use, or when a new
+ * removable camera is connected. They become unavailable when some
+ * application or service starts using a camera, or when a removable camera
+ * is disconnected.</p>
+ *
+ * <p>Extend this callback and pass an instance of the subclass to
+ * {@link CameraManager#registerAvailabilityCallback} to be notified of such availability
+ * changes.</p>
+ *
+ * @see #registerAvailabilityCallback
+ */
+ public static abstract class AvailabilityCallback {
+
+ /**
+ * A new camera has become available to use.
+ *
+ * <p>The default implementation of this method does nothing.</p>
+ *
+ * @param cameraId The unique identifier of the new camera.
+ */
+ public void onCameraAvailable(@NonNull String cameraId) {
+ // default empty implementation
+ }
+
+ /**
+ * A previously-available camera has become unavailable for use.
+ *
+ * <p>If an application had an active CameraDevice instance for the
+ * now-disconnected camera, that application will receive a
+ * {@link CameraDevice.StateCallback#onDisconnected disconnection error}.</p>
+ *
+ * <p>The default implementation of this method does nothing.</p>
+ *
+ * @param cameraId The unique identifier of the disconnected camera.
+ */
+ public void onCameraUnavailable(@NonNull String cameraId) {
+ // default empty implementation
+ }
+ }
+
+ /**
+ * A callback for camera flash torch modes becoming unavailable, disabled, or enabled.
+ *
+ * <p>The torch mode becomes unavailable when the camera device it belongs to becomes
+ * unavailable or other camera resources it needs become busy due to other higher priority
+ * camera activities. The torch mode becomes disabled when it is turned off or when the camera
+ * device it belongs to is no longer in use and other camera resources it needs are no longer
+ * busy. A camera's torch mode is turned off when an application calls {@link #setTorchMode} to
+ * turn off the camera's torch mode, or when an application turns on another camera's torch mode
+ * if keeping multiple torch modes on simultaneously is not supported. The torch mode becomes
+ * enabled when it is turned on via {@link #setTorchMode}.</p>
+ *
+ * <p>The torch mode is available to set via {@link #setTorchMode} only when it's in a disabled
+ * or enabled state.</p>
+ *
+ * <p>Extend this callback and pass an instance of the subclass to
+ * {@link CameraManager#registerTorchCallback} to be notified of such status changes.
+ * </p>
+ *
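+ * <p>A minimal subclass, as a sketch, overriding only the status-change callback
+ * ({@code manager} is an assumed CameraManager instance):</p>
+ * <pre>{@code
+ * manager.registerTorchCallback(new CameraManager.TorchCallback() {
+ *     public void onTorchModeChanged(String cameraId, boolean enabled) {
+ *         // update UI to reflect the new torch state
+ *     }
+ * }, null); // current thread's looper
+ * }</pre>
+ *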
+ * @see #registerTorchCallback
+ */
+ public static abstract class TorchCallback {
+ /**
+ * A camera's torch mode has become unavailable to set via {@link #setTorchMode}.
+ *
+ * <p>If torch mode was previously turned on by calling {@link #setTorchMode}, it will be
+ * turned off before {@link CameraManager.TorchCallback#onTorchModeUnavailable} is
+ * invoked. {@link #setTorchMode} will fail until the torch mode has entered a disabled or
+ * enabled state again.</p>
+ *
+ * <p>The default implementation of this method does nothing.</p>
+ *
+ * @param cameraId The unique identifier of the camera whose torch mode has become
+ * unavailable.
+ */
+ public void onTorchModeUnavailable(@NonNull String cameraId) {
+ // default empty implementation
+ }
+
+ /**
+ * A camera's torch mode has become enabled or disabled and can be changed via
+ * {@link #setTorchMode}.
+ *
+ * <p>The default implementation of this method does nothing.</p>
+ *
+ * @param cameraId The unique identifier of the camera whose torch mode has been changed.
+ *
+ * @param enabled The state that the torch mode of the camera has been changed to.
+ * {@code true} when the torch mode has been turned on and can be turned
+ * off; {@code false} when the torch mode has been turned off and can
+ * be turned on.
+ */
+ public void onTorchModeChanged(@NonNull String cameraId, boolean enabled) {
+ // default empty implementation
+ }
+ }
+
+ /**
+ * Convert ServiceSpecificExceptions and Binder RemoteExceptions from camera binder interfaces
+ * into the correct public exceptions.
+ *
+ * @hide
+ */
+ public static void throwAsPublicException(Throwable t) throws CameraAccessException {
+ if (t instanceof ServiceSpecificException) {
+ ServiceSpecificException e = (ServiceSpecificException) t;
+ int reason = CameraAccessException.CAMERA_ERROR;
+ switch(e.errorCode) {
+ case ICameraService.ERROR_DISCONNECTED:
+ reason = CameraAccessException.CAMERA_DISCONNECTED;
+ break;
+ case ICameraService.ERROR_DISABLED:
+ reason = CameraAccessException.CAMERA_DISABLED;
+ break;
+ case ICameraService.ERROR_CAMERA_IN_USE:
+ reason = CameraAccessException.CAMERA_IN_USE;
+ break;
+ case ICameraService.ERROR_MAX_CAMERAS_IN_USE:
+ reason = CameraAccessException.MAX_CAMERAS_IN_USE;
+ break;
+ case ICameraService.ERROR_DEPRECATED_HAL:
+ reason = CameraAccessException.CAMERA_DEPRECATED_HAL;
+ break;
+ case ICameraService.ERROR_ILLEGAL_ARGUMENT:
+ case ICameraService.ERROR_ALREADY_EXISTS:
+ throw new IllegalArgumentException(e.getMessage(), e);
+ case ICameraService.ERROR_PERMISSION_DENIED:
+ throw new SecurityException(e.getMessage(), e);
+ case ICameraService.ERROR_TIMED_OUT:
+ case ICameraService.ERROR_INVALID_OPERATION:
+ default:
+ reason = CameraAccessException.CAMERA_ERROR;
+ }
+ throw new CameraAccessException(reason, e.getMessage(), e);
+ } else if (t instanceof DeadObjectException) {
+ throw new CameraAccessException(CameraAccessException.CAMERA_DISCONNECTED,
+ "Camera service has died unexpectedly",
+ t);
+ } else if (t instanceof RemoteException) {
+ throw new UnsupportedOperationException("An unknown RemoteException was thrown" +
+ " which should never happen.", t);
+ } else if (t instanceof RuntimeException) {
+ RuntimeException e = (RuntimeException) t;
+ throw e;
+ }
+ }
+
+ /**
+ * Queries whether the camera service supports the camera2 API for this camera directly,
+ * or whether the legacy shim is needed.
+ *
+ * @param cameraId a non-{@code null} camera identifier
+ * @return {@code false} if the legacy shim needs to be used, {@code true} otherwise.
+ */
+ private boolean supportsCamera2ApiLocked(String cameraId) {
+ return supportsCameraApiLocked(cameraId, API_VERSION_2);
+ }
+
+ /**
+ * Queries whether the camera service supports a given camera API version directly,
+ * or whether the legacy shim is needed.
+ *
+ * @param cameraId a non-{@code null} camera identifier
+ * @param apiVersion the version, i.e. {@code API_VERSION_1} or {@code API_VERSION_2}
+ * @return {@code true} if connecting will work for that device version.
+ */
+ private boolean supportsCameraApiLocked(String cameraId, int apiVersion) {
+ /*
+ * Possible return values:
+ * - NO_ERROR => CameraX API is supported
+ * - CAMERA_DEPRECATED_HAL => CameraX API is *not* supported (thrown as an exception)
+ * - Remote exception => If the camera service died
+ *
+ * Anything else is an unexpected error we don't want to recover from.
+ */
+ try {
+ ICameraService cameraService = CameraManagerGlobal.get().getCameraService();
+ // If no camera service, no support
+ if (cameraService == null) return false;
+
+ return cameraService.supportsCameraApi(cameraId, apiVersion);
+ } catch (RemoteException e) {
+ // Camera service is now down, no support for any API level
+ }
+ return false;
+ }
+
+ /**
+ * A per-process global camera manager instance, to retain a connection to the camera service,
+ * and to distribute camera availability notices to API-registered callbacks
+ */
+ private static final class CameraManagerGlobal extends ICameraServiceListener.Stub
+ implements IBinder.DeathRecipient {
+
+ private static final String TAG = "CameraManagerGlobal";
+ private final boolean DEBUG = false;
+
+ private final int CAMERA_SERVICE_RECONNECT_DELAY_MS = 1000;
+
+ // Singleton instance
+ private static final CameraManagerGlobal gCameraManager =
+ new CameraManagerGlobal();
+
+ /**
+ * This must match the ICameraService definition
+ */
+ private static final String CAMERA_SERVICE_BINDER_NAME = "media.camera";
+
+ // Camera ID -> Status map
+ private final ArrayMap<String, Integer> mDeviceStatus = new ArrayMap<String, Integer>();
+
+ // Registered availability callbacks and their handlers
+ private final ArrayMap<AvailabilityCallback, Handler> mCallbackMap =
+ new ArrayMap<AvailabilityCallback, Handler>();
+
+ // torch client binder to set the torch mode with.
+ private Binder mTorchClientBinder = new Binder();
+
+ // Camera ID -> Torch status map
+ private final ArrayMap<String, Integer> mTorchStatus = new ArrayMap<String, Integer>();
+
+ // Registered torch callbacks and their handlers
+ private final ArrayMap<TorchCallback, Handler> mTorchCallbackMap =
+ new ArrayMap<TorchCallback, Handler>();
+
+ private final Object mLock = new Object();
+
+ // Access only through getCameraService to deal with binder death
+ private ICameraService mCameraService;
+
+ // Singleton, don't allow construction
+ private CameraManagerGlobal() {
+ }
+
+ public static final boolean sCameraServiceDisabled =
+ SystemProperties.getBoolean("config.disable_cameraservice", false);
+
+ public static CameraManagerGlobal get() {
+ return gCameraManager;
+ }
+
+ @Override
+ public IBinder asBinder() {
+ return this;
+ }
+
+ /**
+ * Return a best-effort ICameraService.
+ *
+ * <p>This will be null if the camera service is not currently available. If the camera
+ * service has died since its last use, this method will try to reconnect to the
+ * service.</p>
+ */
+ public ICameraService getCameraService() {
+ synchronized(mLock) {
+ connectCameraServiceLocked();
+ if (mCameraService == null && !sCameraServiceDisabled) {
+ Log.e(TAG, "Camera service is unavailable");
+ }
+ return mCameraService;
+ }
+ }
+
+ /**
+ * Connect to the camera service if it's available, and set up listeners.
+ * If the service is already connected, do nothing.
+ *
+ * <p>Sets mCameraService to a valid pointer or null if the connection does not succeed.</p>
+ */
+ private void connectCameraServiceLocked() {
+ // Only reconnect if necessary
+ if (mCameraService != null || sCameraServiceDisabled) return;
+
+ Log.i(TAG, "Connecting to camera service");
+
+ IBinder cameraServiceBinder = ServiceManager.getService(CAMERA_SERVICE_BINDER_NAME);
+ if (cameraServiceBinder == null) {
+ // Camera service is now down, leave mCameraService as null
+ return;
+ }
+ try {
+ cameraServiceBinder.linkToDeath(this, /*flags*/ 0);
+ } catch (RemoteException e) {
+ // Camera service is now down, leave mCameraService as null
+ return;
+ }
+
+ ICameraService cameraService = ICameraService.Stub.asInterface(cameraServiceBinder);
+
+ try {
+ CameraMetadataNative.setupGlobalVendorTagDescriptor();
+ } catch (ServiceSpecificException e) {
+ handleRecoverableSetupErrors(e);
+ }
+
+ try {
+ CameraStatus[] cameraStatuses = cameraService.addListener(this);
+ for (CameraStatus c : cameraStatuses) {
+ onStatusChangedLocked(c.status, c.cameraId);
+ }
+ mCameraService = cameraService;
+ } catch(ServiceSpecificException e) {
+ // Unexpected failure
+ throw new IllegalStateException("Failed to register a camera service listener", e);
+ } catch (RemoteException e) {
+ // Camera service is now down, leave mCameraService as null
+ }
+ }
+
+ /**
+ * Get a list of all camera IDs that are at least PRESENT; ignore devices that are
+ * NOT_PRESENT or ENUMERATING, since they cannot be used by anyone.
+ */
+ public String[] getCameraIdList() {
+ String[] cameraIds = null;
+ synchronized(mLock) {
+ // Try to make sure we have an up-to-date list of camera devices.
+ connectCameraServiceLocked();
+
+ int idCount = 0;
+ for (int i = 0; i < mDeviceStatus.size(); i++) {
+ int status = mDeviceStatus.valueAt(i);
+ if (status == ICameraServiceListener.STATUS_NOT_PRESENT ||
+ status == ICameraServiceListener.STATUS_ENUMERATING) continue;
+ idCount++;
+ }
+ cameraIds = new String[idCount];
+ idCount = 0;
+ for (int i = 0; i < mDeviceStatus.size(); i++) {
+ int status = mDeviceStatus.valueAt(i);
+ if (status == ICameraServiceListener.STATUS_NOT_PRESENT ||
+ status == ICameraServiceListener.STATUS_ENUMERATING) continue;
+ cameraIds[idCount] = mDeviceStatus.keyAt(i);
+ idCount++;
+ }
+ }
+ return cameraIds;
+ }
+
+ public void setTorchMode(String cameraId, boolean enabled) throws CameraAccessException {
+ synchronized(mLock) {
+
+ if (cameraId == null) {
+ throw new IllegalArgumentException("cameraId was null");
+ }
+
+ ICameraService cameraService = getCameraService();
+ if (cameraService == null) {
+ throw new CameraAccessException(CameraAccessException.CAMERA_DISCONNECTED,
+ "Camera service is currently unavailable");
+ }
+
+ try {
+ cameraService.setTorchMode(cameraId, enabled, mTorchClientBinder);
+ } catch(ServiceSpecificException e) {
+ throwAsPublicException(e);
+ } catch (RemoteException e) {
+ throw new CameraAccessException(CameraAccessException.CAMERA_DISCONNECTED,
+ "Camera service is currently unavailable");
+ }
+ }
+ }
+
+ private void handleRecoverableSetupErrors(ServiceSpecificException e) {
+ switch (e.errorCode) {
+ case ICameraService.ERROR_DISCONNECTED:
+ Log.w(TAG, e.getMessage());
+ break;
+ default:
+ throw new IllegalStateException(e);
+ }
+ }
+
+ private boolean isAvailable(int status) {
+ switch (status) {
+ case ICameraServiceListener.STATUS_PRESENT:
+ return true;
+ default:
+ return false;
+ }
+ }
+
+ private boolean validStatus(int status) {
+ switch (status) {
+ case ICameraServiceListener.STATUS_NOT_PRESENT:
+ case ICameraServiceListener.STATUS_PRESENT:
+ case ICameraServiceListener.STATUS_ENUMERATING:
+ case ICameraServiceListener.STATUS_NOT_AVAILABLE:
+ return true;
+ default:
+ return false;
+ }
+ }
+
+ private boolean validTorchStatus(int status) {
+ switch (status) {
+ case ICameraServiceListener.TORCH_STATUS_NOT_AVAILABLE:
+ case ICameraServiceListener.TORCH_STATUS_AVAILABLE_ON:
+ case ICameraServiceListener.TORCH_STATUS_AVAILABLE_OFF:
+ return true;
+ default:
+ return false;
+ }
+ }
+
+ private void postSingleUpdate(final AvailabilityCallback callback, final Handler handler,
+ final String id, final int status) {
+ if (isAvailable(status)) {
+ handler.post(
+ new Runnable() {
+ @Override
+ public void run() {
+ callback.onCameraAvailable(id);
+ }
+ });
+ } else {
+ handler.post(
+ new Runnable() {
+ @Override
+ public void run() {
+ callback.onCameraUnavailable(id);
+ }
+ });
+ }
+ }
+
+ private void postSingleTorchUpdate(final TorchCallback callback, final Handler handler,
+ final String id, final int status) {
+ switch(status) {
+ case ICameraServiceListener.TORCH_STATUS_AVAILABLE_ON:
+ case ICameraServiceListener.TORCH_STATUS_AVAILABLE_OFF:
+ handler.post(
+ new Runnable() {
+ @Override
+ public void run() {
+ callback.onTorchModeChanged(id, status ==
+ ICameraServiceListener.TORCH_STATUS_AVAILABLE_ON);
+ }
+ });
+ break;
+ default:
+ handler.post(
+ new Runnable() {
+ @Override
+ public void run() {
+ callback.onTorchModeUnavailable(id);
+ }
+ });
+ break;
+ }
+ }
+
+ /**
+ * Send the state of all known cameras to the provided listener, to initialize
+ * the listener's knowledge of camera state.
+ */
+ private void updateCallbackLocked(AvailabilityCallback callback, Handler handler) {
+ for (int i = 0; i < mDeviceStatus.size(); i++) {
+ String id = mDeviceStatus.keyAt(i);
+ Integer status = mDeviceStatus.valueAt(i);
+ postSingleUpdate(callback, handler, id, status);
+ }
+ }
+
+ private void onStatusChangedLocked(int status, String id) {
+ if (DEBUG) {
+ Log.v(TAG,
+ String.format("Camera id %s has status changed to 0x%x", id, status));
+ }
+
+ if (!validStatus(status)) {
+ Log.e(TAG, String.format("Ignoring invalid device %s status 0x%x", id,
+ status));
+ return;
+ }
+
+ Integer oldStatus = mDeviceStatus.put(id, status);
+
+ if (oldStatus != null && oldStatus == status) {
+ if (DEBUG) {
+ Log.v(TAG, String.format(
+ "Device status changed to 0x%x, which is what it already was",
+ status));
+ }
+ return;
+ }
+
+ // TODO: consider abstracting out this state minimization + transition
+ // logic into a separate, more easily testable class, e.g.:
+ // (new State()).addState(STATE_AVAILABLE)
+ //              .addState(STATE_NOT_AVAILABLE)
+ //              .addTransition(STATUS_PRESENT, STATE_AVAILABLE)
+ //              .addTransition(STATUS_NOT_PRESENT, STATE_NOT_AVAILABLE)
+ //              .addTransition(STATUS_ENUMERATING, STATE_NOT_AVAILABLE)
+ //              .addTransition(STATUS_NOT_AVAILABLE, STATE_NOT_AVAILABLE);
+
+ // Translate all the statuses to either 'available' or 'not available'
+ // available -> available => no new update
+ // not available -> not available => no new update
+ if (oldStatus != null && isAvailable(status) == isAvailable(oldStatus)) {
+ if (DEBUG) {
+ Log.v(TAG,
+ String.format(
+ "Device status was previously available (%b), " +
+ " and is now again available (%b)" +
+ "so no new client visible update will be sent",
+ isAvailable(oldStatus), isAvailable(status)));
+ }
+ return;
+ }
+
+ final int callbackCount = mCallbackMap.size();
+ for (int i = 0; i < callbackCount; i++) {
+ Handler handler = mCallbackMap.valueAt(i);
+ final AvailabilityCallback callback = mCallbackMap.keyAt(i);
+
+ postSingleUpdate(callback, handler, id, status);
+ }
+ } // onStatusChangedLocked
+
+ private void updateTorchCallbackLocked(TorchCallback callback, Handler handler) {
+ for (int i = 0; i < mTorchStatus.size(); i++) {
+ String id = mTorchStatus.keyAt(i);
+ Integer status = mTorchStatus.valueAt(i);
+ postSingleTorchUpdate(callback, handler, id, status);
+ }
+ }
+
+ private void onTorchStatusChangedLocked(int status, String id) {
+ if (DEBUG) {
+ Log.v(TAG,
+ String.format("Camera id %s has torch status changed to 0x%x", id, status));
+ }
+
+ if (!validTorchStatus(status)) {
+ Log.e(TAG, String.format("Ignoring invalid device %s torch status 0x%x", id,
+ status));
+ return;
+ }
+
+ Integer oldStatus = mTorchStatus.put(id, status);
+ if (oldStatus != null && oldStatus == status) {
+ if (DEBUG) {
+ Log.v(TAG, String.format(
+ "Torch status changed to 0x%x, which is what it already was",
+ status));
+ }
+ return;
+ }
+
+ final int callbackCount = mTorchCallbackMap.size();
+ for (int i = 0; i < callbackCount; i++) {
+ final Handler handler = mTorchCallbackMap.valueAt(i);
+ final TorchCallback callback = mTorchCallbackMap.keyAt(i);
+ postSingleTorchUpdate(callback, handler, id, status);
+ }
+ } // onTorchStatusChangedLocked
+
+ /**
+ * Register a callback to be notified about camera device availability with the
+ * global listener singleton.
+ *
+ * @param callback the new callback to send camera availability notices to
+ * @param handler The handler on which the callback should be invoked. May not be null.
+ */
+ public void registerAvailabilityCallback(AvailabilityCallback callback, Handler handler) {
+ synchronized (mLock) {
+ connectCameraServiceLocked();
+
+ Handler oldHandler = mCallbackMap.put(callback, handler);
+ // For new callbacks, provide initial availability information
+ if (oldHandler == null) {
+ updateCallbackLocked(callback, handler);
+ }
+
+ // If not connected to camera service, schedule a reconnect to camera service.
+ if (mCameraService == null) {
+ scheduleCameraServiceReconnectionLocked();
+ }
+ }
+ }
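+
+ // A minimal app-side usage sketch (illustrative only, not part of this class):
+ // registration goes through the public CameraManager wrapper, and "context" and
+ // "handler" are assumed to exist in the caller's scope.
+ //
+ //   CameraManager manager =
+ //           (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
+ //   manager.registerAvailabilityCallback(new CameraManager.AvailabilityCallback() {
+ //       @Override
+ //       public void onCameraAvailable(String cameraId) { /* camera usable */ }
+ //       @Override
+ //       public void onCameraUnavailable(String cameraId) { /* camera in use or gone */ }
+ //   }, handler);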
+
+ /**
+ * Remove a previously-added callback; the callback will no longer receive connection and
+ * disconnection callbacks, and is no longer referenced by the global listener singleton.
+ *
+ * @param callback The callback to remove from the notification list
+ */
+ public void unregisterAvailabilityCallback(AvailabilityCallback callback) {
+ synchronized (mLock) {
+ mCallbackMap.remove(callback);
+ }
+ }
+
+ public void registerTorchCallback(TorchCallback callback, Handler handler) {
+ synchronized (mLock) {
+ connectCameraServiceLocked();
+
+ Handler oldHandler = mTorchCallbackMap.put(callback, handler);
+ // For new callbacks, provide initial torch information
+ if (oldHandler == null) {
+ updateTorchCallbackLocked(callback, handler);
+ }
+
+ // If not connected to camera service, schedule a reconnect to camera service.
+ if (mCameraService == null) {
+ scheduleCameraServiceReconnectionLocked();
+ }
+ }
+ }
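+
+ // A minimal app-side usage sketch (illustrative only; "manager", "handler", and
+ // "cameraId" are assumed to exist in the caller's scope). setTorchMode() on the
+ // public CameraManager drives the setTorchMode() path in this class.
+ //
+ //   manager.registerTorchCallback(new CameraManager.TorchCallback() {
+ //       @Override
+ //       public void onTorchModeChanged(String id, boolean enabled) { /* track state */ }
+ //       @Override
+ //       public void onTorchModeUnavailable(String id) { /* torch held by a camera */ }
+ //   }, handler);
+ //   try {
+ //       manager.setTorchMode(cameraId, true);
+ //   } catch (CameraAccessException e) {
+ //       // camera service unavailable, or the camera has no flash unit
+ //   }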
+
+ public void unregisterTorchCallback(TorchCallback callback) {
+ synchronized (mLock) {
+ mTorchCallbackMap.remove(callback);
+ }
+ }
+
+ /**
+ * Callback from camera service notifying the process about camera availability changes
+ */
+ @Override
+ public void onStatusChanged(int status, String cameraId) throws RemoteException {
+ synchronized (mLock) {
+ onStatusChangedLocked(status, cameraId);
+ }
+ }
+
+ @Override
+ public void onTorchStatusChanged(int status, String cameraId) throws RemoteException {
+ synchronized (mLock) {
+ onTorchStatusChangedLocked(status, cameraId);
+ }
+ }
+
+ /**
+ * Try to connect to the camera service after some delay, if any client has registered a
+ * camera availability callback or a torch status callback.
+ */
+ private void scheduleCameraServiceReconnectionLocked() {
+ final Handler handler;
+
+ if (mCallbackMap.size() > 0) {
+ handler = mCallbackMap.valueAt(0);
+ } else if (mTorchCallbackMap.size() > 0) {
+ handler = mTorchCallbackMap.valueAt(0);
+ } else {
+ // No need to reconnect to the camera service if no client has registered a callback.
+ return;
+ }
+
+ if (DEBUG) {
+ Log.v(TAG, "Reconnecting Camera Service in " + CAMERA_SERVICE_RECONNECT_DELAY_MS +
+ " ms");
+ }
+
+ handler.postDelayed(
+ new Runnable() {
+ @Override
+ public void run() {
+ ICameraService cameraService = getCameraService();
+ if (cameraService == null) {
+ synchronized (mLock) {
+ if (DEBUG) {
+ Log.v(TAG, "Reconnecting Camera Service failed.");
+ }
+ scheduleCameraServiceReconnectionLocked();
+ }
+ }
+ }
+ },
+ CAMERA_SERVICE_RECONNECT_DELAY_MS);
+ }
+
+ /**
+ * Listener for camera service death.
+ *
+ * <p>The camera service isn't supposed to die under any normal circumstances, but it can be
+ * turned off during debugging, or it can crash due to bugs. So detect that, null out the
+ * interface object, and let the next calls to the manager try to reconnect.</p>
+ */
+ public void binderDied() {
+ synchronized (mLock) {
+ // Only do this once per service death
+ if (mCameraService == null) return;
+
+ mCameraService = null;
+
+ // Tell listeners that the cameras and torch modes are unavailable and schedule a
+ // reconnection to camera service. When camera service is reconnected, the camera
+ // and torch statuses will be updated.
+ for (int i = 0; i < mDeviceStatus.size(); i++) {
+ String cameraId = mDeviceStatus.keyAt(i);
+ onStatusChangedLocked(ICameraServiceListener.STATUS_NOT_PRESENT, cameraId);
+ }
+ for (int i = 0; i < mTorchStatus.size(); i++) {
+ String cameraId = mTorchStatus.keyAt(i);
+ onTorchStatusChangedLocked(ICameraServiceListener.TORCH_STATUS_NOT_AVAILABLE,
+ cameraId);
+ }
+
+ scheduleCameraServiceReconnectionLocked();
+ }
+ }
+
+ } // CameraManagerGlobal
+
+} // CameraManager
diff --git a/android/hardware/camera2/CameraMetadata.java b/android/hardware/camera2/CameraMetadata.java
new file mode 100644
index 00000000..8c8c49fa
--- /dev/null
+++ b/android/hardware/camera2/CameraMetadata.java
@@ -0,0 +1,2872 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2;
+
+import android.annotation.NonNull;
+import android.hardware.camera2.impl.CameraMetadataNative;
+import android.hardware.camera2.impl.PublicKey;
+import android.hardware.camera2.impl.SyntheticKey;
+import android.util.Log;
+
+import java.lang.reflect.Field;
+import java.lang.reflect.Modifier;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * The base class for camera controls and information.
+ *
+ * <p>
+ * This class defines the basic key/value map used for querying for camera
+ * characteristics or capture results, and for setting camera request
+ * parameters.
+ * </p>
+ *
+ * <p>
+ * All instances of CameraMetadata are immutable. The list of keys with {@link #getKeys()}
+ * never changes, nor do the values returned by any key with {@code #get} throughout
+ * the lifetime of the object.
+ * </p>
+ *
+ * @see CameraDevice
+ * @see CameraManager
+ * @see CameraCharacteristics
+ **/
+public abstract class CameraMetadata<TKey> {
+
+ private static final String TAG = "CameraMetadataAb";
+ private static final boolean DEBUG = false;
+ private CameraMetadataNative mNativeInstance = null;
+
+ /**
+ * Constructor reserved for subclasses. The metadata field definitions can be
+ * found in {@link CameraCharacteristics}, {@link CaptureResult}, and
+ * {@link CaptureRequest}.
+ *
+ * @hide
+ */
+ protected CameraMetadata() {
+ }
+
+ /**
+ * Get a camera metadata field value.
+ *
+ * <p>The field definitions can be
+ * found in {@link CameraCharacteristics}, {@link CaptureResult}, and
+ * {@link CaptureRequest}.</p>
+ *
+ * <p>Querying the value for the same key more than once will return a value
+ * which is equal to the previously queried value.</p>
+ *
+ * @throws IllegalArgumentException if the key was not valid
+ *
+ * @param key The metadata field to read.
+ * @return The value of that key, or {@code null} if the field is not set.
+ *
+ * @hide
+ */
+ protected abstract <T> T getProtected(TKey key);
+
+ /**
+ * @hide
+ */
+ protected void setNativeInstance(CameraMetadataNative nativeInstance) {
+ mNativeInstance = nativeInstance;
+ }
+
+ /**
+ * @hide
+ */
+ protected abstract Class<TKey> getKeyClass();
+
+ /**
+ * Returns a list of the keys contained in this map.
+ *
+ * <p>The list returned is not modifiable, so any attempts to modify it will throw
+ * an {@code UnsupportedOperationException}.</p>
+ *
+ * <p>All values retrieved by a key from this list with {@code #get} are guaranteed to be
+ * non-{@code null}. Each key is only listed once in the list. The order of the keys
+ * is undefined.</p>
+ *
+ * @return List of the keys contained in this map.
+ */
+ @SuppressWarnings("unchecked")
+ @NonNull
+ public List<TKey> getKeys() {
+ Class<CameraMetadata<TKey>> thisClass = (Class<CameraMetadata<TKey>>) getClass();
+ return Collections.unmodifiableList(
+ getKeys(thisClass, getKeyClass(), this, /*filterTags*/null));
+ }
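+
+ // A minimal usage sketch (illustrative only; assumes "chars" is a
+ // CameraCharacteristics instance obtained from
+ // CameraManager#getCameraCharacteristics):
+ //
+ //   for (CameraCharacteristics.Key<?> key : chars.getKeys()) {
+ //       Log.d(TAG, key.getName() + " -> " + chars.get(key));
+ //   }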
+
+ /**
+ * Return a list of all the Key<?> that are declared as fields inside the class
+ * {@code type}.
+ *
+ * <p>
+ * Optionally, if {@code instance} is not null, then filter out any keys with null values.
+ * </p>
+ *
+ * <p>
+ * Optionally, if {@code filterTags} is not {@code null}, then filter out any keys
+ * whose native {@code tag} is not in {@code filterTags}. The {@code filterTags} array will be
+ * sorted as a side effect.
+ * </p>
+ */
+ /*package*/ @SuppressWarnings("unchecked")
+ <TKey> ArrayList<TKey> getKeys(
+ Class<?> type, Class<TKey> keyClass,
+ CameraMetadata<TKey> instance,
+ int[] filterTags) {
+
+ if (DEBUG) Log.v(TAG, "getKeysStatic for " + type);
+
+ // TotalCaptureResult does not have any of the keys on it, use CaptureResult instead
+ if (type.equals(TotalCaptureResult.class)) {
+ type = CaptureResult.class;
+ }
+
+ if (filterTags != null) {
+ Arrays.sort(filterTags);
+ }
+
+ ArrayList<TKey> keyList = new ArrayList<TKey>();
+
+ Field[] fields = type.getDeclaredFields();
+ for (Field field : fields) {
+ // Filter for Keys that are public
+ if (field.getType().isAssignableFrom(keyClass) &&
+ (field.getModifiers() & Modifier.PUBLIC) != 0) {
+
+ TKey key;
+ try {
+ key = (TKey) field.get(instance);
+ } catch (IllegalAccessException e) {
+ throw new AssertionError("Unexpected IllegalAccessException reading a public key field", e);
+ } catch (IllegalArgumentException e) {
+ throw new AssertionError("Unexpected IllegalArgumentException reading a key field", e);
+ }
+
+ if (instance == null || instance.getProtected(key) != null) {
+ if (shouldKeyBeAdded(key, field, filterTags)) {
+ keyList.add(key);
+
+ if (DEBUG) {
+ Log.v(TAG, "getKeys - key was added - " + key);
+ }
+ } else if (DEBUG) {
+ Log.v(TAG, "getKeys - key was filtered - " + key);
+ }
+ }
+ }
+ }
+
+ if (mNativeInstance == null) {
+ return keyList;
+ }
+
+ ArrayList<TKey> vendorKeys = mNativeInstance.getAllVendorKeys(keyClass);
+
+ if (vendorKeys != null) {
+ for (TKey k : vendorKeys) {
+ String keyName;
+ long vendorId;
+ if (k instanceof CaptureRequest.Key<?>) {
+ keyName = ((CaptureRequest.Key<?>) k).getName();
+ vendorId = ((CaptureRequest.Key<?>) k).getVendorId();
+ } else if (k instanceof CaptureResult.Key<?>) {
+ keyName = ((CaptureResult.Key<?>) k).getName();
+ vendorId = ((CaptureResult.Key<?>) k).getVendorId();
+ } else if (k instanceof CameraCharacteristics.Key<?>) {
+ keyName = ((CameraCharacteristics.Key<?>) k).getName();
+ vendorId = ((CameraCharacteristics.Key<?>) k).getVendorId();
+ } else {
+ continue;
+ }
+
+ if (filterTags == null || Arrays.binarySearch(filterTags,
+ CameraMetadataNative.getTag(keyName, vendorId)) >= 0) {
+ keyList.add(k);
+ }
+ }
+ }
+
+ return keyList;
+ }
+
+ @SuppressWarnings("rawtypes")
+ private static <TKey> boolean shouldKeyBeAdded(TKey key, Field field, int[] filterTags) {
+ if (key == null) {
+ throw new NullPointerException("key must not be null");
+ }
+
+ CameraMetadataNative.Key nativeKey;
+
+ /*
+ * Get the native key from the public API key
+ */
+ if (key instanceof CameraCharacteristics.Key) {
+ nativeKey = ((CameraCharacteristics.Key)key).getNativeKey();
+ } else if (key instanceof CaptureResult.Key) {
+ nativeKey = ((CaptureResult.Key)key).getNativeKey();
+ } else if (key instanceof CaptureRequest.Key) {
+ nativeKey = ((CaptureRequest.Key)key).getNativeKey();
+ } else {
+ // Reject fields that aren't a key
+ throw new IllegalArgumentException("key type must be that of a metadata key");
+ }
+
+ if (field.getAnnotation(PublicKey.class) == null) {
+ // Never expose @hide keys up to the API user
+ return false;
+ }
+
+ // No filtering necessary
+ if (filterTags == null) {
+ return true;
+ }
+
+ if (field.getAnnotation(SyntheticKey.class) != null) {
+ // This key is synthetic, so calling #getTag would throw an IllegalArgumentException
+
+ // TODO: don't just assume all public+synthetic keys are always available
+ return true;
+ }
+
+ /*
+ * Regular key: look up its native tag and see if it's in filterTags
+ */
+
+ int keyTag = nativeKey.getTag();
+
+ // non-negative result is returned iff the value is in the array
+ return Arrays.binarySearch(filterTags, keyTag) >= 0;
+ }
+
+ /*@O~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
+ * The enum values below this point are generated from metadata
+ * definitions in /system/media/camera/docs. Do not modify by hand or
+ * modify the comment blocks at the start or end.
+ *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~*/
+
+ //
+ // Enumeration values for CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION
+ //
+
+ /**
+ * <p>The lens focus distance is not accurate, and the units used for
+ * {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance} do not correspond to any physical units.</p>
+ * <p>Setting the lens to the same focus distance on separate occasions may
+ * result in a different real focus distance, depending on factors such
+ * as the orientation of the device, the age of the focusing mechanism,
+ * and the device temperature. The focus distance value will still be
+ * in the range of <code>[0, {@link CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE android.lens.info.minimumFocusDistance}]</code>, where 0
+ * represents the farthest focus.</p>
+ *
+ * @see CaptureRequest#LENS_FOCUS_DISTANCE
+ * @see CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE
+ * @see CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION
+ */
+ public static final int LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED = 0;
+
+ /**
+ * <p>The lens focus distance is measured in diopters.</p>
+ * <p>However, setting the lens to the same focus distance
+ * on separate occasions may result in a different real
+ * focus distance, depending on factors such as the
+ * orientation of the device, the age of the focusing
+ * mechanism, and the device temperature.</p>
+ * @see CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION
+ */
+ public static final int LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE = 1;
+
+ /**
+ * <p>The lens focus distance is measured in diopters, and
+ * is calibrated.</p>
+ * <p>The lens mechanism is calibrated so that setting the
+ * same focus distance is repeatable on multiple
+ * occasions with good accuracy, and the focus distance
+ * corresponds to the real physical distance to the plane
+ * of best focus.</p>
+ * @see CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION
+ */
+ public static final int LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED = 2;
+
+ //
+ // Enumeration values for CameraCharacteristics#LENS_FACING
+ //
+
+ /**
+ * <p>The camera device faces the same direction as the device's screen.</p>
+ * @see CameraCharacteristics#LENS_FACING
+ */
+ public static final int LENS_FACING_FRONT = 0;
+
+ /**
+ * <p>The camera device faces the opposite direction as the device's screen.</p>
+ * @see CameraCharacteristics#LENS_FACING
+ */
+ public static final int LENS_FACING_BACK = 1;
+
+ /**
+ * <p>The camera device is an external camera, and has no fixed facing relative to the
+ * device's screen.</p>
+ * @see CameraCharacteristics#LENS_FACING
+ */
+ public static final int LENS_FACING_EXTERNAL = 2;
+
+ //
+ // Enumeration values for CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ //
+
+ /**
+ * <p>The minimal set of capabilities that every camera
+ * device (regardless of {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel})
+ * supports.</p>
+ * <p>This capability is listed by all normal devices, and
+ * indicates that the camera device has a feature set
+ * that's comparable to the baseline requirements for the
+ * older android.hardware.Camera API.</p>
+ * <p>Devices with the DEPTH_OUTPUT capability might not list this
+ * capability, indicating that they support only depth measurement,
+ * not standard color output.</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ */
+ public static final int REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE = 0;
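+
+ // A minimal usage sketch for checking capabilities (illustrative only; assumes
+ // "chars" is a CameraCharacteristics instance):
+ //
+ //   int[] caps = chars.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
+ //   boolean backwardCompatible = false;
+ //   for (int c : caps) {
+ //       if (c == REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE) {
+ //           backwardCompatible = true;
+ //       }
+ //   }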
+
+ /**
+ * <p>The camera device can be manually controlled (3A algorithms such
+ * as auto-exposure, and auto-focus can be bypassed).
+ * The camera device supports basic manual control of the sensor image
+ * acquisition related stages. This means the following controls are
+ * guaranteed to be supported:</p>
+ * <ul>
+ * <li>Manual frame duration control<ul>
+ * <li>{@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}</li>
+ * <li>{@link CameraCharacteristics#SENSOR_INFO_MAX_FRAME_DURATION android.sensor.info.maxFrameDuration}</li>
+ * </ul>
+ * </li>
+ * <li>Manual exposure control<ul>
+ * <li>{@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime}</li>
+ * <li>{@link CameraCharacteristics#SENSOR_INFO_EXPOSURE_TIME_RANGE android.sensor.info.exposureTimeRange}</li>
+ * </ul>
+ * </li>
+ * <li>Manual sensitivity control<ul>
+ * <li>{@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}</li>
+ * <li>{@link CameraCharacteristics#SENSOR_INFO_SENSITIVITY_RANGE android.sensor.info.sensitivityRange}</li>
+ * </ul>
+ * </li>
+ * <li>Manual lens control (if the lens is adjustable)<ul>
+ * <li>android.lens.*</li>
+ * </ul>
+ * </li>
+ * <li>Manual flash control (if a flash unit is present)<ul>
+ * <li>android.flash.*</li>
+ * </ul>
+ * </li>
+ * <li>Manual black level locking<ul>
+ * <li>{@link CaptureRequest#BLACK_LEVEL_LOCK android.blackLevel.lock}</li>
+ * </ul>
+ * </li>
+ * <li>Auto exposure lock<ul>
+ * <li>{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock}</li>
+ * </ul>
+ * </li>
+ * </ul>
+ * <p>If any of the above 3A algorithms are enabled, then the camera
+ * device will accurately report the values applied by 3A in the
+ * result.</p>
+ * <p>A given camera device may also support additional manual sensor controls,
+ * but this capability only covers the above list of controls.</p>
+ * <p>If this is supported, {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} will
+ * additionally return a min frame duration that is greater than
+ * zero for each supported size-format combination.</p>
+ *
+ * @see CaptureRequest#BLACK_LEVEL_LOCK
+ * @see CaptureRequest#CONTROL_AE_LOCK
+ * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
+ * @see CaptureRequest#SENSOR_EXPOSURE_TIME
+ * @see CaptureRequest#SENSOR_FRAME_DURATION
+ * @see CameraCharacteristics#SENSOR_INFO_EXPOSURE_TIME_RANGE
+ * @see CameraCharacteristics#SENSOR_INFO_MAX_FRAME_DURATION
+ * @see CameraCharacteristics#SENSOR_INFO_SENSITIVITY_RANGE
+ * @see CaptureRequest#SENSOR_SENSITIVITY
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ */
+ public static final int REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR = 1;
+
+ /**
+ * <p>The camera device post-processing stages can be manually controlled.
+ * The camera device supports basic manual control of the image post-processing
+ * stages. This means the following controls are guaranteed to be supported:</p>
+ * <ul>
+ * <li>
+ * <p>Manual tonemap control</p>
+ * <ul>
+ * <li>{@link CaptureRequest#TONEMAP_CURVE android.tonemap.curve}</li>
+ * <li>{@link CaptureRequest#TONEMAP_MODE android.tonemap.mode}</li>
+ * <li>{@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}</li>
+ * <li>{@link CaptureRequest#TONEMAP_GAMMA android.tonemap.gamma}</li>
+ * <li>{@link CaptureRequest#TONEMAP_PRESET_CURVE android.tonemap.presetCurve}</li>
+ * </ul>
+ * </li>
+ * <li>
+ * <p>Manual white balance control</p>
+ * <ul>
+ * <li>{@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}</li>
+ * <li>{@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains}</li>
+ * </ul>
+ * </li>
+ * <li>Manual lens shading map control<ul>
+ * <li>{@link CaptureRequest#SHADING_MODE android.shading.mode}</li>
+ * <li>{@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE android.statistics.lensShadingMapMode}</li>
+ * <li>android.statistics.lensShadingMap</li>
+ * <li>android.lens.info.shadingMapSize</li>
+ * </ul>
+ * </li>
+ * <li>Manual aberration correction control (if aberration correction is supported)<ul>
+ * <li>{@link CaptureRequest#COLOR_CORRECTION_ABERRATION_MODE android.colorCorrection.aberrationMode}</li>
+ * <li>{@link CameraCharacteristics#COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES android.colorCorrection.availableAberrationModes}</li>
+ * </ul>
+ * </li>
+ * <li>Auto white balance lock<ul>
+ * <li>{@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock}</li>
+ * </ul>
+ * </li>
+ * </ul>
+ * <p>If auto white balance is enabled, then the camera device
+ * will accurately report the values applied by AWB in the result.</p>
+ * <p>A given camera device may also support additional post-processing
+ * controls, but this capability only covers the above list of controls.</p>
+ *
+ * @see CaptureRequest#COLOR_CORRECTION_ABERRATION_MODE
+ * @see CameraCharacteristics#COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES
+ * @see CaptureRequest#COLOR_CORRECTION_GAINS
+ * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
+ * @see CaptureRequest#CONTROL_AWB_LOCK
+ * @see CaptureRequest#SHADING_MODE
+ * @see CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE
+ * @see CaptureRequest#TONEMAP_CURVE
+ * @see CaptureRequest#TONEMAP_GAMMA
+ * @see CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS
+ * @see CaptureRequest#TONEMAP_MODE
+ * @see CaptureRequest#TONEMAP_PRESET_CURVE
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ */
+ public static final int REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING = 2;
+
+ /**
+ * <p>The camera device supports outputting RAW buffers and
+ * metadata for interpreting them.</p>
+ * <p>Devices supporting the RAW capability allow both for
+ * saving DNG files, and for direct application processing of
+ * raw sensor images.</p>
+ * <ul>
+ * <li>RAW_SENSOR is supported as an output format.</li>
+ * <li>The maximum available resolution for RAW_SENSOR streams
+ * will match either the value in
+ * {@link CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE android.sensor.info.pixelArraySize} or
+ * {@link CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE android.sensor.info.preCorrectionActiveArraySize}.</li>
+ * <li>All DNG-related optional metadata entries are provided
+ * by the camera device.</li>
+ * </ul>
+ *
+ * @see CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE
+ * @see CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ */
+ public static final int REQUEST_AVAILABLE_CAPABILITIES_RAW = 3;
+
+ /**
+ * <p>The camera device supports the Zero Shutter Lag reprocessing use case.</p>
+ * <ul>
+ * <li>One input stream is supported, that is, <code>{@link CameraCharacteristics#REQUEST_MAX_NUM_INPUT_STREAMS android.request.maxNumInputStreams} == 1</code>.</li>
+ * <li>{@link android.graphics.ImageFormat#PRIVATE } is supported as an output/input format,
+ * that is, {@link android.graphics.ImageFormat#PRIVATE } is included in the lists of
+ * formats returned by {@link android.hardware.camera2.params.StreamConfigurationMap#getInputFormats } and {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats }.</li>
+ * <li>{@link android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput }
+ * returns non-empty int[] for each supported input format returned by {@link android.hardware.camera2.params.StreamConfigurationMap#getInputFormats }.</li>
+ * <li>Each size returned by {@link android.hardware.camera2.params.StreamConfigurationMap#getInputSizes getInputSizes(ImageFormat.PRIVATE)} is also included in {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes getOutputSizes(ImageFormat.PRIVATE)}</li>
+ * <li>Using {@link android.graphics.ImageFormat#PRIVATE } does not cause a frame rate drop
+ * relative to the sensor's maximum capture rate (at that resolution).</li>
+ * <li>{@link android.graphics.ImageFormat#PRIVATE } will be reprocessable into both
+ * {@link android.graphics.ImageFormat#YUV_420_888 } and
+ * {@link android.graphics.ImageFormat#JPEG } formats.</li>
+ * <li>The maximum available resolution for PRIVATE streams
+ * (both input/output) will match the maximum available
+ * resolution of JPEG streams.</li>
+ * <li>Static metadata {@link CameraCharacteristics#REPROCESS_MAX_CAPTURE_STALL android.reprocess.maxCaptureStall}.</li>
+ * <li>Only the below controls are effective for reprocessing requests and
+ * will be present in capture results; other controls in reprocess
+ * requests will be ignored by the camera device.<ul>
+ * <li>android.jpeg.*</li>
+ * <li>{@link CaptureRequest#NOISE_REDUCTION_MODE android.noiseReduction.mode}</li>
+ * <li>{@link CaptureRequest#EDGE_MODE android.edge.mode}</li>
+ * </ul>
+ * </li>
+ * <li>{@link CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES android.noiseReduction.availableNoiseReductionModes} and
+ * {@link CameraCharacteristics#EDGE_AVAILABLE_EDGE_MODES android.edge.availableEdgeModes} will both list ZERO_SHUTTER_LAG as a supported mode.</li>
+ * </ul>
+ *
+ * @see CameraCharacteristics#EDGE_AVAILABLE_EDGE_MODES
+ * @see CaptureRequest#EDGE_MODE
+ * @see CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES
+ * @see CaptureRequest#NOISE_REDUCTION_MODE
+ * @see CameraCharacteristics#REPROCESS_MAX_CAPTURE_STALL
+ * @see CameraCharacteristics#REQUEST_MAX_NUM_INPUT_STREAMS
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ */
+ public static final int REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING = 4;
+
+ /**
+ * <p>The camera device supports accurately reporting the sensor settings for many of
+ * the sensor controls while the built-in 3A algorithm is running. This allows
+ * reporting of sensor settings even when these settings cannot be manually changed.</p>
+ * <p>The values reported for the following controls are guaranteed to be available
+ * in the CaptureResult, including when 3A is enabled:</p>
+ * <ul>
+ * <li>Exposure control<ul>
+ * <li>{@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime}</li>
+ * </ul>
+ * </li>
+ * <li>Sensitivity control<ul>
+ * <li>{@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}</li>
+ * </ul>
+ * </li>
+ * <li>Lens controls (if the lens is adjustable)<ul>
+ * <li>{@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance}</li>
+ * <li>{@link CaptureRequest#LENS_APERTURE android.lens.aperture}</li>
+ * </ul>
+ * </li>
+ * </ul>
+ * <p>This capability is a subset of the MANUAL_SENSOR control capability, and will
+ * always be included if the MANUAL_SENSOR capability is available.</p>
+ *
+ * @see CaptureRequest#LENS_APERTURE
+ * @see CaptureRequest#LENS_FOCUS_DISTANCE
+ * @see CaptureRequest#SENSOR_EXPOSURE_TIME
+ * @see CaptureRequest#SENSOR_SENSITIVITY
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ */
+ public static final int REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS = 5;
+
+ /**
+ * <p>The camera device supports capturing high-resolution images at &gt;= 20 frames per
+ * second, in at least the uncompressed YUV format, when post-processing settings are set
+ * to FAST. Additionally, maximum-resolution images can be captured at &gt;= 10 frames
+ * per second. Here, 'high resolution' means at least 8 megapixels, or the maximum
+ * resolution of the device, whichever is smaller.</p>
+ * <p>More specifically, this means that a size matching the camera device's active array
+ * size is listed as a supported size for the {@link android.graphics.ImageFormat#YUV_420_888 } format in either {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes } or {@link android.hardware.camera2.params.StreamConfigurationMap#getHighResolutionOutputSizes },
+ * with a minimum frame duration for that format and size of either &lt;= 1/20 s, or
+ * &lt;= 1/10 s, respectively; and the {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES android.control.aeAvailableTargetFpsRanges} entry
+ * lists at least one FPS range where the minimum FPS is &gt;= 1 / minimumFrameDuration
+ * for the maximum-size YUV_420_888 format. If that maximum size is listed in {@link android.hardware.camera2.params.StreamConfigurationMap#getHighResolutionOutputSizes },
+ * then the list of resolutions for YUV_420_888 from {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes } contains at
+ * least one resolution &gt;= 8 megapixels, with a minimum frame duration of &lt;= 1/20
+ * s.</p>
+ * <p>If the device supports the {@link android.graphics.ImageFormat#RAW10 } or {@link android.graphics.ImageFormat#RAW12 } formats, then those can also be captured at the same rate
+ * as the maximum-size YUV_420_888 resolution.</p>
+ * <p>If the device supports the PRIVATE_REPROCESSING capability, then the same guarantees
+ * as for the YUV_420_888 format also apply to the {@link android.graphics.ImageFormat#PRIVATE } format.</p>
+ * <p>In addition, the {@link CameraCharacteristics#SYNC_MAX_LATENCY android.sync.maxLatency} field is guaranteed to have a value between 0
+ * and 4, inclusive. {@link CameraCharacteristics#CONTROL_AE_LOCK_AVAILABLE android.control.aeLockAvailable} and {@link CameraCharacteristics#CONTROL_AWB_LOCK_AVAILABLE android.control.awbLockAvailable}
+ * are also guaranteed to be <code>true</code>, so burst capture with these two locks ON yields
+ * consistent image output.</p>
+ *
+ * @see CameraCharacteristics#CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES
+ * @see CameraCharacteristics#CONTROL_AE_LOCK_AVAILABLE
+ * @see CameraCharacteristics#CONTROL_AWB_LOCK_AVAILABLE
+ * @see CameraCharacteristics#SYNC_MAX_LATENCY
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ */
+ public static final int REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE = 6;
+
+ /**
+ * <p>The camera device supports the YUV_420_888 reprocessing use case, similar to
+ * PRIVATE_REPROCESSING. This capability requires the camera device to support the
+ * following:</p>
+ * <ul>
+ * <li>One input stream is supported, that is, <code>{@link CameraCharacteristics#REQUEST_MAX_NUM_INPUT_STREAMS android.request.maxNumInputStreams} == 1</code>.</li>
+ * <li>{@link android.graphics.ImageFormat#YUV_420_888 } is supported as an output/input format, that is,
+ * YUV_420_888 is included in the lists of formats returned by
+ * {@link android.hardware.camera2.params.StreamConfigurationMap#getInputFormats } and
+ * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats }.</li>
+ * <li>{@link android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput }
+ * returns non-empty int[] for each supported input format returned by {@link android.hardware.camera2.params.StreamConfigurationMap#getInputFormats }.</li>
+ * <li>Each size returned by {@link android.hardware.camera2.params.StreamConfigurationMap#getInputSizes getInputSizes(YUV_420_888)} is also included in {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes getOutputSizes(YUV_420_888)}</li>
+ * <li>Using {@link android.graphics.ImageFormat#YUV_420_888 } does not cause a frame rate drop
+ * relative to the sensor's maximum capture rate (at that resolution).</li>
+ * <li>{@link android.graphics.ImageFormat#YUV_420_888 } will be reprocessable into both
+ * {@link android.graphics.ImageFormat#YUV_420_888 } and {@link android.graphics.ImageFormat#JPEG } formats.</li>
+ * <li>The maximum available resolution for {@link android.graphics.ImageFormat#YUV_420_888 } streams (both input/output) will match the
+ * maximum available resolution of {@link android.graphics.ImageFormat#JPEG } streams.</li>
+ * <li>Static metadata {@link CameraCharacteristics#REPROCESS_MAX_CAPTURE_STALL android.reprocess.maxCaptureStall}.</li>
+ * <li>Only the below controls are effective for reprocessing requests and will be present
+ * in capture results. The reprocess requests are from the original capture results that
+ * are associated with the intermediate {@link android.graphics.ImageFormat#YUV_420_888 }
+ * output buffers. All other controls in the reprocess requests will be ignored by the
+ * camera device.<ul>
+ * <li>android.jpeg.*</li>
+ * <li>{@link CaptureRequest#NOISE_REDUCTION_MODE android.noiseReduction.mode}</li>
+ * <li>{@link CaptureRequest#EDGE_MODE android.edge.mode}</li>
+ * <li>{@link CaptureRequest#REPROCESS_EFFECTIVE_EXPOSURE_FACTOR android.reprocess.effectiveExposureFactor}</li>
+ * </ul>
+ * </li>
+ * <li>{@link CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES android.noiseReduction.availableNoiseReductionModes} and
+ * {@link CameraCharacteristics#EDGE_AVAILABLE_EDGE_MODES android.edge.availableEdgeModes} will both list ZERO_SHUTTER_LAG as a supported mode.</li>
+ * </ul>
+ *
+ * @see CameraCharacteristics#EDGE_AVAILABLE_EDGE_MODES
+ * @see CaptureRequest#EDGE_MODE
+ * @see CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES
+ * @see CaptureRequest#NOISE_REDUCTION_MODE
+ * @see CaptureRequest#REPROCESS_EFFECTIVE_EXPOSURE_FACTOR
+ * @see CameraCharacteristics#REPROCESS_MAX_CAPTURE_STALL
+ * @see CameraCharacteristics#REQUEST_MAX_NUM_INPUT_STREAMS
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ */
+ public static final int REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING = 7;
+
+ /**
+ * <p>The camera device can produce depth measurements from its field of view.</p>
+ * <p>This capability requires the camera device to support the following:</p>
+ * <ul>
+ * <li>{@link android.graphics.ImageFormat#DEPTH16 } is supported as an output format.</li>
+ * <li>{@link android.graphics.ImageFormat#DEPTH_POINT_CLOUD } is optionally supported as an
+ * output format.</li>
+ * <li>This camera device, and all camera devices with the same {@link CameraCharacteristics#LENS_FACING android.lens.facing},
+ * will list the following calibration entries in both
+ * {@link android.hardware.camera2.CameraCharacteristics } and
+ * {@link android.hardware.camera2.CaptureResult }:<ul>
+ * <li>{@link CameraCharacteristics#LENS_POSE_TRANSLATION android.lens.poseTranslation}</li>
+ * <li>{@link CameraCharacteristics#LENS_POSE_ROTATION android.lens.poseRotation}</li>
+ * <li>{@link CameraCharacteristics#LENS_INTRINSIC_CALIBRATION android.lens.intrinsicCalibration}</li>
+ * <li>{@link CameraCharacteristics#LENS_RADIAL_DISTORTION android.lens.radialDistortion}</li>
+ * </ul>
+ * </li>
+ * <li>The {@link CameraCharacteristics#DEPTH_DEPTH_IS_EXCLUSIVE android.depth.depthIsExclusive} entry is listed by this device.</li>
+ * <li>A LIMITED camera with only the DEPTH_OUTPUT capability does not have to support
+ * normal YUV_420_888, JPEG, and PRIV-format outputs. It only has to support the DEPTH16
+ * format.</li>
+ * </ul>
+ * <p>Generally, depth output operates at a slower frame rate than standard color capture,
+ * so the DEPTH16 and DEPTH_POINT_CLOUD formats will commonly have a stall duration that
+ * should be accounted for (see
+ * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration }).
+ * On a device that supports both depth and color-based output, to enable smooth preview,
+ * using a repeating burst is recommended, where a depth-output target is only included
+ * once every N frames, where N is the ratio between preview output rate and depth output
+ * rate, including depth stall time.</p>
+ *
+ * @see CameraCharacteristics#DEPTH_DEPTH_IS_EXCLUSIVE
+ * @see CameraCharacteristics#LENS_FACING
+ * @see CameraCharacteristics#LENS_INTRINSIC_CALIBRATION
+ * @see CameraCharacteristics#LENS_POSE_ROTATION
+ * @see CameraCharacteristics#LENS_POSE_TRANSLATION
+ * @see CameraCharacteristics#LENS_RADIAL_DISTORTION
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ */
+ public static final int REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT = 8;
+
+ /**
+ * <p>The device supports constrained high speed video recording (frame rate &gt;=120fps)
+ * use case. The camera device will support high speed capture session created by
+ * {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession }, which
+ * only accepts high speed request lists created by
+ * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList }.</p>
+ * <p>A camera device can still support high speed video streaming by advertising the high speed
+ * FPS ranges in {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES android.control.aeAvailableTargetFpsRanges}. In that case, all the normal
+ * per-frame control and synchronization requirements for capture requests apply to
+ * the high speed fps ranges, the same as to all other fps ranges. This capability instead
+ * describes a specialized operating mode with many limitations (see below), which
+ * is only targeted at high speed video recording.</p>
+ * <p>The supported high speed video sizes and fps ranges are specified in
+ * {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges }.
+ * To get desired output frame rates, the application is only allowed to select video size
+ * and FPS range combinations provided by
+ * {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoSizes }.
+ * The fps range can be controlled via {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE android.control.aeTargetFpsRange}.</p>
+ * <p>In this capability, the camera device will override aeMode, awbMode, and afMode to
+ * ON, AUTO, and CONTINUOUS_VIDEO, respectively. All post-processing block mode
+ * controls will be overridden to be FAST. Therefore, no manual control of capture
+ * and post-processing parameters is possible. All other controls operate the
+ * same as when {@link CaptureRequest#CONTROL_MODE android.control.mode} == AUTO. This means that all other
+ * android.control.* fields continue to work, such as</p>
+ * <ul>
+ * <li>{@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE android.control.aeTargetFpsRange}</li>
+ * <li>{@link CaptureRequest#CONTROL_AE_EXPOSURE_COMPENSATION android.control.aeExposureCompensation}</li>
+ * <li>{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock}</li>
+ * <li>{@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock}</li>
+ * <li>{@link CaptureRequest#CONTROL_EFFECT_MODE android.control.effectMode}</li>
+ * <li>{@link CaptureRequest#CONTROL_AE_REGIONS android.control.aeRegions}</li>
+ * <li>{@link CaptureRequest#CONTROL_AF_REGIONS android.control.afRegions}</li>
+ * <li>{@link CaptureRequest#CONTROL_AWB_REGIONS android.control.awbRegions}</li>
+ * <li>{@link CaptureRequest#CONTROL_AF_TRIGGER android.control.afTrigger}</li>
+ * <li>{@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger}</li>
+ * </ul>
+ * <p>Outside of android.control.*, the following controls will work:</p>
+ * <ul>
+ * <li>{@link CaptureRequest#FLASH_MODE android.flash.mode} (TORCH mode only, automatic flash for still capture will not
+ * work since aeMode is ON)</li>
+ * <li>{@link CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE android.lens.opticalStabilizationMode} (if it is supported)</li>
+ * <li>{@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}</li>
+ * <li>{@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode} (if it is supported)</li>
+ * </ul>
+ * <p>For the high speed recording use case, the actual maximum supported frame rate may
+ * be lower than what the camera can output, depending on the destination Surfaces for
+ * the image data. For example, if the destination surface is from a video encoder,
+ * the application needs to check whether the video encoder is capable of supporting the
+ * high frame rate for a given video size, or it will end up with a lower recording
+ * frame rate. If the destination surface is from a preview window, the actual preview frame
+ * rate will be bounded by the screen refresh rate.</p>
+ * <p>The camera device will only support up to 2 simultaneous high speed output surfaces
+ * (preview and recording surfaces) in this mode. The above controls will be effective
+ * only if all of the below conditions are true:</p>
+ * <ul>
+ * <li>The application creates a camera capture session with no more than 2 surfaces via
+ * {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession }. The
+ * targeted surfaces must be a preview surface (either from
+ * {@link android.view.SurfaceView } or {@link android.graphics.SurfaceTexture }) or a
+ * recording surface (either from {@link android.media.MediaRecorder#getSurface } or
+ * {@link android.media.MediaCodec#createInputSurface }).</li>
+ * <li>The stream sizes are selected from the sizes reported by
+ * {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoSizes }.</li>
+ * <li>The FPS ranges are selected from
+ * {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges }.</li>
+ * </ul>
+ * <p>When the above conditions are NOT satisfied,
+ * {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession }
+ * will fail.</p>
+ * <p>Switching to an FPS range that has a different maximum FPS may trigger some camera device
+ * reconfigurations, which may introduce extra latency. It is recommended that
+ * the application avoid unnecessary maximum target FPS changes as much as possible
+ * during high speed streaming.</p>
+ *
+ * @see CameraCharacteristics#CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES
+ * @see CaptureRequest#CONTROL_AE_EXPOSURE_COMPENSATION
+ * @see CaptureRequest#CONTROL_AE_LOCK
+ * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
+ * @see CaptureRequest#CONTROL_AE_REGIONS
+ * @see CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE
+ * @see CaptureRequest#CONTROL_AF_REGIONS
+ * @see CaptureRequest#CONTROL_AF_TRIGGER
+ * @see CaptureRequest#CONTROL_AWB_LOCK
+ * @see CaptureRequest#CONTROL_AWB_REGIONS
+ * @see CaptureRequest#CONTROL_EFFECT_MODE
+ * @see CaptureRequest#CONTROL_MODE
+ * @see CaptureRequest#FLASH_MODE
+ * @see CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE
+ * @see CaptureRequest#SCALER_CROP_REGION
+ * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ */
+ public static final int REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO = 9;
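+
+ // A minimal usage sketch of the constrained high speed flow described above
+ // (illustrative only; "device", "previewSurface", "recorderSurface",
+ // "stateCallback", "captureCallback", "handler", and "request" are assumed to
+ // exist, and error handling is omitted):
+ //
+ //   device.createConstrainedHighSpeedCaptureSession(
+ //           Arrays.asList(previewSurface, recorderSurface), stateCallback, handler);
+ //   // Then, inside stateCallback.onConfigured(CameraCaptureSession session):
+ //   CameraConstrainedHighSpeedCaptureSession hsSession =
+ //           (CameraConstrainedHighSpeedCaptureSession) session;
+ //   List<CaptureRequest> burst = hsSession.createHighSpeedRequestList(request);
+ //   hsSession.setRepeatingBurst(burst, captureCallback, handler);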
+
+ //
+ // Enumeration values for CameraCharacteristics#SCALER_CROPPING_TYPE
+ //
+
+ /**
+ * <p>The camera device only supports centered crop regions.</p>
+ * @see CameraCharacteristics#SCALER_CROPPING_TYPE
+ */
+ public static final int SCALER_CROPPING_TYPE_CENTER_ONLY = 0;
+
+ /**
+ * <p>The camera device supports arbitrarily chosen crop regions.</p>
+ * @see CameraCharacteristics#SCALER_CROPPING_TYPE
+ */
+ public static final int SCALER_CROPPING_TYPE_FREEFORM = 1;
+
+ //
+ // Enumeration values for CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
+ //
+
+ /**
+ * @see CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
+ */
+ public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB = 0;
+
+ /**
+ * @see CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
+ */
+ public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG = 1;
+
+ /**
+ * @see CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
+ */
+ public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG = 2;
+
+ /**
+ * @see CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
+ */
+ public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR = 3;
+
+ /**
+ * <p>Sensor is not Bayer; output has 3 16-bit
+ * values for each pixel, instead of just 1 16-bit value
+ * per pixel.</p>
+ * @see CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
+ */
+ public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGB = 4;
+
+ //
+ // Enumeration values for CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE
+ //
+
+ /**
+ * <p>Timestamps from {@link CaptureResult#SENSOR_TIMESTAMP android.sensor.timestamp} are in nanoseconds and monotonic,
+ * but cannot be compared to timestamps from other subsystems
+ * (e.g. accelerometer, gyro etc.), or other instances of the same or different
+ * camera devices in the same system. Timestamps between streams and results for
+ * a single camera instance are comparable, and the timestamps for all buffers
+ * and the result metadata generated by a single capture are identical.</p>
+ *
+ * @see CaptureResult#SENSOR_TIMESTAMP
+ * @see CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE
+ */
+ public static final int SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN = 0;
+
+ /**
+ * <p>Timestamps from {@link CaptureResult#SENSOR_TIMESTAMP android.sensor.timestamp} are in the same timebase as
+ * {@link android.os.SystemClock#elapsedRealtimeNanos },
+ * and they can be compared to other timestamps using that base.</p>
+ *
+ * @see CaptureResult#SENSOR_TIMESTAMP
+ * @see CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE
+ */
+ public static final int SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME = 1;
+
+ //
+ // Enumeration values for CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1
+ //
+
+ /**
+ * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1
+ */
+ public static final int SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT = 1;
+
+ /**
+ * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1
+ */
+ public static final int SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT = 2;
+
+ /**
+ * <p>Incandescent light</p>
+ * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1
+ */
+ public static final int SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN = 3;
+
+ /**
+ * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1
+ */
+ public static final int SENSOR_REFERENCE_ILLUMINANT1_FLASH = 4;
+
+ /**
+ * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1
+ */
+ public static final int SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER = 9;
+
+ /**
+ * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1
+ */
+ public static final int SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER = 10;
+
+ /**
+ * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1
+ */
+ public static final int SENSOR_REFERENCE_ILLUMINANT1_SHADE = 11;
+
+ /**
+ * <p>D 5700 - 7100K</p>
+ * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1
+ */
+ public static final int SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT = 12;
+
+ /**
+ * <p>N 4600 - 5400K</p>
+ * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1
+ */
+ public static final int SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT = 13;
+
+ /**
+ * <p>W 3900 - 4500K</p>
+ * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1
+ */
+ public static final int SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT = 14;
+
+ /**
+ * <p>WW 3200 - 3700K</p>
+ * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1
+ */
+ public static final int SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT = 15;
+
+ /**
+ * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1
+ */
+ public static final int SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A = 17;
+
+ /**
+ * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1
+ */
+ public static final int SENSOR_REFERENCE_ILLUMINANT1_STANDARD_B = 18;
+
+ /**
+ * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1
+ */
+ public static final int SENSOR_REFERENCE_ILLUMINANT1_STANDARD_C = 19;
+
+ /**
+ * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1
+ */
+ public static final int SENSOR_REFERENCE_ILLUMINANT1_D55 = 20;
+
+ /**
+ * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1
+ */
+ public static final int SENSOR_REFERENCE_ILLUMINANT1_D65 = 21;
+
+ /**
+ * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1
+ */
+ public static final int SENSOR_REFERENCE_ILLUMINANT1_D75 = 22;
+
+ /**
+ * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1
+ */
+ public static final int SENSOR_REFERENCE_ILLUMINANT1_D50 = 23;
+
+ /**
+ * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1
+ */
+ public static final int SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN = 24;
+
+ //
+ // Enumeration values for CameraCharacteristics#LED_AVAILABLE_LEDS
+ //
+
+ /**
+ * <p>android.led.transmit control is used.</p>
+ * @see CameraCharacteristics#LED_AVAILABLE_LEDS
+ * @hide
+ */
+ public static final int LED_AVAILABLE_LEDS_TRANSMIT = 0;
+
+ //
+ // Enumeration values for CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ //
+
+ /**
+ * <p>This camera device does not have enough capabilities to qualify as a <code>FULL</code> device or
+ * better.</p>
+ * <p>Only the stream configurations listed in the <code>LEGACY</code> and <code>LIMITED</code> tables in the
+ * {@link android.hardware.camera2.CameraDevice#createCaptureSession createCaptureSession} documentation are guaranteed to be supported.</p>
+ * <p>All <code>LIMITED</code> devices support the <code>BACKWARD_COMPATIBLE</code> capability, indicating basic
+ * support for color image capture. The only exception is that the device may
+ * alternatively support only the <code>DEPTH_OUTPUT</code> capability, if it can only output depth
+ * measurements and not color images.</p>
+ * <p><code>LIMITED</code> devices and above require the use of {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger}
+ * to lock exposure metering (and calculate flash power, for cameras with flash) before
+ * capturing a high-quality still image.</p>
+ * <p>A <code>LIMITED</code> device that only lists the <code>BACKWARDS_COMPATIBLE</code> capability is only
+ * required to support full-automatic operation and post-processing (<code>OFF</code> is not
+ * supported for {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode}, {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}, or
+ * {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode})</p>
+ * <p>Additional capabilities may optionally be supported by a <code>LIMITED</code>-level device, and
+ * can be checked for in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities}.</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
+ * @see CaptureRequest#CONTROL_AF_MODE
+ * @see CaptureRequest#CONTROL_AWB_MODE
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ */
+ public static final int INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED = 0;
+
+ /**
+ * <p>This camera device is capable of supporting advanced imaging applications.</p>
+ * <p>The stream configurations listed in the <code>FULL</code>, <code>LEGACY</code> and <code>LIMITED</code> tables in the
+ * {@link android.hardware.camera2.CameraDevice#createCaptureSession createCaptureSession} documentation are guaranteed to be supported.</p>
+ * <p>A <code>FULL</code> device will support the below capabilities:</p>
+ * <ul>
+ * <li><code>BURST_CAPTURE</code> capability ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains
+ * <code>BURST_CAPTURE</code>)</li>
+ * <li>Per frame control ({@link CameraCharacteristics#SYNC_MAX_LATENCY android.sync.maxLatency} <code>==</code> PER_FRAME_CONTROL)</li>
+ * <li>Manual sensor control ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains <code>MANUAL_SENSOR</code>)</li>
+ * <li>Manual post-processing control ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains
+ * <code>MANUAL_POST_PROCESSING</code>)</li>
+ * <li>The required exposure time range defined in {@link CameraCharacteristics#SENSOR_INFO_EXPOSURE_TIME_RANGE android.sensor.info.exposureTimeRange}</li>
+ * <li>The required maxFrameDuration defined in {@link CameraCharacteristics#SENSOR_INFO_MAX_FRAME_DURATION android.sensor.info.maxFrameDuration}</li>
+ * </ul>
+ * <p>Note:
+ * Pre-API level 23, FULL devices also supported arbitrary cropping regions
+ * ({@link CameraCharacteristics#SCALER_CROPPING_TYPE android.scaler.croppingType} <code>== FREEFORM</code>); this requirement was relaxed in API level
+ * 23, and <code>FULL</code> devices may only support <code>CENTER_ONLY</code> cropping.</p>
+ *
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ * @see CameraCharacteristics#SCALER_CROPPING_TYPE
+ * @see CameraCharacteristics#SENSOR_INFO_EXPOSURE_TIME_RANGE
+ * @see CameraCharacteristics#SENSOR_INFO_MAX_FRAME_DURATION
+ * @see CameraCharacteristics#SYNC_MAX_LATENCY
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ */
+ public static final int INFO_SUPPORTED_HARDWARE_LEVEL_FULL = 1;
+
+ /**
+ * <p>This camera device is running in backward compatibility mode.</p>
+ * <p>Only the stream configurations listed in the <code>LEGACY</code> table in the {@link android.hardware.camera2.CameraDevice#createCaptureSession createCaptureSession}
+ * documentation are supported.</p>
+ * <p>A <code>LEGACY</code> device does not support per-frame control, manual sensor control, manual
+ * post-processing, or arbitrary cropping regions, and it has relaxed performance constraints.
+ * No additional capabilities beyond <code>BACKWARD_COMPATIBLE</code> will ever be listed by a
+ * <code>LEGACY</code> device in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities}.</p>
+ * <p>In addition, the {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} is not functional on <code>LEGACY</code>
+ * devices. Instead, every request that includes a JPEG-format output target is treated
+ * as triggering a still capture, internally executing a precapture trigger. This may
+ * fire the flash for flash power metering during precapture, and then fire the flash
+ * for the final capture, if a flash is available on the device and the AE mode is set to
+ * enable the flash.</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ */
+ public static final int INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY = 2;
+
+ /**
+ * <p>This camera device is capable of YUV reprocessing and RAW data capture, in addition to
+ * FULL-level capabilities.</p>
+ * <p>The stream configurations listed in the <code>LEVEL_3</code>, <code>RAW</code>, <code>FULL</code>, <code>LEGACY</code> and
+ * <code>LIMITED</code> tables in the {@link android.hardware.camera2.CameraDevice#createCaptureSession createCaptureSession}
+ * documentation are guaranteed to be supported.</p>
+ * <p>The following additional capabilities are guaranteed to be supported:</p>
+ * <ul>
+ * <li><code>YUV_REPROCESSING</code> capability ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains
+ * <code>YUV_REPROCESSING</code>)</li>
+ * <li><code>RAW</code> capability ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains
+ * <code>RAW</code>)</li>
+ * </ul>
+ *
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ */
+ public static final int INFO_SUPPORTED_HARDWARE_LEVEL_3 = 3;
+
+ //
+ // Enumeration values for CameraCharacteristics#SYNC_MAX_LATENCY
+ //
+
+ /**
+ * <p>Every frame has the requests immediately applied.</p>
+ * <p>Changing controls over multiple requests one after another will
+ * produce results that have those controls applied atomically
+ * each frame.</p>
+ * <p>All FULL capability devices will have this as their maxLatency.</p>
+ * @see CameraCharacteristics#SYNC_MAX_LATENCY
+ */
+ public static final int SYNC_MAX_LATENCY_PER_FRAME_CONTROL = 0;
+
+ /**
+ * <p>Each new frame has some subset (potentially the entire set)
+ * of the past requests applied to the camera settings.</p>
+ * <p>By submitting a series of identical requests, the camera device
+ * will eventually have the camera settings applied, but it is
+ * unknown when that exact point will be.</p>
+ * <p>All LEGACY capability devices will have this as their maxLatency.</p>
+ * @see CameraCharacteristics#SYNC_MAX_LATENCY
+ */
+ public static final int SYNC_MAX_LATENCY_UNKNOWN = -1;
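+
+ // A minimal sketch (not part of the SDK source): testing for per-frame control.
+ // The chars argument is assumed to be a CameraCharacteristics obtained elsewhere.
+ private static boolean hasPerFrameControlSketch(CameraCharacteristics chars) {
+     Integer latency = chars.get(CameraCharacteristics.SYNC_MAX_LATENCY);
+     return latency != null && latency == SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
+ }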
+
+ //
+ // Enumeration values for CaptureRequest#COLOR_CORRECTION_MODE
+ //
+
+ /**
+ * <p>Use the {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform} matrix
+ * and {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} to do color conversion.</p>
+ * <p>All advanced white balance adjustments (not specified
+ * by the white balance pipeline) must be disabled.</p>
+ * <p>If AWB is enabled with <code>{@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} != OFF</code>, then
+ * TRANSFORM_MATRIX is ignored. The camera device will override
+ * this value to either FAST or HIGH_QUALITY.</p>
+ *
+ * @see CaptureRequest#COLOR_CORRECTION_GAINS
+ * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
+ * @see CaptureRequest#CONTROL_AWB_MODE
+ * @see CaptureRequest#COLOR_CORRECTION_MODE
+ */
+ public static final int COLOR_CORRECTION_MODE_TRANSFORM_MATRIX = 0;
+
+ /**
+ * <p>Color correction processing must not slow down
+ * capture rate relative to sensor raw output.</p>
+ * <p>Advanced white balance adjustments above and beyond
+ * the specified white balance pipeline may be applied.</p>
+ * <p>If AWB is enabled with <code>{@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} != OFF</code>, then
+ * the camera device uses the last frame's AWB values
+ * (or defaults if AWB has never been run).</p>
+ *
+ * @see CaptureRequest#CONTROL_AWB_MODE
+ * @see CaptureRequest#COLOR_CORRECTION_MODE
+ */
+ public static final int COLOR_CORRECTION_MODE_FAST = 1;
+
+ /**
+ * <p>Color correction processing operates at improved
+ * quality but the capture rate might be reduced (relative to sensor
+ * raw output rate).</p>
+ * <p>Advanced white balance adjustments above and beyond
+ * the specified white balance pipeline may be applied.</p>
+ * <p>If AWB is enabled with <code>{@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} != OFF</code>, then
+ * the camera device uses the last frame's AWB values
+ * (or defaults if AWB has never been run).</p>
+ *
+ * @see CaptureRequest#CONTROL_AWB_MODE
+ * @see CaptureRequest#COLOR_CORRECTION_MODE
+ */
+ public static final int COLOR_CORRECTION_MODE_HIGH_QUALITY = 2;
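+
+ // A minimal sketch (not part of the SDK source): applying a manual color transform.
+ // TRANSFORM_MATRIX only takes effect when AWB is OFF; the identity transform and
+ // unity gains used here are placeholder values.
+ private static void applyManualColorCorrectionSketch(CaptureRequest.Builder builder) {
+     builder.set(CaptureRequest.CONTROL_AWB_MODE, CONTROL_AWB_MODE_OFF);
+     builder.set(CaptureRequest.COLOR_CORRECTION_MODE, COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
+     builder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM,
+             new android.hardware.camera2.params.ColorSpaceTransform(new int[] {
+                     1, 1,  0, 1,  0, 1,   // identity matrix, as
+                     0, 1,  1, 1,  0, 1,   // numerator/denominator pairs
+                     0, 1,  0, 1,  1, 1 }));
+     builder.set(CaptureRequest.COLOR_CORRECTION_GAINS,
+             new android.hardware.camera2.params.RggbChannelVector(1f, 1f, 1f, 1f));
+ }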
+
+ //
+ // Enumeration values for CaptureRequest#COLOR_CORRECTION_ABERRATION_MODE
+ //
+
+ /**
+ * <p>No aberration correction is applied.</p>
+ * @see CaptureRequest#COLOR_CORRECTION_ABERRATION_MODE
+ */
+ public static final int COLOR_CORRECTION_ABERRATION_MODE_OFF = 0;
+
+ /**
+ * <p>Aberration correction will not slow down capture rate
+ * relative to sensor raw output.</p>
+ * @see CaptureRequest#COLOR_CORRECTION_ABERRATION_MODE
+ */
+ public static final int COLOR_CORRECTION_ABERRATION_MODE_FAST = 1;
+
+ /**
+ * <p>Aberration correction operates at improved quality but the capture rate might be
+ * reduced (relative to sensor raw output rate).</p>
+ * @see CaptureRequest#COLOR_CORRECTION_ABERRATION_MODE
+ */
+ public static final int COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY = 2;
+
+ //
+ // Enumeration values for CaptureRequest#CONTROL_AE_ANTIBANDING_MODE
+ //
+
+ /**
+ * <p>The camera device will not adjust exposure duration to
+ * avoid banding problems.</p>
+ * @see CaptureRequest#CONTROL_AE_ANTIBANDING_MODE
+ */
+ public static final int CONTROL_AE_ANTIBANDING_MODE_OFF = 0;
+
+ /**
+ * <p>The camera device will adjust exposure duration to
+ * avoid banding problems with 50Hz illumination sources.</p>
+ * @see CaptureRequest#CONTROL_AE_ANTIBANDING_MODE
+ */
+ public static final int CONTROL_AE_ANTIBANDING_MODE_50HZ = 1;
+
+ /**
+ * <p>The camera device will adjust exposure duration to
+ * avoid banding problems with 60Hz illumination
+ * sources.</p>
+ * @see CaptureRequest#CONTROL_AE_ANTIBANDING_MODE
+ */
+ public static final int CONTROL_AE_ANTIBANDING_MODE_60HZ = 2;
+
+ /**
+ * <p>The camera device will automatically adapt its
+ * antibanding routine to the current illumination
+ * condition. This is the default mode if AUTO is
+ * available on a given camera device.</p>
+ * @see CaptureRequest#CONTROL_AE_ANTIBANDING_MODE
+ */
+ public static final int CONTROL_AE_ANTIBANDING_MODE_AUTO = 3;
+
+ //
+ // Enumeration values for CaptureRequest#CONTROL_AE_MODE
+ //
+
+ /**
+ * <p>The camera device's autoexposure routine is disabled.</p>
+ * <p>The application-selected {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime},
+ * {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity} and
+ * {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} are used by the camera
+ * device, along with android.flash.* fields, if there's
+ * a flash unit for this camera device.</p>
+ * <p>Note that auto-white balance (AWB) and auto-focus (AF)
+ * behavior is device dependent when AE is in OFF mode.
+ * To have consistent behavior across different devices,
+ * it is recommended to either set AWB and AF to OFF mode
+ * or lock AWB and AF before setting AE to OFF.
+ * See {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}, {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode},
+ * {@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock}, and {@link CaptureRequest#CONTROL_AF_TRIGGER android.control.afTrigger}
+ * for more details.</p>
+ * <p>LEGACY devices do not support the OFF mode and will
+ * override attempts to use this value to ON.</p>
+ *
+ * @see CaptureRequest#CONTROL_AF_MODE
+ * @see CaptureRequest#CONTROL_AF_TRIGGER
+ * @see CaptureRequest#CONTROL_AWB_LOCK
+ * @see CaptureRequest#CONTROL_AWB_MODE
+ * @see CaptureRequest#SENSOR_EXPOSURE_TIME
+ * @see CaptureRequest#SENSOR_FRAME_DURATION
+ * @see CaptureRequest#SENSOR_SENSITIVITY
+ * @see CaptureRequest#CONTROL_AE_MODE
+ */
+ public static final int CONTROL_AE_MODE_OFF = 0;
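+
+ // A minimal sketch (not part of the SDK source): fully manual exposure, as the
+ // documentation above describes. Only valid on devices with the MANUAL_SENSOR
+ // capability; the 30ms / ISO 100 values are arbitrary placeholders that must lie
+ // within the device's advertised ranges.
+ private static void setManualExposureSketch(CaptureRequest.Builder builder) {
+     builder.set(CaptureRequest.CONTROL_AE_MODE, CONTROL_AE_MODE_OFF);
+     builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 30_000_000L);  // nanoseconds
+     builder.set(CaptureRequest.SENSOR_SENSITIVITY, 100);            // ISO
+     builder.set(CaptureRequest.SENSOR_FRAME_DURATION, 33_333_333L); // ~30fps
+ }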
+
+ /**
+ * <p>The camera device's autoexposure routine is active,
+ * with no flash control.</p>
+ * <p>The application's values for
+ * {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime},
+ * {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}, and
+ * {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} are ignored. The
+ * application has control over the various
+ * android.flash.* fields.</p>
+ *
+ * @see CaptureRequest#SENSOR_EXPOSURE_TIME
+ * @see CaptureRequest#SENSOR_FRAME_DURATION
+ * @see CaptureRequest#SENSOR_SENSITIVITY
+ * @see CaptureRequest#CONTROL_AE_MODE
+ */
+ public static final int CONTROL_AE_MODE_ON = 1;
+
+ /**
+ * <p>Like ON, except that the camera device also controls
+ * the camera's flash unit, firing it in low-light
+ * conditions.</p>
+ * <p>The flash may be fired during a precapture sequence
+ * (triggered by {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger}) and
+ * may be fired for captures for which the
+ * {@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent} field is set to
+ * STILL_CAPTURE.</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
+ * @see CaptureRequest#CONTROL_CAPTURE_INTENT
+ * @see CaptureRequest#CONTROL_AE_MODE
+ */
+ public static final int CONTROL_AE_MODE_ON_AUTO_FLASH = 2;
+
+ /**
+ * <p>Like ON, except that the camera device also controls
+ * the camera's flash unit, always firing it for still
+ * captures.</p>
+ * <p>The flash may be fired during a precapture sequence
+ * (triggered by {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger}) and
+ * will always be fired for captures for which the
+ * {@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent} field is set to
+ * STILL_CAPTURE.</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
+ * @see CaptureRequest#CONTROL_CAPTURE_INTENT
+ * @see CaptureRequest#CONTROL_AE_MODE
+ */
+ public static final int CONTROL_AE_MODE_ON_ALWAYS_FLASH = 3;
+
+ /**
+ * <p>Like ON_AUTO_FLASH, but with automatic red eye
+ * reduction.</p>
+ * <p>If deemed necessary by the camera device, a red eye
+ * reduction flash will fire during the precapture
+ * sequence.</p>
+ * @see CaptureRequest#CONTROL_AE_MODE
+ */
+ public static final int CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE = 4;
+
+ //
+ // Enumeration values for CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
+ //
+
+ /**
+ * <p>The trigger is idle.</p>
+ * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
+ */
+ public static final int CONTROL_AE_PRECAPTURE_TRIGGER_IDLE = 0;
+
+ /**
+ * <p>The precapture metering sequence will be started
+ * by the camera device.</p>
+ * <p>The exact effect of the precapture trigger depends on
+ * the current AE mode and state.</p>
+ * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
+ */
+ public static final int CONTROL_AE_PRECAPTURE_TRIGGER_START = 1;
+
+ /**
+ * <p>The camera device will cancel any currently active or completed
+ * precapture metering sequence; the auto-exposure routine will return to its
+ * initial state.</p>
+ * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
+ */
+ public static final int CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL = 2;
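+
+ // A minimal sketch (not part of the SDK source): running the precapture metering
+ // sequence before a still capture. The trigger is submitted in a single one-shot
+ // request; the application then watches CaptureResult#CONTROL_AE_STATE for
+ // AE_STATE_CONVERGED (or AE_STATE_FLASH_REQUIRED) before issuing the still capture.
+ private static void startPrecaptureSketch(CameraCaptureSession session,
+         CaptureRequest.Builder repeatingBuilder,
+         CameraCaptureSession.CaptureCallback callback,
+         android.os.Handler handler) throws CameraAccessException {
+     repeatingBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
+             CONTROL_AE_PRECAPTURE_TRIGGER_START);
+     session.capture(repeatingBuilder.build(), callback, handler);
+     // Reset the trigger so later repeating requests do not re-start the sequence.
+     repeatingBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
+             CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
+ }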
+
+ //
+ // Enumeration values for CaptureRequest#CONTROL_AF_MODE
+ //
+
+ /**
+ * <p>The auto-focus routine does not control the lens;
+ * {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance} is controlled by the
+ * application.</p>
+ *
+ * @see CaptureRequest#LENS_FOCUS_DISTANCE
+ * @see CaptureRequest#CONTROL_AF_MODE
+ */
+ public static final int CONTROL_AF_MODE_OFF = 0;
+
+ /**
+ * <p>Basic automatic focus mode.</p>
+ * <p>In this mode, the lens does not move unless
+ * the autofocus trigger action is called. When that trigger
+ * is activated, AF will transition to ACTIVE_SCAN, then to
+ * the outcome of the scan (FOCUSED or NOT_FOCUSED).</p>
+ * <p>Always supported if the lens is not fixed focus.</p>
+ * <p>Use {@link CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE android.lens.info.minimumFocusDistance} to determine if the lens
+ * is fixed-focus.</p>
+ * <p>Triggering AF_CANCEL resets the lens position to default,
+ * and sets the AF state to INACTIVE.</p>
+ *
+ * @see CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE
+ * @see CaptureRequest#CONTROL_AF_MODE
+ */
+ public static final int CONTROL_AF_MODE_AUTO = 1;
+
+ /**
+ * <p>Close-up focusing mode.</p>
+ * <p>In this mode, the lens does not move unless the
+ * autofocus trigger action is called. When that trigger is
+ * activated, AF will transition to ACTIVE_SCAN, then to
+ * the outcome of the scan (FOCUSED or NOT_FOCUSED). This
+ * mode is optimized for focusing on objects very close to
+ * the camera.</p>
+ * <p>Triggering cancel AF resets the lens position to
+ * default, and sets the AF state to INACTIVE.</p>
+ * @see CaptureRequest#CONTROL_AF_MODE
+ */
+ public static final int CONTROL_AF_MODE_MACRO = 2;
+
+ /**
+ * <p>In this mode, the AF algorithm modifies the lens
+ * position continually to attempt to provide a
+ * constantly-in-focus image stream.</p>
+ * <p>The focusing behavior should be suitable for good quality
+ * video recording; typically this means slower focus
+ * movement and no overshoots. When the AF trigger is not
+ * involved, the AF algorithm should start in INACTIVE state,
+ * and then transition into PASSIVE_SCAN and PASSIVE_FOCUSED
+ * states as appropriate. When the AF trigger is activated,
+ * the algorithm should immediately transition into
+ * AF_FOCUSED or AF_NOT_FOCUSED as appropriate, and lock the
+ * lens position until a cancel AF trigger is received.</p>
+ * <p>Once cancel is received, the algorithm should transition
+ * back to INACTIVE and resume passive scan. Note that this
+ * behavior is not identical to CONTINUOUS_PICTURE, since an
+ * ongoing PASSIVE_SCAN must immediately be
+ * canceled.</p>
+ * @see CaptureRequest#CONTROL_AF_MODE
+ */
+ public static final int CONTROL_AF_MODE_CONTINUOUS_VIDEO = 3;
+
+ /**
+ * <p>In this mode, the AF algorithm modifies the lens
+ * position continually to attempt to provide a
+ * constantly-in-focus image stream.</p>
+ * <p>The focusing behavior should be suitable for still image
+ * capture; typically this means focusing as fast as
+ * possible. When the AF trigger is not involved, the AF
+ * algorithm should start in INACTIVE state, and then
+ * transition into PASSIVE_SCAN and PASSIVE_FOCUSED states as
+ * appropriate as it attempts to maintain focus. When the AF
+ * trigger is activated, the algorithm should finish its
+ * PASSIVE_SCAN if active, and then transition into
+ * AF_FOCUSED or AF_NOT_FOCUSED as appropriate, and lock the
+ * lens position until a cancel AF trigger is received.</p>
+ * <p>When the AF cancel trigger is activated, the algorithm
+ * should transition back to INACTIVE and then act as if it
+ * has just been started.</p>
+ * @see CaptureRequest#CONTROL_AF_MODE
+ */
+ public static final int CONTROL_AF_MODE_CONTINUOUS_PICTURE = 4;
+
+ /**
+ * <p>Extended depth of field (digital focus) mode.</p>
+ * <p>The camera device will produce images with an extended
+ * depth of field automatically; no special focusing
+ * operations need to be done before taking a picture.</p>
+ * <p>AF triggers are ignored, and the AF state will always be
+ * INACTIVE.</p>
+ * @see CaptureRequest#CONTROL_AF_MODE
+ */
+ public static final int CONTROL_AF_MODE_EDOF = 5;
+
+ //
+ // Enumeration values for CaptureRequest#CONTROL_AF_TRIGGER
+ //
+
+ /**
+ * <p>The trigger is idle.</p>
+ * @see CaptureRequest#CONTROL_AF_TRIGGER
+ */
+ public static final int CONTROL_AF_TRIGGER_IDLE = 0;
+
+ /**
+ * <p>Autofocus will trigger now.</p>
+ * @see CaptureRequest#CONTROL_AF_TRIGGER
+ */
+ public static final int CONTROL_AF_TRIGGER_START = 1;
+
+ /**
+ * <p>Autofocus will return to its initial
+ * state, and cancel any currently active trigger.</p>
+ * @see CaptureRequest#CONTROL_AF_TRIGGER
+ */
+ public static final int CONTROL_AF_TRIGGER_CANCEL = 2;
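+
+ // A minimal sketch (not part of the SDK source): a one-shot autofocus scan in
+ // AUTO mode. The application would watch CaptureResult#CONTROL_AF_STATE for
+ // FOCUSED_LOCKED or NOT_FOCUSED_LOCKED, and later send CONTROL_AF_TRIGGER_CANCEL
+ // to unlock the lens.
+ private static void triggerAutofocusSketch(CameraCaptureSession session,
+         CaptureRequest.Builder builder,
+         CameraCaptureSession.CaptureCallback callback,
+         android.os.Handler handler) throws CameraAccessException {
+     builder.set(CaptureRequest.CONTROL_AF_MODE, CONTROL_AF_MODE_AUTO);
+     builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CONTROL_AF_TRIGGER_START);
+     session.capture(builder.build(), callback, handler);
+     // Reset the trigger so the scan is not re-started by subsequent requests.
+     builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CONTROL_AF_TRIGGER_IDLE);
+ }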
+
+ //
+ // Enumeration values for CaptureRequest#CONTROL_AWB_MODE
+ //
+
+ /**
+ * <p>The camera device's auto-white balance routine is disabled.</p>
+ * <p>The application-selected color transform matrix
+ * ({@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}) and gains
+ * ({@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains}) are used by the camera
+ * device for manual white balance control.</p>
+ *
+ * @see CaptureRequest#COLOR_CORRECTION_GAINS
+ * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
+ * @see CaptureRequest#CONTROL_AWB_MODE
+ */
+ public static final int CONTROL_AWB_MODE_OFF = 0;
+
+ /**
+ * <p>The camera device's auto-white balance routine is active.</p>
+ * <p>The application's values for {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}
+ * and {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.</p>
+ *
+ * @see CaptureRequest#COLOR_CORRECTION_GAINS
+ * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
+ * @see CaptureRequest#CONTROL_AWB_MODE
+ */
+ public static final int CONTROL_AWB_MODE_AUTO = 1;
+
+ /**
+ * <p>The camera device's auto-white balance routine is disabled;
+ * the camera device uses incandescent light as the assumed scene
+ * illumination for white balance.</p>
+ * <p>While the exact white balance transforms are up to the
+ * camera device, they will approximately match the CIE
+ * standard illuminant A.</p>
+ * <p>The application's values for {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}
+ * and {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.</p>
+ *
+ * @see CaptureRequest#COLOR_CORRECTION_GAINS
+ * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
+ * @see CaptureRequest#CONTROL_AWB_MODE
+ */
+ public static final int CONTROL_AWB_MODE_INCANDESCENT = 2;
+
+ /**
+ * <p>The camera device's auto-white balance routine is disabled;
+ * the camera device uses fluorescent light as the assumed scene
+ * illumination for white balance.</p>
+ * <p>While the exact white balance transforms are up to the
+ * camera device, they will approximately match the CIE
+ * standard illuminant F2.</p>
+ * <p>The application's values for {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}
+ * and {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.</p>
+ *
+ * @see CaptureRequest#COLOR_CORRECTION_GAINS
+ * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
+ * @see CaptureRequest#CONTROL_AWB_MODE
+ */
+ public static final int CONTROL_AWB_MODE_FLUORESCENT = 3;
+
+ /**
+ * <p>The camera device's auto-white balance routine is disabled;
+ * the camera device uses warm fluorescent light as the assumed scene
+ * illumination for white balance.</p>
+ * <p>While the exact white balance transforms are up to the
+ * camera device, they will approximately match the CIE
+ * standard illuminant F4.</p>
+ * <p>The application's values for {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}
+ * and {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.</p>
+ *
+ * @see CaptureRequest#COLOR_CORRECTION_GAINS
+ * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
+ * @see CaptureRequest#CONTROL_AWB_MODE
+ */
+ public static final int CONTROL_AWB_MODE_WARM_FLUORESCENT = 4;
+
+ /**
+ * <p>The camera device's auto-white balance routine is disabled;
+ * the camera device uses daylight light as the assumed scene
+ * illumination for white balance.</p>
+ * <p>While the exact white balance transforms are up to the
+ * camera device, they will approximately match the CIE
+ * standard illuminant D65.</p>
+ * <p>The application's values for {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}
+ * and {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.</p>
+ *
+ * @see CaptureRequest#COLOR_CORRECTION_GAINS
+ * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
+ * @see CaptureRequest#CONTROL_AWB_MODE
+ */
+ public static final int CONTROL_AWB_MODE_DAYLIGHT = 5;
+
+ /**
+ * <p>The camera device's auto-white balance routine is disabled;
+ * the camera device uses cloudy daylight light as the assumed scene
+ * illumination for white balance.</p>
+ * <p>The application's values for {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}
+ * and {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.</p>
+ *
+ * @see CaptureRequest#COLOR_CORRECTION_GAINS
+ * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
+ * @see CaptureRequest#CONTROL_AWB_MODE
+ */
+ public static final int CONTROL_AWB_MODE_CLOUDY_DAYLIGHT = 6;
+
+ /**
+ * <p>The camera device's auto-white balance routine is disabled;
+ * the camera device uses twilight light as the assumed scene
+ * illumination for white balance.</p>
+ * <p>The application's values for {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}
+ * and {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.</p>
+ *
+ * @see CaptureRequest#COLOR_CORRECTION_GAINS
+ * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
+ * @see CaptureRequest#CONTROL_AWB_MODE
+ */
+ public static final int CONTROL_AWB_MODE_TWILIGHT = 7;
+
+ /**
+ * <p>The camera device's auto-white balance routine is disabled;
+ * the camera device uses shade light as the assumed scene
+ * illumination for white balance.</p>
+ * <p>The application's values for {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}
+ * and {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} are ignored.
+ * For devices that support the MANUAL_POST_PROCESSING capability, the
+ * values used by the camera device for the transform and gains
+ * will be available in the capture result for this request.</p>
+ *
+ * @see CaptureRequest#COLOR_CORRECTION_GAINS
+ * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
+ * @see CaptureRequest#CONTROL_AWB_MODE
+ */
+ public static final int CONTROL_AWB_MODE_SHADE = 8;
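+
+ // A minimal sketch (not part of the SDK source): falling back to AUTO when a
+ // preset illuminant (e.g. CONTROL_AWB_MODE_DAYLIGHT) is not advertised by the device.
+ private static int chooseAwbModeSketch(CameraCharacteristics chars, int preferredMode) {
+     int[] modes = chars.get(CameraCharacteristics.CONTROL_AWB_AVAILABLE_MODES);
+     if (modes != null) {
+         for (int mode : modes) {
+             if (mode == preferredMode) {
+                 return preferredMode;
+             }
+         }
+     }
+     return CONTROL_AWB_MODE_AUTO; // AUTO is available on all backward-compatible devices
+ }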
+
+ //
+ // Enumeration values for CaptureRequest#CONTROL_CAPTURE_INTENT
+ //
+
+ /**
+ * <p>The goal of this request doesn't fall into the other
+ * categories. The camera device will default to preview-like
+ * behavior.</p>
+ * @see CaptureRequest#CONTROL_CAPTURE_INTENT
+ */
+ public static final int CONTROL_CAPTURE_INTENT_CUSTOM = 0;
+
+ /**
+ * <p>This request is for a preview-like use case.</p>
+ * <p>The precapture trigger may be used to start off a metering
+ * w/flash sequence.</p>
+ * @see CaptureRequest#CONTROL_CAPTURE_INTENT
+ */
+ public static final int CONTROL_CAPTURE_INTENT_PREVIEW = 1;
+
+ /**
+ * <p>This request is for a still capture-type
+ * use case.</p>
+ * <p>If the flash unit is under automatic control, it may fire as needed.</p>
+ * @see CaptureRequest#CONTROL_CAPTURE_INTENT
+ */
+ public static final int CONTROL_CAPTURE_INTENT_STILL_CAPTURE = 2;
+
+ /**
+ * <p>This request is for a video recording
+ * use case.</p>
+ * @see CaptureRequest#CONTROL_CAPTURE_INTENT
+ */
+ public static final int CONTROL_CAPTURE_INTENT_VIDEO_RECORD = 3;
+
+ /**
+ * <p>This request is for a video snapshot (still
+ * image while recording video) use case.</p>
+ * <p>The camera device should take the highest-quality image
+ * possible (given the other settings) without disrupting the
+ * frame rate of video recording. </p>
+ * @see CaptureRequest#CONTROL_CAPTURE_INTENT
+ */
+ public static final int CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT = 4;
+
+ /**
+ * <p>This request is for a ZSL use case; the
+ * application will stream full-resolution images and
+ * reprocess one or several later for a final
+ * capture.</p>
+ * @see CaptureRequest#CONTROL_CAPTURE_INTENT
+ */
+ public static final int CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG = 5;
+
+ /**
+ * <p>This request is for a manual capture use case where
+ * the application wants to directly control the capture parameters.</p>
+ * <p>For example, the application may wish to manually control
+ * {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime}, {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}, etc.</p>
+ *
+ * @see CaptureRequest#SENSOR_EXPOSURE_TIME
+ * @see CaptureRequest#SENSOR_SENSITIVITY
+ * @see CaptureRequest#CONTROL_CAPTURE_INTENT
+ */
+ public static final int CONTROL_CAPTURE_INTENT_MANUAL = 6;
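+
+ // A minimal sketch (not part of the SDK source): the capture intent is normally
+ // pre-populated by the template used to create the request, but it can also be
+ // set explicitly.
+ private static CaptureRequest.Builder buildVideoSnapshotSketch(CameraDevice device)
+         throws CameraAccessException {
+     // TEMPLATE_VIDEO_SNAPSHOT already sets CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT.
+     CaptureRequest.Builder builder =
+             device.createCaptureRequest(CameraDevice.TEMPLATE_VIDEO_SNAPSHOT);
+     // Setting the intent explicitly is equivalent for this field:
+     builder.set(CaptureRequest.CONTROL_CAPTURE_INTENT, CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT);
+     return builder;
+ }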
+
+ //
+ // Enumeration values for CaptureRequest#CONTROL_EFFECT_MODE
+ //
+
+ /**
+ * <p>No color effect will be applied.</p>
+ * @see CaptureRequest#CONTROL_EFFECT_MODE
+ */
+ public static final int CONTROL_EFFECT_MODE_OFF = 0;
+
+ /**
+ * <p>A "monocolor" effect where the image is mapped into
+ * a single color.</p>
+ * <p>This will typically be grayscale.</p>
+ * @see CaptureRequest#CONTROL_EFFECT_MODE
+ */
+ public static final int CONTROL_EFFECT_MODE_MONO = 1;
+
+ /**
+ * <p>A "photo-negative" effect where the image's colors
+ * are inverted.</p>
+ * @see CaptureRequest#CONTROL_EFFECT_MODE
+ */
+ public static final int CONTROL_EFFECT_MODE_NEGATIVE = 2;
+
+ /**
+ * <p>A "solarisation" effect (Sabattier effect) where the
+ * image is wholly or partially reversed in
+ * tone.</p>
+ * @see CaptureRequest#CONTROL_EFFECT_MODE
+ */
+ public static final int CONTROL_EFFECT_MODE_SOLARIZE = 3;
+
+ /**
+ * <p>A "sepia" effect where the image is mapped into warm
+ * gray, red, and brown tones.</p>
+ * @see CaptureRequest#CONTROL_EFFECT_MODE
+ */
+ public static final int CONTROL_EFFECT_MODE_SEPIA = 4;
+
+ /**
+ * <p>A "posterization" effect where the image uses
+ * discrete regions of tone rather than a continuous
+ * gradient of tones.</p>
+ * @see CaptureRequest#CONTROL_EFFECT_MODE
+ */
+ public static final int CONTROL_EFFECT_MODE_POSTERIZE = 5;
+
+ /**
+ * <p>A "whiteboard" effect where the image is typically displayed
+ * as regions of white, with black or grey details.</p>
+ * @see CaptureRequest#CONTROL_EFFECT_MODE
+ */
+ public static final int CONTROL_EFFECT_MODE_WHITEBOARD = 6;
+
+ /**
+ * <p>A "blackboard" effect where the image is typically displayed
+ * as regions of black, with white or grey details.</p>
+ * @see CaptureRequest#CONTROL_EFFECT_MODE
+ */
+ public static final int CONTROL_EFFECT_MODE_BLACKBOARD = 7;
+
+ /**
+ * <p>An "aqua" effect where a blue hue is added to the image.</p>
+ * @see CaptureRequest#CONTROL_EFFECT_MODE
+ */
+ public static final int CONTROL_EFFECT_MODE_AQUA = 8;
+
+ //
+ // Enumeration values for CaptureRequest#CONTROL_MODE
+ //
+
+ /**
+ * <p>Full application control of pipeline.</p>
+ * <p>All control by the device's metering and focusing (3A)
+ * routines is disabled, and no other settings in
+ * android.control.* have any effect, except that
+ * {@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent} may be used by the camera
+ * device to select post-processing values for processing
+ * blocks that do not allow for manual control, or are not
+ * exposed by the camera API.</p>
+ * <p>However, the camera device's 3A routines may continue to
+ * collect statistics and update their internal state so that
+ * when control is switched to AUTO mode, good control values
+ * can be immediately applied.</p>
+ *
+ * @see CaptureRequest#CONTROL_CAPTURE_INTENT
+ * @see CaptureRequest#CONTROL_MODE
+ */
+ public static final int CONTROL_MODE_OFF = 0;
+
+ /**
+ * <p>Use settings for each individual 3A routine.</p>
+ * <p>Manual control of capture parameters is disabled. All
+ * controls in android.control.* besides sceneMode take
+ * effect.</p>
+ * @see CaptureRequest#CONTROL_MODE
+ */
+ public static final int CONTROL_MODE_AUTO = 1;
+
+ /**
+ * <p>Use a specific scene mode.</p>
+ * <p>Enabling this disables control.aeMode, control.awbMode and
+ * control.afMode controls; the camera device will ignore
+ * those settings while USE_SCENE_MODE is active (except for
+ * FACE_PRIORITY scene mode). Other control entries are still active.
+ * This setting can only be used if scene mode is supported (i.e.
+ * {@link CameraCharacteristics#CONTROL_AVAILABLE_SCENE_MODES android.control.availableSceneModes}
+ * contains at least one mode other than DISABLED).</p>
+ *
+ * @see CameraCharacteristics#CONTROL_AVAILABLE_SCENE_MODES
+ * @see CaptureRequest#CONTROL_MODE
+ */
+ public static final int CONTROL_MODE_USE_SCENE_MODE = 2;
+
+ /**
+ * <p>Same as OFF mode, except that this capture will not be
+ * used by the camera device's background auto-exposure, auto-white balance, and
+ * auto-focus algorithms (3A) to update their statistics.</p>
+ * <p>Specifically, the 3A routines are locked to the last
+ * values set from a request with AUTO, OFF, or
+ * USE_SCENE_MODE, and any statistics or state updates
+ * collected from manual captures with OFF_KEEP_STATE will be
+ * discarded by the camera device.</p>
+ * @see CaptureRequest#CONTROL_MODE
+ */
+ public static final int CONTROL_MODE_OFF_KEEP_STATE = 3;
+
+ //
+ // Enumeration values for CaptureRequest#CONTROL_SCENE_MODE
+ //
+
+ /**
+ * <p>Indicates that no scene modes are set for a given capture request.</p>
+ * @see CaptureRequest#CONTROL_SCENE_MODE
+ */
+ public static final int CONTROL_SCENE_MODE_DISABLED = 0;
+
+ /**
+ * <p>If face detection support exists, use face
+ * detection data for auto-focus, auto-white balance, and
+ * auto-exposure routines.</p>
+ * <p>If face detection statistics are disabled
+ * (i.e. {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode} is set to OFF),
+ * this should still operate correctly (but will not return
+ * face detection statistics to the framework).</p>
+ * <p>Unlike the other scene modes, {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode},
+ * {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}, and {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}
+ * remain active when FACE_PRIORITY is set.</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CaptureRequest#CONTROL_AF_MODE
+ * @see CaptureRequest#CONTROL_AWB_MODE
+ * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE
+ * @see CaptureRequest#CONTROL_SCENE_MODE
+ */
+ public static final int CONTROL_SCENE_MODE_FACE_PRIORITY = 1;
+
+ /**
+ * <p>Optimized for photos of quickly moving objects.</p>
+ * <p>Similar to SPORTS.</p>
+ * @see CaptureRequest#CONTROL_SCENE_MODE
+ */
+ public static final int CONTROL_SCENE_MODE_ACTION = 2;
+
+ /**
+ * <p>Optimized for still photos of people.</p>
+ * @see CaptureRequest#CONTROL_SCENE_MODE
+ */
+ public static final int CONTROL_SCENE_MODE_PORTRAIT = 3;
+
+ /**
+ * <p>Optimized for photos of distant macroscopic objects.</p>
+ * @see CaptureRequest#CONTROL_SCENE_MODE
+ */
+ public static final int CONTROL_SCENE_MODE_LANDSCAPE = 4;
+
+ /**
+ * <p>Optimized for low-light settings.</p>
+ * @see CaptureRequest#CONTROL_SCENE_MODE
+ */
+ public static final int CONTROL_SCENE_MODE_NIGHT = 5;
+
+ /**
+ * <p>Optimized for still photos of people in low-light
+ * settings.</p>
+ * @see CaptureRequest#CONTROL_SCENE_MODE
+ */
+ public static final int CONTROL_SCENE_MODE_NIGHT_PORTRAIT = 6;
+
+ /**
+ * <p>Optimized for dim, indoor settings where flash must
+ * remain off.</p>
+ * @see CaptureRequest#CONTROL_SCENE_MODE
+ */
+ public static final int CONTROL_SCENE_MODE_THEATRE = 7;
+
+ /**
+ * <p>Optimized for bright, outdoor beach settings.</p>
+ * @see CaptureRequest#CONTROL_SCENE_MODE
+ */
+ public static final int CONTROL_SCENE_MODE_BEACH = 8;
+
+ /**
+ * <p>Optimized for bright, outdoor settings containing snow.</p>
+ * @see CaptureRequest#CONTROL_SCENE_MODE
+ */
+ public static final int CONTROL_SCENE_MODE_SNOW = 9;
+
+ /**
+ * <p>Optimized for scenes of the setting sun.</p>
+ * @see CaptureRequest#CONTROL_SCENE_MODE
+ */
+ public static final int CONTROL_SCENE_MODE_SUNSET = 10;
+
+ /**
+ * <p>Optimized to avoid blurry photos due to small amounts of
+ * device motion (for example: due to hand shake).</p>
+ * @see CaptureRequest#CONTROL_SCENE_MODE
+ */
+ public static final int CONTROL_SCENE_MODE_STEADYPHOTO = 11;
+
+ /**
+ * <p>Optimized for nighttime photos of fireworks.</p>
+ * @see CaptureRequest#CONTROL_SCENE_MODE
+ */
+ public static final int CONTROL_SCENE_MODE_FIREWORKS = 12;
+
+ /**
+ * <p>Optimized for photos of quickly moving people.</p>
+ * <p>Similar to ACTION.</p>
+ * @see CaptureRequest#CONTROL_SCENE_MODE
+ */
+ public static final int CONTROL_SCENE_MODE_SPORTS = 13;
+
+ /**
+ * <p>Optimized for dim, indoor settings with multiple moving
+ * people.</p>
+ * @see CaptureRequest#CONTROL_SCENE_MODE
+ */
+ public static final int CONTROL_SCENE_MODE_PARTY = 14;
+
+ /**
+ * <p>Optimized for dim settings where the main light source
+ * is a flame.</p>
+ * @see CaptureRequest#CONTROL_SCENE_MODE
+ */
+ public static final int CONTROL_SCENE_MODE_CANDLELIGHT = 15;
+
+ /**
+ * <p>Optimized for accurately capturing a photo of a barcode,
+ * for use by camera applications that wish to read the
+ * barcode value.</p>
+ * @see CaptureRequest#CONTROL_SCENE_MODE
+ */
+ public static final int CONTROL_SCENE_MODE_BARCODE = 16;
+
+ /**
+ * <p>This is deprecated, please use {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession }
+ * and {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList }
+ * for high speed video recording.</p>
+ * <p>Optimized for high speed video recording (frame rate &gt;=60fps) use case.</p>
+ * <p>The supported high speed video sizes and fps ranges are specified in
+ * android.control.availableHighSpeedVideoConfigurations. To get desired
+ * output frame rates, the application is only allowed to select video size
+ * and fps range combinations listed in this static metadata. The fps range
+ * can be controlled via {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE android.control.aeTargetFpsRange}.</p>
+ * <p>In this mode, the camera device will override aeMode, awbMode, and afMode to
+ * ON, ON, and CONTINUOUS_VIDEO, respectively. All post-processing block mode
+ * controls will be overridden to be FAST. Therefore, no manual control of capture
+ * and post-processing parameters is possible. All other controls operate the
+ * same as when {@link CaptureRequest#CONTROL_MODE android.control.mode} == AUTO. This means that all other
+ * android.control.* fields continue to work, such as</p>
+ * <ul>
+ * <li>{@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE android.control.aeTargetFpsRange}</li>
+ * <li>{@link CaptureRequest#CONTROL_AE_EXPOSURE_COMPENSATION android.control.aeExposureCompensation}</li>
+ * <li>{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock}</li>
+ * <li>{@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock}</li>
+ * <li>{@link CaptureRequest#CONTROL_EFFECT_MODE android.control.effectMode}</li>
+ * <li>{@link CaptureRequest#CONTROL_AE_REGIONS android.control.aeRegions}</li>
+ * <li>{@link CaptureRequest#CONTROL_AF_REGIONS android.control.afRegions}</li>
+ * <li>{@link CaptureRequest#CONTROL_AWB_REGIONS android.control.awbRegions}</li>
+ * <li>{@link CaptureRequest#CONTROL_AF_TRIGGER android.control.afTrigger}</li>
+ * <li>{@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger}</li>
+ * </ul>
+ * <p>Outside of android.control.*, the following controls will work:</p>
+ * <ul>
+ * <li>{@link CaptureRequest#FLASH_MODE android.flash.mode} (automatic flash for still capture will not work since aeMode is ON)</li>
+ * <li>{@link CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE android.lens.opticalStabilizationMode} (if it is supported)</li>
+ * <li>{@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}</li>
+ * <li>{@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode}</li>
+ * </ul>
+ * <p>For the high speed recording use case, the actual maximum supported frame rate may
+ * be lower than what the camera can output, depending on the destination Surfaces for
+ * the image data. For example, if the destination surface is from a video encoder,
+ * the application needs to check if the video encoder is capable of supporting the
+ * high frame rate for a given video size, or it will end up with a lower recording
+ * frame rate. If the destination surface is from a preview window, the preview frame
+ * rate will be bounded by the screen refresh rate.</p>
+ * <p>The camera device will only support up to 2 output high speed streams
+ * (processed non-stalling format defined in android.request.maxNumOutputStreams)
+ * in this mode. This control will be effective only if all of the below conditions are true:</p>
+ * <ul>
+ * <li>The application created no more than maxNumHighSpeedStreams processed non-stalling
+ * format output streams, where maxNumHighSpeedStreams is calculated as
+ * min(2, android.request.maxNumOutputStreams[Processed (but not-stalling)]).</li>
+ * <li>The stream sizes are selected from the sizes reported by
+ * android.control.availableHighSpeedVideoConfigurations.</li>
+ * <li>No processed non-stalling or raw streams are configured.</li>
+ * </ul>
+ * <p>When the above conditions are NOT satisfied, the controls of this mode and
+ * {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE android.control.aeTargetFpsRange} will be ignored by the camera device,
+ * the camera device will fall back to {@link CaptureRequest#CONTROL_MODE android.control.mode} <code>==</code> AUTO,
+ * and the returned capture result metadata will give the fps range chosen
+ * by the camera device.</p>
+ * <p>Switching into or out of this mode may trigger some camera ISP/sensor
+ * reconfigurations, which may introduce extra latency. It is recommended that
+ * the application avoids unnecessary scene mode switches as much as possible.</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_EXPOSURE_COMPENSATION
+ * @see CaptureRequest#CONTROL_AE_LOCK
+ * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
+ * @see CaptureRequest#CONTROL_AE_REGIONS
+ * @see CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE
+ * @see CaptureRequest#CONTROL_AF_REGIONS
+ * @see CaptureRequest#CONTROL_AF_TRIGGER
+ * @see CaptureRequest#CONTROL_AWB_LOCK
+ * @see CaptureRequest#CONTROL_AWB_REGIONS
+ * @see CaptureRequest#CONTROL_EFFECT_MODE
+ * @see CaptureRequest#CONTROL_MODE
+ * @see CaptureRequest#FLASH_MODE
+ * @see CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE
+ * @see CaptureRequest#SCALER_CROP_REGION
+ * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE
+ * @see CaptureRequest#CONTROL_SCENE_MODE
+ * @deprecated Please refer to this API documentation to find the alternatives
+ */
+ @Deprecated
+ public static final int CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO = 17;
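+
+ // A minimal sketch (not part of the SDK source) of the replacement named in the
+ // deprecation note above: a constrained high-speed session plus a burst list per
+ // frame batch. The surfaces, stateCallback, and handler arguments are assumed to be
+ // prepared by the application, with sizes/fps taken from the advertised high speed
+ // video configurations.
+ private static void startHighSpeedSketch(CameraDevice device,
+         java.util.List<android.view.Surface> surfaces,
+         CameraCaptureSession.StateCallback stateCallback,
+         android.os.Handler handler) throws CameraAccessException {
+     device.createConstrainedHighSpeedCaptureSession(surfaces, stateCallback, handler);
+     // Inside StateCallback#onConfigured, the application would then do:
+     //   CameraConstrainedHighSpeedCaptureSession hs =
+     //           (CameraConstrainedHighSpeedCaptureSession) session;
+     //   java.util.List<CaptureRequest> burst = hs.createHighSpeedRequestList(request);
+     //   hs.setRepeatingBurst(burst, callback, handler);
+ }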
+
+ /**
+ * <p>Turn on a device-specific high dynamic range (HDR) mode.</p>
+ * <p>In this scene mode, the camera device captures images
+ * that keep a larger range of scene illumination levels
+ * visible in the final image. For example, when taking a
+ * picture of an object in front of a bright window, both
+ * the object and the scene through the window may be
+ * visible when using HDR mode, while in normal AUTO mode,
+ * one or the other may be poorly exposed. As a tradeoff,
+ * HDR mode generally takes much longer to capture a single
+ * image, has no user control, and may have other artifacts
+ * depending on the HDR method used.</p>
+ * <p>Therefore, HDR captures operate at a much slower rate
+ * than regular captures.</p>
+ * <p>In this mode, on LIMITED or FULL devices, when a request
+ * is made with a {@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent} of
+ * STILL_CAPTURE, the camera device will capture an image
+ * using a high dynamic range capture technique. On LEGACY
+ * devices, captures that target a JPEG-format output will
+ * be captured with HDR, and the capture intent is not
+ * relevant.</p>
+ * <p>The HDR capture may involve the device capturing a burst
+ * of images internally and combining them into one, or it
+ * may involve the device using specialized high dynamic
+ * range capture hardware. In all cases, a single image is
+ * produced in response to a capture request submitted
+ * while in HDR mode.</p>
+ * <p>Since substantial post-processing is generally needed to
+ * produce an HDR image, only YUV, PRIVATE, and JPEG
+ * outputs are supported for LIMITED/FULL device HDR
+ * captures, and only JPEG outputs are supported for LEGACY
+ * HDR captures. Using a RAW output for HDR capture is not
+ * supported.</p>
+ * <p>Some devices may also support always-on HDR, which
+ * applies HDR processing at full frame rate. For these
+ * devices, intents other than STILL_CAPTURE will also
+ * produce an HDR output with no frame rate impact compared
+ * to normal operation, though the quality may be lower
+ * than for STILL_CAPTURE intents.</p>
+ * <p>If SCENE_MODE_HDR is used with unsupported output types
+ * or capture intents, the images captured will be as if
+ * the SCENE_MODE was not enabled at all.</p>
+ *
+ * @see CaptureRequest#CONTROL_CAPTURE_INTENT
+ * @see CaptureRequest#CONTROL_SCENE_MODE
+ */
+ public static final int CONTROL_SCENE_MODE_HDR = 18;
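+
+ // A minimal sketch (not part of the SDK source): enabling HDR scene mode only when
+ // the device advertises it. Scene modes require CONTROL_MODE_USE_SCENE_MODE.
+ private static void enableHdrSketch(CameraCharacteristics chars,
+         CaptureRequest.Builder builder) {
+     int[] sceneModes = chars.get(CameraCharacteristics.CONTROL_AVAILABLE_SCENE_MODES);
+     if (sceneModes != null) {
+         for (int mode : sceneModes) {
+             if (mode == CONTROL_SCENE_MODE_HDR) {
+                 builder.set(CaptureRequest.CONTROL_MODE, CONTROL_MODE_USE_SCENE_MODE);
+                 builder.set(CaptureRequest.CONTROL_SCENE_MODE, CONTROL_SCENE_MODE_HDR);
+                 return;
+             }
+         }
+     }
+ }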
+
+ /**
+ * <p>Same as FACE_PRIORITY scene mode, except that the camera
+ * device will choose higher sensitivity values ({@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity})
+ * under low light conditions.</p>
+ * <p>The camera device may be tuned to expose the images in a reduced
+ * sensitivity range to produce the best quality images. For example,
+ * if the {@link CameraCharacteristics#SENSOR_INFO_SENSITIVITY_RANGE android.sensor.info.sensitivityRange} gives range of [100, 1600],
+ * the camera device auto-exposure routine tuning process may limit the actual
+ * exposure sensitivity range to [100, 1200] to ensure that the noise level isn't
+ * excessive in order to preserve the image quality. Under this situation, the image under
+ * low light may be under-exposed when the sensor max exposure time (bounded by the
+ * {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE android.control.aeTargetFpsRange} when {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is one of the
+ * ON_* modes) and effective max sensitivity are reached. This scene mode allows the
+ * camera device auto-exposure routine to increase the sensitivity up to the max
+ * sensitivity specified by {@link CameraCharacteristics#SENSOR_INFO_SENSITIVITY_RANGE android.sensor.info.sensitivityRange} when the scene is too
+ * dark and the max exposure time is reached. The captured images may be noisier
+ * compared with the images captured in normal FACE_PRIORITY mode; therefore, it is
+ * recommended that the application only use this scene mode when it is capable of
+ * reducing the noise level of the captured images.</p>
+ * <p>Unlike the other scene modes, {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode},
+ * {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}, and {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}
+ * remain active when FACE_PRIORITY_LOW_LIGHT is set.</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE
+ * @see CaptureRequest#CONTROL_AF_MODE
+ * @see CaptureRequest#CONTROL_AWB_MODE
+ * @see CameraCharacteristics#SENSOR_INFO_SENSITIVITY_RANGE
+ * @see CaptureRequest#SENSOR_SENSITIVITY
+ * @see CaptureRequest#CONTROL_SCENE_MODE
+ * @hide
+ */
+ public static final int CONTROL_SCENE_MODE_FACE_PRIORITY_LOW_LIGHT = 19;
+
+ /**
+ * <p>Scene mode values within the range of
+ * <code>[DEVICE_CUSTOM_START, DEVICE_CUSTOM_END]</code> are reserved for device specific
+ * customized scene modes.</p>
+ * @see CaptureRequest#CONTROL_SCENE_MODE
+ * @hide
+ */
+ public static final int CONTROL_SCENE_MODE_DEVICE_CUSTOM_START = 100;
+
+ /**
+ * <p>Scene mode values within the range of
+ * <code>[DEVICE_CUSTOM_START, DEVICE_CUSTOM_END]</code> are reserved for device specific
+ * customized scene modes.</p>
+ * @see CaptureRequest#CONTROL_SCENE_MODE
+ * @hide
+ */
+ public static final int CONTROL_SCENE_MODE_DEVICE_CUSTOM_END = 127;
+
+ //
+ // Enumeration values for CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE
+ //
+
+ /**
+ * <p>Video stabilization is disabled.</p>
+ * @see CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE
+ */
+ public static final int CONTROL_VIDEO_STABILIZATION_MODE_OFF = 0;
+
+ /**
+ * <p>Video stabilization is enabled.</p>
+ * @see CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE
+ */
+ public static final int CONTROL_VIDEO_STABILIZATION_MODE_ON = 1;
+
+ //
+ // Enumeration values for CaptureRequest#EDGE_MODE
+ //
+
+ /**
+ * <p>No edge enhancement is applied.</p>
+ * @see CaptureRequest#EDGE_MODE
+ */
+ public static final int EDGE_MODE_OFF = 0;
+
+ /**
+ * <p>Apply edge enhancement at a quality level that does not slow down frame rate
+ * relative to sensor output. It may be the same as OFF if edge enhancement will
+ * slow down frame rate relative to sensor output.</p>
+ * @see CaptureRequest#EDGE_MODE
+ */
+ public static final int EDGE_MODE_FAST = 1;
+
+ /**
+ * <p>Apply high-quality edge enhancement, at a cost of possibly reduced output frame rate.</p>
+ * @see CaptureRequest#EDGE_MODE
+ */
+ public static final int EDGE_MODE_HIGH_QUALITY = 2;
+
+ /**
+ * <p>Edge enhancement is applied at different levels for different output streams,
+ * based on resolution. Streams at maximum recording resolution (see {@link android.hardware.camera2.CameraDevice#createCaptureSession }) or below have
+ * edge enhancement applied, while higher-resolution streams have no edge enhancement
+ * applied. The level of edge enhancement for low-resolution streams is tuned so that
+ * frame rate is not impacted, and the quality is equal to or better than FAST (since it
+ * is only applied to lower-resolution outputs, quality may improve from FAST).</p>
+ * <p>This mode is intended to be used by applications operating in a zero-shutter-lag mode
+ * with YUV or PRIVATE reprocessing, where the application continuously captures
+ * high-resolution intermediate buffers into a circular buffer, from which a final image is
+ * produced via reprocessing when a user takes a picture. For such a use case, the
+ * high-resolution buffers must not have edge enhancement applied to maximize efficiency of
+ * preview and to avoid double-applying enhancement when reprocessed, while low-resolution
+ * buffers (used for recording or preview, generally) need edge enhancement applied for
+ * reasonable preview quality.</p>
+ * <p>This mode is guaranteed to be supported by devices that support either the
+ * YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities
+ * ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} lists either of those capabilities) and it will
+ * be the default mode for CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.</p>
+ *
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ * @see CaptureRequest#EDGE_MODE
+ */
+ public static final int EDGE_MODE_ZERO_SHUTTER_LAG = 3;
+
+ //
+ // Enumeration values for CaptureRequest#FLASH_MODE
+ //
+
+ /**
+ * <p>Do not fire the flash for this capture.</p>
+ * @see CaptureRequest#FLASH_MODE
+ */
+ public static final int FLASH_MODE_OFF = 0;
+
+ /**
+ * <p>If the flash is available and charged, fire flash
+ * for this capture.</p>
+ * @see CaptureRequest#FLASH_MODE
+ */
+ public static final int FLASH_MODE_SINGLE = 1;
+
+ /**
+ * <p>Transition flash to continuously on.</p>
+ * @see CaptureRequest#FLASH_MODE
+ */
+ public static final int FLASH_MODE_TORCH = 2;
+
+ //
+ // Enumeration values for CaptureRequest#HOT_PIXEL_MODE
+ //
+
+ /**
+ * <p>No hot pixel correction is applied.</p>
+ * <p>The frame rate must not be reduced relative to sensor raw output
+ * for this option.</p>
+ * <p>The hotpixel map may be returned in {@link CaptureResult#STATISTICS_HOT_PIXEL_MAP android.statistics.hotPixelMap}.</p>
+ *
+ * @see CaptureResult#STATISTICS_HOT_PIXEL_MAP
+ * @see CaptureRequest#HOT_PIXEL_MODE
+ */
+ public static final int HOT_PIXEL_MODE_OFF = 0;
+
+ /**
+ * <p>Hot pixel correction is applied, without reducing frame
+ * rate relative to sensor raw output.</p>
+ * <p>The hotpixel map may be returned in {@link CaptureResult#STATISTICS_HOT_PIXEL_MAP android.statistics.hotPixelMap}.</p>
+ *
+ * @see CaptureResult#STATISTICS_HOT_PIXEL_MAP
+ * @see CaptureRequest#HOT_PIXEL_MODE
+ */
+ public static final int HOT_PIXEL_MODE_FAST = 1;
+
+ /**
+ * <p>High-quality hot pixel correction is applied, at a cost
+ * of possibly reduced frame rate relative to sensor raw output.</p>
+ * <p>The hotpixel map may be returned in {@link CaptureResult#STATISTICS_HOT_PIXEL_MAP android.statistics.hotPixelMap}.</p>
+ *
+ * @see CaptureResult#STATISTICS_HOT_PIXEL_MAP
+ * @see CaptureRequest#HOT_PIXEL_MODE
+ */
+ public static final int HOT_PIXEL_MODE_HIGH_QUALITY = 2;
+
+ //
+ // Enumeration values for CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE
+ //
+
+ /**
+ * <p>Optical stabilization is unavailable.</p>
+ * @see CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE
+ */
+ public static final int LENS_OPTICAL_STABILIZATION_MODE_OFF = 0;
+
+ /**
+ * <p>Optical stabilization is enabled.</p>
+ * @see CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE
+ */
+ public static final int LENS_OPTICAL_STABILIZATION_MODE_ON = 1;
+
+ //
+ // Enumeration values for CaptureRequest#NOISE_REDUCTION_MODE
+ //
+
+ /**
+ * <p>No noise reduction is applied.</p>
+ * @see CaptureRequest#NOISE_REDUCTION_MODE
+ */
+ public static final int NOISE_REDUCTION_MODE_OFF = 0;
+
+ /**
+ * <p>Noise reduction is applied without reducing frame rate relative to sensor
+ * output. It may be the same as OFF if noise reduction will reduce frame rate
+ * relative to sensor output.</p>
+ * @see CaptureRequest#NOISE_REDUCTION_MODE
+ */
+ public static final int NOISE_REDUCTION_MODE_FAST = 1;
+
+ /**
+ * <p>High-quality noise reduction is applied, at the cost of possibly reduced frame
+ * rate relative to sensor output.</p>
+ * @see CaptureRequest#NOISE_REDUCTION_MODE
+ */
+ public static final int NOISE_REDUCTION_MODE_HIGH_QUALITY = 2;
+
+ /**
+ * <p>MINIMAL noise reduction is applied without reducing frame rate relative to
+ * sensor output. </p>
+ * @see CaptureRequest#NOISE_REDUCTION_MODE
+ */
+ public static final int NOISE_REDUCTION_MODE_MINIMAL = 3;
+
+ /**
+ * <p>Noise reduction is applied at different levels for different output streams,
+ * based on resolution. Streams at maximum recording resolution (see {@link android.hardware.camera2.CameraDevice#createCaptureSession }) or below have noise
+ * reduction applied, while higher-resolution streams have MINIMAL (if supported) or no
+ * noise reduction applied (if MINIMAL is not supported.) The degree of noise reduction
+ * for low-resolution streams is tuned so that frame rate is not impacted, and the quality
+ * is equal to or better than FAST (since it is only applied to lower-resolution outputs,
+ * quality may improve from FAST).</p>
+ * <p>This mode is intended to be used by applications operating in a zero-shutter-lag mode
+ * with YUV or PRIVATE reprocessing, where the application continuously captures
+ * high-resolution intermediate buffers into a circular buffer, from which a final image is
+ * produced via reprocessing when a user takes a picture. For such a use case, the
+ * high-resolution buffers must not have noise reduction applied to maximize efficiency of
+ * preview and to avoid over-applying noise filtering when reprocessing, while
+ * low-resolution buffers (used for recording or preview, generally) need noise reduction
+ * applied for reasonable preview quality.</p>
+ * <p>This mode is guaranteed to be supported by devices that support either the
+ * YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities
+ * ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} lists either of those capabilities) and it will
+ * be the default mode for CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.</p>
+ *
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ * @see CaptureRequest#NOISE_REDUCTION_MODE
+ */
+ public static final int NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG = 4;
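+
+ // A minimal sketch (not part of the SDK source): preferring ZERO_SHUTTER_LAG noise
+ // reduction for a reprocessing-based ZSL pipeline when advertised, falling back to
+ // FAST otherwise.
+ private static int chooseNoiseReductionSketch(CameraCharacteristics chars) {
+     int[] modes = chars.get(
+             CameraCharacteristics.NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES);
+     if (modes != null) {
+         for (int mode : modes) {
+             if (mode == NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG) {
+                 return NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
+             }
+         }
+     }
+     return NOISE_REDUCTION_MODE_FAST;
+ }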
+
+ //
+ // Enumeration values for CaptureRequest#SENSOR_TEST_PATTERN_MODE
+ //
+
+ /**
+ * <p>No test pattern mode is used, and the camera
+ * device returns captures from the image sensor.</p>
+ * <p>This is the default if the key is not set.</p>
+ * @see CaptureRequest#SENSOR_TEST_PATTERN_MODE
+ */
+ public static final int SENSOR_TEST_PATTERN_MODE_OFF = 0;
+
+ /**
+ * <p>Each pixel in <code>[R, G_even, G_odd, B]</code> is replaced by its
+ * respective color channel provided in
+ * {@link CaptureRequest#SENSOR_TEST_PATTERN_DATA android.sensor.testPatternData}.</p>
+ * <p>For example:</p>
+ * <pre><code>android.sensor.testPatternData = [0, 0xFFFFFFFF, 0xFFFFFFFF, 0]
+ * </code></pre>
+ * <p>All green pixels are 100% green. All red/blue pixels are black.</p>
+ * <pre><code>android.sensor.testPatternData = [0xFFFFFFFF, 0, 0xFFFFFFFF, 0]
+ * </code></pre>
+ * <p>All red pixels are 100% red. Only the odd green pixels
+ * are 100% green. All blue pixels are black.</p>
+ *
+ * @see CaptureRequest#SENSOR_TEST_PATTERN_DATA
+ * @see CaptureRequest#SENSOR_TEST_PATTERN_MODE
+ */
+ public static final int SENSOR_TEST_PATTERN_MODE_SOLID_COLOR = 1;
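+
+ // A minimal sketch (not part of the SDK source): the "100% green" example from the
+ // documentation above, expressed as a request. Test pattern support is optional;
+ // SENSOR_AVAILABLE_TEST_PATTERN_MODES should be consulted first.
+ private static void setGreenTestPatternSketch(CaptureRequest.Builder builder) {
+     builder.set(CaptureRequest.SENSOR_TEST_PATTERN_MODE,
+             SENSOR_TEST_PATTERN_MODE_SOLID_COLOR);
+     builder.set(CaptureRequest.SENSOR_TEST_PATTERN_DATA,
+             new int[] {0, 0xFFFFFFFF, 0xFFFFFFFF, 0}); // [R, G_even, G_odd, B]
+ }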
+
+ /**
+ * <p>All pixel data is replaced with an 8-bar color pattern.</p>
+ * <p>The vertical bars (left-to-right) are as follows:</p>
+ * <ul>
+ * <li>100% white</li>
+ * <li>yellow</li>
+ * <li>cyan</li>
+ * <li>green</li>
+ * <li>magenta</li>
+ * <li>red</li>
+ * <li>blue</li>
+ * <li>black</li>
+ * </ul>
+ * <p>In general the image would look like the following:</p>
+ * <pre><code>W Y C G M R B K
+ * W Y C G M R B K
+ * W Y C G M R B K
+ * W Y C G M R B K
+ * W Y C G M R B K
+ * . . . . . . . .
+ * . . . . . . . .
+ * . . . . . . . .
+ *
+ * (B = Blue, K = Black)
+ * </code></pre>
+ * <p>Each bar should take up 1/8 of the sensor pixel array width.
+ * When this is not possible, the bar size should be rounded
+ * down to the nearest integer and the pattern can repeat
+ * on the right side.</p>
+ * <p>Each bar's height must always take up the full sensor
+ * pixel array height.</p>
+ * <p>Each pixel in this test pattern must be set to either
+ * 0% intensity or 100% intensity.</p>
+ * @see CaptureRequest#SENSOR_TEST_PATTERN_MODE
+ */
+ public static final int SENSOR_TEST_PATTERN_MODE_COLOR_BARS = 2;
+
+ /**
+ * <p>The test pattern is similar to COLOR_BARS, except that
+ * each bar should start at its specified color at the top,
+ * and fade to gray at the bottom.</p>
+ * <p>Furthermore, each bar is further subdivided into a left and
+ * right half. The left half should have a smooth gradient,
+ * and the right half should have a quantized gradient.</p>
+ * <p>In particular, the right half should consist of blocks of the
+ * same color for 1/16th of the active sensor pixel array width.</p>
+ * <p>The least significant bits in the quantized gradient should
+ * be copied from the most significant bits of the smooth gradient.</p>
+ * <p>The height of each bar should always be a multiple of 128.
+ * When this is not the case, the pattern should repeat at the bottom
+ * of the image.</p>
+ * @see CaptureRequest#SENSOR_TEST_PATTERN_MODE
+ */
+ public static final int SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY = 3;
+
+ /**
+ * <p>All pixel data is replaced by a pseudo-random sequence
+ * generated from a PN9 512-bit sequence (typically implemented
+ * in hardware with a linear feedback shift register).</p>
+ * <p>The generator should be reset at the beginning of each frame,
+ * and thus each subsequent raw frame with this test pattern should
+ * be exactly the same as the last.</p>
+ * @see CaptureRequest#SENSOR_TEST_PATTERN_MODE
+ */
+ public static final int SENSOR_TEST_PATTERN_MODE_PN9 = 4;
+
+ /**
+ * <p>The first custom test pattern. All custom patterns that are
+ * available only on this camera device are at least this numeric
+ * value.</p>
+ * <p>All of the custom test patterns will be static
+ * (that is, the raw image must not vary from frame to frame).</p>
+ * @see CaptureRequest#SENSOR_TEST_PATTERN_MODE
+ */
+ public static final int SENSOR_TEST_PATTERN_MODE_CUSTOM1 = 256;
+
+ //
+ // Enumeration values for CaptureRequest#SHADING_MODE
+ //
+
+ /**
+ * <p>No lens shading correction is applied.</p>
+ * @see CaptureRequest#SHADING_MODE
+ */
+ public static final int SHADING_MODE_OFF = 0;
+
+ /**
+ * <p>Apply lens shading corrections, without slowing
+ * frame rate relative to sensor raw output.</p>
+ * @see CaptureRequest#SHADING_MODE
+ */
+ public static final int SHADING_MODE_FAST = 1;
+
+ /**
+ * <p>Apply high-quality lens shading correction, at the
+ * cost of possibly reduced frame rate.</p>
+ * @see CaptureRequest#SHADING_MODE
+ */
+ public static final int SHADING_MODE_HIGH_QUALITY = 2;
+
+ //
+ // Enumeration values for CaptureRequest#STATISTICS_FACE_DETECT_MODE
+ //
+
+ /**
+ * <p>Do not include face detection statistics in capture
+ * results.</p>
+ * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE
+ */
+ public static final int STATISTICS_FACE_DETECT_MODE_OFF = 0;
+
+ /**
+ * <p>Return face rectangle and confidence values only.</p>
+ * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE
+ */
+ public static final int STATISTICS_FACE_DETECT_MODE_SIMPLE = 1;
+
+ /**
+ * <p>Return all face
+ * metadata.</p>
+ * <p>In this mode, face rectangles, scores, landmarks, and face IDs are all valid.</p>
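+ * <p>A minimal sketch of reading the face metadata back (assuming {@code result}
+ * is a {@link CaptureResult} delivered to a capture callback):</p>
+ * <pre><code>android.hardware.camera2.params.Face[] faces =
+ *     result.get(CaptureResult.STATISTICS_FACES);
+ * </code></pre>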
+ * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE
+ */
+ public static final int STATISTICS_FACE_DETECT_MODE_FULL = 2;
+
+ //
+ // Enumeration values for CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE
+ //
+
+ /**
+ * <p>Do not include a lens shading map in the capture result.</p>
+ * @see CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE
+ */
+ public static final int STATISTICS_LENS_SHADING_MAP_MODE_OFF = 0;
+
+ /**
+ * <p>Include a lens shading map in the capture result.</p>
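+ * <p>A minimal sketch of requesting and reading the map (assuming {@code builder}
+ * is a {@code CaptureRequest.Builder}, {@code result} is a delivered
+ * {@link CaptureResult}, and {@code LensShadingMap} is imported from
+ * {@code android.hardware.camera2.params}):</p>
+ * <pre><code>builder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
+ *     CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE_ON);
+ * // Later, in the capture callback:
+ * LensShadingMap map = result.get(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
+ * </code></pre>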
+ * @see CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE
+ */
+ public static final int STATISTICS_LENS_SHADING_MAP_MODE_ON = 1;
+
+ //
+ // Enumeration values for CaptureRequest#TONEMAP_MODE
+ //
+
+ /**
+ * <p>Use the tone mapping curve specified in
+ * the {@link CaptureRequest#TONEMAP_CURVE android.tonemap.curve}* entries.</p>
+ * <p>All color enhancement and tonemapping must be disabled, except
+ * for applying the tonemapping curve specified by
+ * {@link CaptureRequest#TONEMAP_CURVE android.tonemap.curve}.</p>
+ * <p>Must not slow down frame rate relative to raw
+ * sensor output.</p>
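+ * <p>A minimal sketch of applying a linear curve (assuming {@code builder} is a
+ * {@code CaptureRequest.Builder} and {@code TonemapCurve} is imported from
+ * {@code android.hardware.camera2.params}):</p>
+ * <pre><code>float[] linear = {0f, 0f, 1f, 1f}; // (Pin, Pout) control points
+ * builder.set(CaptureRequest.TONEMAP_MODE, CaptureRequest.TONEMAP_MODE_CONTRAST_CURVE);
+ * builder.set(CaptureRequest.TONEMAP_CURVE, new TonemapCurve(linear, linear, linear));
+ * </code></pre>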
+ *
+ * @see CaptureRequest#TONEMAP_CURVE
+ * @see CaptureRequest#TONEMAP_MODE
+ */
+ public static final int TONEMAP_MODE_CONTRAST_CURVE = 0;
+
+ /**
+ * <p>Advanced gamma mapping and color enhancement may be applied, without
+ * reducing frame rate compared to raw sensor output.</p>
+ * @see CaptureRequest#TONEMAP_MODE
+ */
+ public static final int TONEMAP_MODE_FAST = 1;
+
+ /**
+ * <p>High-quality gamma mapping and color enhancement will be applied, at
+ * the cost of possibly reduced frame rate compared to raw sensor output.</p>
+ * @see CaptureRequest#TONEMAP_MODE
+ */
+ public static final int TONEMAP_MODE_HIGH_QUALITY = 2;
+
+ /**
+ * <p>Use the gamma value specified in {@link CaptureRequest#TONEMAP_GAMMA android.tonemap.gamma} to perform
+ * tonemapping.</p>
+ * <p>All color enhancement and tonemapping must be disabled, except
+ * for applying the tonemapping curve specified by {@link CaptureRequest#TONEMAP_GAMMA android.tonemap.gamma}.</p>
+ * <p>Must not slow down frame rate relative to raw sensor output.</p>
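+ * <p>A minimal sketch (assuming {@code builder} is a {@code CaptureRequest.Builder}):</p>
+ * <pre><code>builder.set(CaptureRequest.TONEMAP_MODE, CaptureRequest.TONEMAP_MODE_GAMMA_VALUE);
+ * builder.set(CaptureRequest.TONEMAP_GAMMA, 2.2f); // a common display gamma
+ * </code></pre>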
+ *
+ * @see CaptureRequest#TONEMAP_GAMMA
+ * @see CaptureRequest#TONEMAP_MODE
+ */
+ public static final int TONEMAP_MODE_GAMMA_VALUE = 3;
+
+ /**
+ * <p>Use the preset tonemapping curve specified in
+ * {@link CaptureRequest#TONEMAP_PRESET_CURVE android.tonemap.presetCurve} to perform tonemapping.</p>
+ * <p>All color enhancement and tonemapping must be disabled, except
+ * for applying the tonemapping curve specified by
+ * {@link CaptureRequest#TONEMAP_PRESET_CURVE android.tonemap.presetCurve}.</p>
+ * <p>Must not slow down frame rate relative to raw sensor output.</p>
+ *
+ * @see CaptureRequest#TONEMAP_PRESET_CURVE
+ * @see CaptureRequest#TONEMAP_MODE
+ */
+ public static final int TONEMAP_MODE_PRESET_CURVE = 4;
+
+ //
+ // Enumeration values for CaptureRequest#TONEMAP_PRESET_CURVE
+ //
+
+ /**
+ * <p>Tonemapping curve is defined by sRGB.</p>
+ * @see CaptureRequest#TONEMAP_PRESET_CURVE
+ */
+ public static final int TONEMAP_PRESET_CURVE_SRGB = 0;
+
+ /**
+ * <p>Tonemapping curve is defined by ITU-R BT.709.</p>
+ * @see CaptureRequest#TONEMAP_PRESET_CURVE
+ */
+ public static final int TONEMAP_PRESET_CURVE_REC709 = 1;
+
+ //
+ // Enumeration values for CaptureResult#CONTROL_AE_STATE
+ //
+
+ /**
+ * <p>AE is off or recently reset.</p>
+ * <p>When a camera device is opened, it starts in
+ * this state. This is a transient state; the camera device may skip reporting
+ * this state in the capture result.</p>
+ * @see CaptureResult#CONTROL_AE_STATE
+ */
+ public static final int CONTROL_AE_STATE_INACTIVE = 0;
+
+ /**
+ * <p>AE doesn't yet have a good set of control values
+ * for the current scene.</p>
+ * <p>This is a transient state; the camera device may skip
+ * reporting this state in the capture result.</p>
+ * @see CaptureResult#CONTROL_AE_STATE
+ */
+ public static final int CONTROL_AE_STATE_SEARCHING = 1;
+
+ /**
+ * <p>AE has a good set of control values for the
+ * current scene.</p>
+ * @see CaptureResult#CONTROL_AE_STATE
+ */
+ public static final int CONTROL_AE_STATE_CONVERGED = 2;
+
+ /**
+ * <p>AE has been locked.</p>
+ * @see CaptureResult#CONTROL_AE_STATE
+ */
+ public static final int CONTROL_AE_STATE_LOCKED = 3;
+
+ /**
+ * <p>AE has a good set of control values, but flash
+ * needs to be fired for good quality still
+ * capture.</p>
+ * @see CaptureResult#CONTROL_AE_STATE
+ */
+ public static final int CONTROL_AE_STATE_FLASH_REQUIRED = 4;
+
+ /**
+ * <p>AE has been asked to do a precapture sequence
+ * and is currently executing it.</p>
+ * <p>Precapture can be triggered by setting
+ * {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} to START. A currently
+ * active precapture metering sequence, or one that has completed (if it caused a
+ * camera device internal AE lock), can be canceled by setting
+ * {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} to CANCEL.</p>
+ * <p>Once PRECAPTURE completes, AE will transition to CONVERGED
+ * or FLASH_REQUIRED as appropriate. This is a transient
+ * state; the camera device may skip reporting this state in
+ * the capture result.</p>
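+ * <p>A minimal sketch of starting a precapture sequence (assuming {@code builder}
+ * and {@code session} already exist and AE is in a mode other than OFF):</p>
+ * <pre><code>builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
+ *     CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
+ * session.capture(builder.build(), null, null); // one-shot trigger request
+ * </code></pre>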
+ *
+ * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
+ * @see CaptureResult#CONTROL_AE_STATE
+ */
+ public static final int CONTROL_AE_STATE_PRECAPTURE = 5;
+
+ //
+ // Enumeration values for CaptureResult#CONTROL_AF_STATE
+ //
+
+ /**
+ * <p>AF is off or has not yet tried to scan/been asked
+ * to scan.</p>
+ * <p>When a camera device is opened, it starts in this
+ * state. This is a transient state; the camera device may
+ * skip reporting this state in the capture result.</p>
+ * @see CaptureResult#CONTROL_AF_STATE
+ */
+ public static final int CONTROL_AF_STATE_INACTIVE = 0;
+
+ /**
+ * <p>AF is currently performing an AF scan initiated by the
+ * camera device in a continuous autofocus mode.</p>
+ * <p>Only used by CONTINUOUS_* AF modes. This is a transient
+ * state; the camera device may skip reporting this state in
+ * the capture result.</p>
+ * @see CaptureResult#CONTROL_AF_STATE
+ */
+ public static final int CONTROL_AF_STATE_PASSIVE_SCAN = 1;
+
+ /**
+ * <p>AF currently believes it is in focus, but may
+ * restart scanning at any time.</p>
+ * <p>Only used by CONTINUOUS_* AF modes. This is a transient
+ * state; the camera device may skip reporting this state in
+ * the capture result.</p>
+ * @see CaptureResult#CONTROL_AF_STATE
+ */
+ public static final int CONTROL_AF_STATE_PASSIVE_FOCUSED = 2;
+
+ /**
+ * <p>AF is performing an AF scan because it was
+ * triggered by AF trigger.</p>
+ * <p>Only used by AUTO or MACRO AF modes. This is a transient
+ * state; the camera device may skip reporting this state in
+ * the capture result.</p>
+ * @see CaptureResult#CONTROL_AF_STATE
+ */
+ public static final int CONTROL_AF_STATE_ACTIVE_SCAN = 3;
+
+ /**
+ * <p>AF believes it is focused correctly and has locked
+ * focus.</p>
+ * <p>This state is reached only after an explicit START AF trigger has been
+ * sent ({@link CaptureRequest#CONTROL_AF_TRIGGER android.control.afTrigger}), when good focus has been obtained.</p>
+ * <p>The lens will remain stationary until the AF mode ({@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}) is changed or
+ * a new AF trigger is sent to the camera device ({@link CaptureRequest#CONTROL_AF_TRIGGER android.control.afTrigger}).</p>
+ *
+ * @see CaptureRequest#CONTROL_AF_MODE
+ * @see CaptureRequest#CONTROL_AF_TRIGGER
+ * @see CaptureResult#CONTROL_AF_STATE
+ */
+ public static final int CONTROL_AF_STATE_FOCUSED_LOCKED = 4;
+
+ /**
+ * <p>AF has failed to focus successfully and has locked
+ * focus.</p>
+ * <p>This state is reached only after an explicit START AF trigger has been
+ * sent ({@link CaptureRequest#CONTROL_AF_TRIGGER android.control.afTrigger}), when good focus cannot be obtained.</p>
+ * <p>The lens will remain stationary until the AF mode ({@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}) is changed or
+ * a new AF trigger is sent to the camera device ({@link CaptureRequest#CONTROL_AF_TRIGGER android.control.afTrigger}).</p>
+ *
+ * @see CaptureRequest#CONTROL_AF_MODE
+ * @see CaptureRequest#CONTROL_AF_TRIGGER
+ * @see CaptureResult#CONTROL_AF_STATE
+ */
+ public static final int CONTROL_AF_STATE_NOT_FOCUSED_LOCKED = 5;
+
+ /**
+ * <p>AF finished a passive scan without finding focus,
+ * and may restart scanning at any time.</p>
+ * <p>Only used by CONTINUOUS_* AF modes. This is a transient state; the camera
+ * device may skip reporting this state in the capture result.</p>
+ * <p>LEGACY camera devices do not support this state. When a passive
+ * scan has finished, it will always go to PASSIVE_FOCUSED.</p>
+ * @see CaptureResult#CONTROL_AF_STATE
+ */
+ public static final int CONTROL_AF_STATE_PASSIVE_UNFOCUSED = 6;
+
+ //
+ // Enumeration values for CaptureResult#CONTROL_AWB_STATE
+ //
+
+ /**
+ * <p>AWB is not in auto mode, or has not yet started metering.</p>
+ * <p>When a camera device is opened, it starts in this
+ * state. This is a transient state; the camera device may
+ * skip reporting this state in the capture result.</p>
+ * @see CaptureResult#CONTROL_AWB_STATE
+ */
+ public static final int CONTROL_AWB_STATE_INACTIVE = 0;
+
+ /**
+ * <p>AWB doesn't yet have a good set of control
+ * values for the current scene.</p>
+ * <p>This is a transient state; the camera device
+ * may skip reporting this state in the capture result.</p>
+ * @see CaptureResult#CONTROL_AWB_STATE
+ */
+ public static final int CONTROL_AWB_STATE_SEARCHING = 1;
+
+ /**
+ * <p>AWB has a good set of control values for the
+ * current scene.</p>
+ * @see CaptureResult#CONTROL_AWB_STATE
+ */
+ public static final int CONTROL_AWB_STATE_CONVERGED = 2;
+
+ /**
+ * <p>AWB has been locked.</p>
+ * @see CaptureResult#CONTROL_AWB_STATE
+ */
+ public static final int CONTROL_AWB_STATE_LOCKED = 3;
+
+ //
+ // Enumeration values for CaptureResult#FLASH_STATE
+ //
+
+ /**
+ * <p>No flash on camera.</p>
+ * @see CaptureResult#FLASH_STATE
+ */
+ public static final int FLASH_STATE_UNAVAILABLE = 0;
+
+ /**
+ * <p>Flash is charging and cannot be fired.</p>
+ * @see CaptureResult#FLASH_STATE
+ */
+ public static final int FLASH_STATE_CHARGING = 1;
+
+ /**
+ * <p>Flash is ready to fire.</p>
+ * @see CaptureResult#FLASH_STATE
+ */
+ public static final int FLASH_STATE_READY = 2;
+
+ /**
+ * <p>Flash fired for this capture.</p>
+ * @see CaptureResult#FLASH_STATE
+ */
+ public static final int FLASH_STATE_FIRED = 3;
+
+ /**
+ * <p>Flash partially illuminated this frame.</p>
+ * <p>This is usually due to the next or previous frame having
+ * the flash fire, and the flash spilling into this capture
+ * due to hardware limitations.</p>
+ * @see CaptureResult#FLASH_STATE
+ */
+ public static final int FLASH_STATE_PARTIAL = 4;
+
+ //
+ // Enumeration values for CaptureResult#LENS_STATE
+ //
+
+ /**
+ * <p>The lens parameters ({@link CaptureRequest#LENS_FOCAL_LENGTH android.lens.focalLength}, {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance},
+ * {@link CaptureRequest#LENS_FILTER_DENSITY android.lens.filterDensity} and {@link CaptureRequest#LENS_APERTURE android.lens.aperture}) are not changing.</p>
+ *
+ * @see CaptureRequest#LENS_APERTURE
+ * @see CaptureRequest#LENS_FILTER_DENSITY
+ * @see CaptureRequest#LENS_FOCAL_LENGTH
+ * @see CaptureRequest#LENS_FOCUS_DISTANCE
+ * @see CaptureResult#LENS_STATE
+ */
+ public static final int LENS_STATE_STATIONARY = 0;
+
+ /**
+ * <p>One or several of the lens parameters
+ * ({@link CaptureRequest#LENS_FOCAL_LENGTH android.lens.focalLength}, {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance},
+ * {@link CaptureRequest#LENS_FILTER_DENSITY android.lens.filterDensity} or {@link CaptureRequest#LENS_APERTURE android.lens.aperture}) is
+ * currently changing.</p>
+ *
+ * @see CaptureRequest#LENS_APERTURE
+ * @see CaptureRequest#LENS_FILTER_DENSITY
+ * @see CaptureRequest#LENS_FOCAL_LENGTH
+ * @see CaptureRequest#LENS_FOCUS_DISTANCE
+ * @see CaptureResult#LENS_STATE
+ */
+ public static final int LENS_STATE_MOVING = 1;
+
+ //
+ // Enumeration values for CaptureResult#STATISTICS_SCENE_FLICKER
+ //
+
+ /**
+ * <p>The camera device does not detect any flickering illumination
+ * in the current scene.</p>
+ * @see CaptureResult#STATISTICS_SCENE_FLICKER
+ */
+ public static final int STATISTICS_SCENE_FLICKER_NONE = 0;
+
+ /**
+ * <p>The camera device detects illumination flickering at 50Hz
+ * in the current scene.</p>
+ * @see CaptureResult#STATISTICS_SCENE_FLICKER
+ */
+ public static final int STATISTICS_SCENE_FLICKER_50HZ = 1;
+
+ /**
+ * <p>The camera device detects illumination flickering at 60Hz
+ * in the current scene.</p>
+ * @see CaptureResult#STATISTICS_SCENE_FLICKER
+ */
+ public static final int STATISTICS_SCENE_FLICKER_60HZ = 2;
+
+ //
+ // Enumeration values for CaptureResult#SYNC_FRAME_NUMBER
+ //
+
+ /**
+ * <p>The current result is not yet fully synchronized to any request.</p>
+ * <p>Synchronization is in progress, and reading metadata from this
+ * result may include a mix of data that have taken effect since the
+ * last synchronization time.</p>
+ * <p>In some future result, within {@link CameraCharacteristics#SYNC_MAX_LATENCY android.sync.maxLatency} frames,
+ * this value will update to the actual frame number
+ * the result is guaranteed to be synchronized to (as long as the
+ * request settings remain constant).</p>
+ *
+ * @see CameraCharacteristics#SYNC_MAX_LATENCY
+ * @see CaptureResult#SYNC_FRAME_NUMBER
+ * @hide
+ */
+ public static final int SYNC_FRAME_NUMBER_CONVERGING = -1;
+
+ /**
+ * <p>The current result's synchronization status is unknown.</p>
+ * <p>The result may have already converged, or it may be in
+ * progress. Reading from this result may include some mix
+ * of settings from past requests.</p>
+ * <p>After a settings change, the new settings will eventually all
+ * take effect for the output buffers and results. However, this
+ * value will not change when that happens. Altering settings
+ * rapidly may provide outcomes using mixes of settings from recent
+ * requests.</p>
+ * <p>This value is intended primarily for backwards compatibility with
+ * the older camera implementations (for android.hardware.Camera).</p>
+ * @see CaptureResult#SYNC_FRAME_NUMBER
+ * @hide
+ */
+ public static final int SYNC_FRAME_NUMBER_UNKNOWN = -2;
+
+ /*~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
+ * End generated code
+ *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~O@*/
+
+}
diff --git a/android/hardware/camera2/CaptureFailure.java b/android/hardware/camera2/CaptureFailure.java
new file mode 100644
index 00000000..fbe0839d
--- /dev/null
+++ b/android/hardware/camera2/CaptureFailure.java
@@ -0,0 +1,158 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2;
+
+import android.annotation.NonNull;
+import android.annotation.IntDef;
+
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+
+/**
+ * A report of failed capture for a single image capture from the image sensor.
+ *
+ * <p>CaptureFailures are produced by a {@link CameraDevice} if processing a
+ * {@link CaptureRequest} fails, either partially or fully. Use {@link #getReason}
+ * to determine the specific nature of the failed capture.</p>
+ *
+ * <p>Receiving a CaptureFailure means that the metadata associated with that frame number
+ * has been dropped -- no {@link CaptureResult} with the same frame number will be
+ * produced.</p>
+ */
+public class CaptureFailure {
+ /**
+ * The {@link CaptureResult} has been dropped this frame only due to an error
+ * in the framework.
+ *
+ * @see #getReason()
+ */
+ public static final int REASON_ERROR = 0;
+
+ /**
+ * The capture has failed due to a {@link CameraCaptureSession#abortCaptures} call from the
+ * application.
+ *
+ * @see #getReason()
+ */
+ public static final int REASON_FLUSHED = 1;
+
+ /** @hide */
+ @Retention(RetentionPolicy.SOURCE)
+ @IntDef(prefix = {"REASON_"}, value =
+ {REASON_ERROR,
+ REASON_FLUSHED })
+ public @interface FailureReason {};
+
+ private final CaptureRequest mRequest;
+ private final int mReason;
+ private final boolean mDropped;
+ private final int mSequenceId;
+ private final long mFrameNumber;
+
+ /**
+ * @hide
+ */
+ public CaptureFailure(CaptureRequest request, int reason,
+ boolean dropped, int sequenceId, long frameNumber) {
+ mRequest = request;
+ mReason = reason;
+ mDropped = dropped;
+ mSequenceId = sequenceId;
+ mFrameNumber = frameNumber;
+ }
+
+ /**
+ * Get the request associated with this failed capture.
+ *
+ * <p>Whenever a request is unsuccessfully captured and
+ * {@link CameraCaptureSession.CaptureCallback#onCaptureFailed} is invoked,
+ * the failure's {@code getRequest()} will return that {@code request}.
+ * </p>
+ *
+ * <p>In particular,
+ * <pre><code>captureSession.capture(someRequest, new CaptureCallback() {
+ *     {@literal @}Override
+ *     public void onCaptureFailed(CameraCaptureSession session,
+ *             CaptureRequest myRequest, CaptureFailure myFailure) {
+ *         assert(myFailure.getRequest().equals(myRequest));
+ *     }
+ * }, handler);
+ * </code></pre>
+ * </p>
+ *
+ * @return The request associated with this failed capture. Never {@code null}.
+ */
+ @NonNull
+ public CaptureRequest getRequest() {
+ return mRequest;
+ }
+
+ /**
+ * Get the frame number associated with this failed capture.
+ *
+ * <p>Whenever a request has been processed, regardless of failure or success,
+ * it gets a unique frame number assigned to its future result or failure.</p>
+ *
+ * <p>This value monotonically increments, starting with 0,
+ * for every new result or failure; and the scope is the lifetime of the
+ * {@link CameraDevice}.</p>
+ *
+ * @return long frame number
+ */
+ public long getFrameNumber() {
+ return mFrameNumber;
+ }
+
+ /**
+ * Determine why the request was dropped, whether due to an error or to a user
+ * action.
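+ *
+ * <p>A minimal sketch of handling the reason in a capture callback (assuming
+ * {@code failure} is the delivered {@code CaptureFailure}):</p>
+ * <pre><code>if (failure.getReason() == CaptureFailure.REASON_FLUSHED) {
+ *     // Expected after abortCaptures(); not an application error.
+ * } else {
+ *     // REASON_ERROR: this frame's metadata was lost to a framework error.
+ * }
+ * </code></pre>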
+ *
+ * @return int The reason code.
+ *
+ * @see #REASON_ERROR
+ * @see #REASON_FLUSHED
+ */
+ @FailureReason
+ public int getReason() {
+ return mReason;
+ }
+
+ /**
+ * Determine if the image was captured from the camera.
+ *
+ * <p>If the image was not captured, no image buffers will be available.
+ * If the image was captured, then image buffers may be available.</p>
+ *
+ * @return boolean True if the image was captured, false otherwise.
+ */
+ public boolean wasImageCaptured() {
+ return !mDropped;
+ }
+
+ /**
+ * The sequence ID for this failed capture that was returned by the
+ * {@link CameraCaptureSession#capture} family of functions.
+ *
+ * <p>The sequence ID is a unique monotonically increasing value starting from 0,
+ * incremented every time a new group of requests is submitted to the CameraDevice.</p>
+ *
+ * @return int The ID for the sequence of requests that this capture failure is the result of
+ *
+ * @see CameraDevice.CaptureCallback#onCaptureSequenceCompleted
+ */
+ public int getSequenceId() {
+ return mSequenceId;
+ }
+}
diff --git a/android/hardware/camera2/CaptureRequest.java b/android/hardware/camera2/CaptureRequest.java
new file mode 100644
index 00000000..c41fc020
--- /dev/null
+++ b/android/hardware/camera2/CaptureRequest.java
@@ -0,0 +1,2901 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2;
+
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.hardware.camera2.impl.CameraMetadataNative;
+import android.hardware.camera2.impl.PublicKey;
+import android.hardware.camera2.impl.SyntheticKey;
+import android.hardware.camera2.utils.HashCodeHelpers;
+import android.hardware.camera2.utils.TypeReference;
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.view.Surface;
+
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Objects;
+
+
+/**
+ * <p>An immutable package of settings and outputs needed to capture a single
+ * image from the camera device.</p>
+ *
+ * <p>Contains the configuration for the capture hardware (sensor, lens, flash),
+ * the processing pipeline, the control algorithms, and the output buffers. Also
+ * contains the list of target Surfaces to send image data to for this
+ * capture.</p>
+ *
+ * <p>CaptureRequests can be created by using a {@link Builder} instance,
+ * obtained by calling {@link CameraDevice#createCaptureRequest}</p>
+ *
+ * <p>CaptureRequests are given to {@link CameraCaptureSession#capture} or
+ * {@link CameraCaptureSession#setRepeatingRequest} to capture images from a camera.</p>
+ *
+ * <p>Each request can specify a different subset of target Surfaces for the
+ * camera to send the captured data to. All the surfaces used in a request must
+ * be part of the surface list given to the last call to
+ * {@link CameraDevice#createCaptureSession}, when the request is submitted to the
+ * session.</p>
+ *
+ * <p>For example, a request meant for repeating preview might only include the
+ * Surface for the preview SurfaceView or SurfaceTexture, while a
+ * high-resolution still capture would also include a Surface from a ImageReader
+ * configured for high-resolution JPEG images.</p>
+ *
+ * <p>A reprocess capture request allows a previously-captured image from the camera device to be
+ * sent back to the device for further processing. It can be created with
+ * {@link CameraDevice#createReprocessCaptureRequest}, and used with a reprocessable capture session
+ * created with {@link CameraDevice#createReprocessableCaptureSession}.</p>
+ *
+ * @see CameraCaptureSession#capture
+ * @see CameraCaptureSession#setRepeatingRequest
+ * @see CameraCaptureSession#captureBurst
+ * @see CameraCaptureSession#setRepeatingBurst
+ * @see CameraDevice#createCaptureRequest
+ * @see CameraDevice#createReprocessCaptureRequest
+ */
+public final class CaptureRequest extends CameraMetadata<CaptureRequest.Key<?>>
+ implements Parcelable {
+
+ /**
+ * A {@code Key} is used to do capture request field lookups with
+ * {@link CaptureRequest#get} or to set fields with
+ * {@link CaptureRequest.Builder#set(Key, Object)}.
+ *
+ * <p>For example, to set the crop rectangle for the next capture:
+ * <pre><code>
+ * Rect cropRectangle = new Rect(0, 0, 640, 480);
+ * captureRequestBuilder.set(SCALER_CROP_REGION, cropRectangle);
+ * </code></pre>
+ * </p>
+ *
+ * <p>To enumerate over all possible keys for {@link CaptureRequest}, see
+ * {@link CameraCharacteristics#getAvailableCaptureRequestKeys}.</p>
+ *
+ * @see CaptureRequest#get
+ * @see CameraCharacteristics#getAvailableCaptureRequestKeys
+ */
+ public final static class Key<T> {
+ private final CameraMetadataNative.Key<T> mKey;
+
+ /**
+ * Visible for testing and vendor extensions only.
+ *
+ * @hide
+ */
+ public Key(String name, Class<T> type, long vendorId) {
+ mKey = new CameraMetadataNative.Key<T>(name, type, vendorId);
+ }
+
+ /**
+ * Visible for testing and vendor extensions only.
+ *
+ * @hide
+ */
+ public Key(String name, Class<T> type) {
+ mKey = new CameraMetadataNative.Key<T>(name, type);
+ }
+
+ /**
+ * Visible for testing and vendor extensions only.
+ *
+ * @hide
+ */
+ public Key(String name, TypeReference<T> typeReference) {
+ mKey = new CameraMetadataNative.Key<T>(name, typeReference);
+ }
+
+ /**
+ * Return a camelCase, period-separated name formatted like:
+ * {@code "root.section[.subsections].name"}.
+ *
+ * <p>Built-in keys exposed by the Android SDK are always prefixed with {@code "android."};
+ * keys that are device/platform-specific are prefixed with {@code "com."}.</p>
+ *
+ * <p>For example, {@code CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP} would
+ * have a name of {@code "android.scaler.streamConfigurationMap"}; whereas a
+ * device-specific key might look like {@code "com.google.nexus.data.private"}.</p>
+ *
+ * @return String representation of the key name
+ */
+ @NonNull
+ public String getName() {
+ return mKey.getName();
+ }
+
+ /**
+ * Return vendor tag id.
+ *
+ * @hide
+ */
+ public long getVendorId() {
+ return mKey.getVendorId();
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public final int hashCode() {
+ return mKey.hashCode();
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @SuppressWarnings("unchecked")
+ @Override
+ public final boolean equals(Object o) {
+ return o instanceof Key && ((Key<T>)o).mKey.equals(mKey);
+ }
+
+ /**
+ * Return this {@link Key} as a string representation.
+ *
+ * <p>{@code "CaptureRequest.Key(%s)"}, where {@code %s} represents
+ * the name of this key as returned by {@link #getName}.</p>
+ *
+ * @return string representation of {@link Key}
+ */
+ @NonNull
+ @Override
+ public String toString() {
+ return String.format("CaptureRequest.Key(%s)", mKey.getName());
+ }
+
+ /**
+ * Visible for CameraMetadataNative implementation only; do not use.
+ *
+ * TODO: Make this private or remove it altogether.
+ *
+ * @hide
+ */
+ public CameraMetadataNative.Key<T> getNativeKey() {
+ return mKey;
+ }
+
+ @SuppressWarnings({ "unchecked" })
+ /*package*/ Key(CameraMetadataNative.Key<?> nativeKey) {
+ mKey = (CameraMetadataNative.Key<T>) nativeKey;
+ }
+ }
+
+ private final HashSet<Surface> mSurfaceSet;
+ private final CameraMetadataNative mSettings;
+ private boolean mIsReprocess;
+ // If this request is part of a constrained high speed request list that was created by
+ // {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}
+ private boolean mIsPartOfCHSRequestList = false;
+ // Each reprocess request must be tied to a reprocessable session ID.
+ // Valid only for reprocess requests (mIsReprocess == true).
+ private int mReprocessableSessionId;
+
+ private Object mUserTag;
+
+ /**
+ * Construct empty request.
+ *
+ * Used by Binder to unparcel this object only.
+ */
+ private CaptureRequest() {
+ mSettings = new CameraMetadataNative();
+ setNativeInstance(mSettings);
+ mSurfaceSet = new HashSet<Surface>();
+ mIsReprocess = false;
+ mReprocessableSessionId = CameraCaptureSession.SESSION_ID_NONE;
+ }
+
+ /**
+ * Clone from source capture request.
+ *
+ * Used by the Builder to create an immutable copy.
+ */
+ @SuppressWarnings("unchecked")
+ private CaptureRequest(CaptureRequest source) {
+ mSettings = new CameraMetadataNative(source.mSettings);
+ setNativeInstance(mSettings);
+ mSurfaceSet = (HashSet<Surface>) source.mSurfaceSet.clone();
+ mIsReprocess = source.mIsReprocess;
+ mIsPartOfCHSRequestList = source.mIsPartOfCHSRequestList;
+ mReprocessableSessionId = source.mReprocessableSessionId;
+ mUserTag = source.mUserTag;
+ }
+
+ /**
+ * Take ownership of passed-in settings.
+ *
+ * Used by the Builder to create a mutable CaptureRequest.
+ *
+ * @param settings Settings for this capture request.
+ * @param isReprocess Indicates whether to create a reprocess capture request. {@code true}
+ * to create a reprocess capture request. {@code false} to create a regular
+ * capture request.
+ * @param reprocessableSessionId The ID of the camera capture session this capture is created
+ * for. This is used to validate if the application submits a
+ * reprocess capture request to the same session where
+ * the {@link TotalCaptureResult}, used to create the reprocess
+ * capture, came from.
+ *
+ * @throws IllegalArgumentException If creating a reprocess capture request with an invalid
+ * reprocessableSessionId.
+ *
+ * @see CameraDevice#createReprocessCaptureRequest
+ */
+ private CaptureRequest(CameraMetadataNative settings, boolean isReprocess,
+ int reprocessableSessionId) {
+ mSettings = CameraMetadataNative.move(settings);
+ setNativeInstance(mSettings);
+ mSurfaceSet = new HashSet<Surface>();
+ mIsReprocess = isReprocess;
+ if (isReprocess) {
+ if (reprocessableSessionId == CameraCaptureSession.SESSION_ID_NONE) {
+ throw new IllegalArgumentException("Create a reprocess capture request with an " +
+ "invalid session ID: " + reprocessableSessionId);
+ }
+ mReprocessableSessionId = reprocessableSessionId;
+ } else {
+ mReprocessableSessionId = CameraCaptureSession.SESSION_ID_NONE;
+ }
+ }
+
+ /**
+ * Get a capture request field value.
+ *
+ * <p>The field definitions can be found in {@link CaptureRequest}.</p>
+ *
+ * <p>Querying the value for the same key more than once will return a value
+ * which is equal to the previously queried value.</p>
+ *
+ * @throws IllegalArgumentException if the key was not valid
+ *
+ * @param key The result field to read.
+ * @return The value of that key, or {@code null} if the field is not set.
+ */
+ @Nullable
+ public <T> T get(Key<T> key) {
+ return mSettings.get(key);
+ }
+
+ /**
+ * {@inheritDoc}
+ * @hide
+ */
+ @SuppressWarnings("unchecked")
+ @Override
+ protected <T> T getProtected(Key<?> key) {
+ return (T) mSettings.get(key);
+ }
+
+ /**
+ * {@inheritDoc}
+ * @hide
+ */
+ @SuppressWarnings("unchecked")
+ @Override
+ protected Class<Key<?>> getKeyClass() {
+ Object thisClass = Key.class;
+ return (Class<Key<?>>)thisClass;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ @NonNull
+ public List<Key<?>> getKeys() {
+ // Force the javadoc for this function to show up on the CaptureRequest page
+ return super.getKeys();
+ }
+
+ /**
+ * Retrieve the tag for this request, if any.
+ *
+ * <p>This tag is not used for anything by the camera device, but can be
+ * used by an application to easily identify a CaptureRequest when it is
+ * returned by
+ * {@link CameraCaptureSession.CaptureCallback#onCaptureCompleted CaptureCallback.onCaptureCompleted}
+ * </p>
+ *
+ * @return the last tag Object set on this request, or {@code null} if
+ * no tag has been set.
+ * @see Builder#setTag
+ */
+ @Nullable
+ public Object getTag() {
+ return mUserTag;
+ }
+
+ /**
+ * Determine if this is a reprocess capture request.
+ *
+ * <p>A reprocess capture request produces output images from an input buffer from the
+ * {@link CameraCaptureSession}'s input {@link Surface}. A reprocess capture request can be
+ * created by {@link CameraDevice#createReprocessCaptureRequest}.</p>
+ *
+ * @return {@code true} if this is a reprocess capture request. {@code false} if this is not a
+ * reprocess capture request.
+ *
+ * @see CameraDevice#createReprocessCaptureRequest
+ */
+ public boolean isReprocess() {
+ return mIsReprocess;
+ }
+
+ /**
+ * <p>Determine if this request is part of a constrained high speed request list that was
+ * created by
+ * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
+ * A constrained high speed request list contains constrained high speed capture requests
+ * in a specific interleaved pattern that is suitable for high speed preview/video streaming. An
+ * active constrained high speed capture session only accepts constrained high speed request
+ * lists. This method can be used to perform a sanity check when a constrained high speed
+ * capture session receives a request list via {@link CameraCaptureSession#setRepeatingBurst} or
+ * {@link CameraCaptureSession#captureBurst}.</p>
+ *
+ *
+ * @return {@code true} if this request is part of a constrained high speed request list,
+ * {@code false} otherwise.
+ *
+ * @hide
+ */
+ public boolean isPartOfCRequestList() {
+ return mIsPartOfCHSRequestList;
+ }
+
+ /**
+ * Returns a copy of the underlying {@link CameraMetadataNative}.
+ * @hide
+ */
+ public CameraMetadataNative getNativeCopy() {
+ return new CameraMetadataNative(mSettings);
+ }
+
+ /**
+ * Get the reprocessable session ID this reprocess capture request is associated with.
+ *
+ * @return the reprocessable session ID this reprocess capture request is associated with
+ *
+ * @throws IllegalStateException if this capture request is not a reprocess capture request.
+ * @hide
+ */
+ public int getReprocessableSessionId() {
+ if (!mIsReprocess ||
+ mReprocessableSessionId == CameraCaptureSession.SESSION_ID_NONE) {
+ throw new IllegalStateException("Getting the reprocessable session ID for a " +
+ "non-reprocess capture request is illegal.");
+ }
+ return mReprocessableSessionId;
+ }
+
+ /**
+ * Determine whether this CaptureRequest is equal to another CaptureRequest.
+ *
+ * <p>A request is considered equal to another if its set of key/value pairs is equal, its
+ * list of output surfaces is equal, the user tag is equal, and the return values of
+ * isReprocess() are equal.</p>
+ *
+ * @param other Another instance of CaptureRequest.
+ *
+ * @return True if the requests are the same, false otherwise.
+ */
+ @Override
+ public boolean equals(Object other) {
+ return other instanceof CaptureRequest
+ && equals((CaptureRequest)other);
+ }
+
+ private boolean equals(CaptureRequest other) {
+ return other != null
+ && Objects.equals(mUserTag, other.mUserTag)
+ && mSurfaceSet.equals(other.mSurfaceSet)
+ && mSettings.equals(other.mSettings)
+ && mIsReprocess == other.mIsReprocess
+ && mReprocessableSessionId == other.mReprocessableSessionId;
+ }
+
+ @Override
+ public int hashCode() {
+ return HashCodeHelpers.hashCodeGeneric(mSettings, mSurfaceSet, mUserTag);
+ }
+
+ public static final Parcelable.Creator<CaptureRequest> CREATOR =
+ new Parcelable.Creator<CaptureRequest>() {
+ @Override
+ public CaptureRequest createFromParcel(Parcel in) {
+ CaptureRequest request = new CaptureRequest();
+ request.readFromParcel(in);
+
+ return request;
+ }
+
+ @Override
+ public CaptureRequest[] newArray(int size) {
+ return new CaptureRequest[size];
+ }
+ };
+
+ /**
+ * Expand this object from a Parcel.
+ * Hidden since this breaks the immutability of CaptureRequest, but is
+ * needed to receive CaptureRequests with aidl.
+ *
+ * @param in The parcel from which the object should be read
+ * @hide
+ */
+ private void readFromParcel(Parcel in) {
+ mSettings.readFromParcel(in);
+ setNativeInstance(mSettings);
+
+ mSurfaceSet.clear();
+
+ Parcelable[] parcelableArray = in.readParcelableArray(Surface.class.getClassLoader());
+
+ if (parcelableArray == null) {
+ return;
+ }
+
+ for (Parcelable p : parcelableArray) {
+ Surface s = (Surface) p;
+ mSurfaceSet.add(s);
+ }
+
+ mIsReprocess = in.readInt() != 0;
+ mReprocessableSessionId = CameraCaptureSession.SESSION_ID_NONE;
+ }
+
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ @Override
+ public void writeToParcel(Parcel dest, int flags) {
+ mSettings.writeToParcel(dest, flags);
+ dest.writeParcelableArray(mSurfaceSet.toArray(new Surface[mSurfaceSet.size()]), flags);
+ dest.writeInt(mIsReprocess ? 1 : 0);
+ }
+
+ /**
+ * @hide
+ */
+ public boolean containsTarget(Surface surface) {
+ return mSurfaceSet.contains(surface);
+ }
+
+ /**
+ * @hide
+ */
+ public Collection<Surface> getTargets() {
+ return Collections.unmodifiableCollection(mSurfaceSet);
+ }
+
+ /**
+ * A builder for capture requests.
+ *
+ * <p>To obtain a builder instance, use the
+ * {@link CameraDevice#createCaptureRequest} method, which initializes the
+ * request fields to one of the templates defined in {@link CameraDevice}.
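+ *
+ * <p>For example, a minimal sketch of building a repeating preview request
+ * (assuming {@code camera}, {@code previewSurface}, and {@code session} were
+ * set up elsewhere):</p>
+ * <pre><code>CaptureRequest.Builder builder =
+ *     camera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
+ * builder.addTarget(previewSurface); // frames for this request go to the preview surface
+ * session.setRepeatingRequest(builder.build(), null, null); // no callback or handler
+ * </code></pre>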
+ *
+ * @see CameraDevice#createCaptureRequest
+ * @see CameraDevice#TEMPLATE_PREVIEW
+ * @see CameraDevice#TEMPLATE_RECORD
+ * @see CameraDevice#TEMPLATE_STILL_CAPTURE
+ * @see CameraDevice#TEMPLATE_VIDEO_SNAPSHOT
+ * @see CameraDevice#TEMPLATE_MANUAL
+ */
+ public final static class Builder {
+
+ private final CaptureRequest mRequest;
+
+ /**
+ * Initialize the builder using the template; the request takes
+ * ownership of the template.
+ *
+ * @param template Template settings for this capture request.
+ * @param reprocess Indicates whether to create a reprocess capture request. {@code true}
+ * to create a reprocess capture request. {@code false} to create a regular
+ * capture request.
+ * @param reprocessableSessionId The ID of the camera capture session this capture is
+ * created for. This is used to validate if the application
+ * submits a reprocess capture request to the same session
+ * where the {@link TotalCaptureResult}, used to create the
+ * reprocess capture, came from.
+ *
+ * @throws IllegalArgumentException If creating a reprocess capture request with an invalid
+ * reprocessableSessionId.
+ * @hide
+ */
+ public Builder(CameraMetadataNative template, boolean reprocess,
+ int reprocessableSessionId) {
+ mRequest = new CaptureRequest(template, reprocess, reprocessableSessionId);
+ }
+
+ /**
+ * <p>Add a surface to the list of targets for this request</p>
+ *
+ * <p>The Surface added must be one of the surfaces included in the most
+ * recent call to {@link CameraDevice#createCaptureSession}, when the
+ * request is given to the camera device.</p>
+ *
+ * <p>Adding a target more than once has no effect.</p>
+ *
+ * @param outputTarget Surface to use as an output target for this request
+ */
+ public void addTarget(@NonNull Surface outputTarget) {
+ mRequest.mSurfaceSet.add(outputTarget);
+ }
+
+ /**
+ * <p>Remove a surface from the list of targets for this request.</p>
+ *
+ * <p>Removing a target that is not currently added has no effect.</p>
+ *
+ * @param outputTarget Surface to use as an output target for this request
+ */
+ public void removeTarget(@NonNull Surface outputTarget) {
+ mRequest.mSurfaceSet.remove(outputTarget);
+ }
+
+ /**
+ * Set a capture request field to a value. The field definitions can be
+ * found in {@link CaptureRequest}.
+ *
+ * <p>Setting a field to {@code null} will remove that field from the capture request.
+ * Unless the field is optional, removing it will likely produce an error from the camera
+ * device when the request is submitted.</p>
+ *
+ * @param key The metadata field to write.
+ * @param value The value to set the field to, which must be of a matching
+ * type to the key.
+ */
+ public <T> void set(@NonNull Key<T> key, T value) {
+ mRequest.mSettings.set(key, value);
+ }
+
+ /**
+ * Get a capture request field value. The field definitions can be
+ * found in {@link CaptureRequest}.
+ *
+ * @throws IllegalArgumentException if the key was not valid
+ *
+ * @param key The metadata field to read.
+ * @return The value of that key, or {@code null} if the field is not set.
+ */
+ @Nullable
+ public <T> T get(Key<T> key) {
+ return mRequest.mSettings.get(key);
+ }
+
+ /**
+ * Set a tag for this request.
+ *
+ * <p>This tag is not used for anything by the camera device, but can be
+ * used by an application to easily identify a CaptureRequest when it is
+ * returned by
+ * {@link CameraCaptureSession.CaptureCallback#onCaptureCompleted CaptureCallback.onCaptureCompleted}
+ *
+ * @param tag an arbitrary Object to store with this request
+ * @see CaptureRequest#getTag
+ */
+ public void setTag(@Nullable Object tag) {
+ mRequest.mUserTag = tag;
+ }
+
+ /**
+ * <p>Mark this request as part of a constrained high speed request list created by
+ * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
+ * A constrained high speed request list contains constrained high speed capture
+ * requests in a specific interleaved pattern that is suitable for high speed preview/video
+ * streaming.</p>
+ *
+ * @hide
+ */
+ public void setPartOfCHSRequestList(boolean partOfCHSList) {
+ mRequest.mIsPartOfCHSRequestList = partOfCHSList;
+ }
+
+ /**
+ * Build a request using the current target Surfaces and settings.
+ * <p>Note that, although it is possible to create a {@code CaptureRequest} with no target
+ * {@link Surface}s, passing such a request into {@link CameraCaptureSession#capture},
+ * {@link CameraCaptureSession#captureBurst},
+ * {@link CameraCaptureSession#setRepeatingBurst}, or
+ * {@link CameraCaptureSession#setRepeatingRequest} will cause that method to throw an
+ * {@link IllegalArgumentException}.</p>
+ *
+ * @return A new capture request instance, ready for submission to the
+ * camera device.
+ */
+ @NonNull
+ public CaptureRequest build() {
+ return new CaptureRequest(mRequest);
+ }
+
+ /**
+ * @hide
+ */
+ public boolean isEmpty() {
+ return mRequest.mSettings.isEmpty();
+ }
+
+ }
+
+ /*@O~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
+ * The key entries below this point are generated from metadata
+ * definitions in /system/media/camera/docs. Do not modify by hand or
+ * modify the comment blocks at the start or end.
+ *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~*/
+
+ /**
+ * <p>The mode control selects how the image data is converted from the
+ * sensor's native color into linear sRGB color.</p>
+ * <p>When auto-white balance (AWB) is enabled with {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}, this
+ * control is overridden by the AWB routine. When AWB is disabled, the
+ * application controls how the color mapping is performed.</p>
+ * <p>We define the expected processing pipeline below. For consistency
+ * across devices, this is always the case with TRANSFORM_MATRIX.</p>
+ * <p>When either FULL or HIGH_QUALITY is used, the camera device may
+ * do additional processing but {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} and
+ * {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform} will still be provided by the
+ * camera device (in the results) and be roughly correct.</p>
+ * <p>Switching to TRANSFORM_MATRIX and using the data provided from
+ * FAST or HIGH_QUALITY will yield a picture with the same white point
+ * as what was produced by the camera device in the earlier frame.</p>
+ * <p>The expected processing pipeline is as follows:</p>
+ * <p><img alt="White balance processing pipeline" src="../../../../images/camera2/metadata/android.colorCorrection.mode/processing_pipeline.png" /></p>
+ * <p>The white balance is encoded by two values, a 4-channel white-balance
+ * gain vector (applied in the Bayer domain), and a 3x3 color transform
+ * matrix (applied after demosaic).</p>
+ * <p>The 4-channel white-balance gains are defined as:</p>
+ * <pre><code>{@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} = [ R G_even G_odd B ]
+ * </code></pre>
+ * <p>where <code>G_even</code> is the gain for green pixels on even rows of the
+ * output, and <code>G_odd</code> is the gain for green pixels on the odd rows.
+ * These may be identical for a given camera device implementation; if
+ * the camera device does not support a separate gain for even/odd green
+ * channels, it will use the <code>G_even</code> value, and write <code>G_odd</code> equal to
+ * <code>G_even</code> in the output result metadata.</p>
+ * <p>The matrices for color transforms are defined as a 9-entry vector:</p>
+ * <pre><code>{@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform} = [ I0 I1 I2 I3 I4 I5 I6 I7 I8 ]
+ * </code></pre>
+ * <p>which define a transform from input sensor colors, <code>P_in = [ r g b ]</code>,
+ * to output linear sRGB, <code>P_out = [ r' g' b' ]</code>,</p>
+ * <p>with colors as follows:</p>
+ * <pre><code>r' = I0r + I1g + I2b
+ * g' = I3r + I4g + I5b
+ * b' = I6r + I7g + I8b
+ * </code></pre>
+ * <p>Both the input and output value ranges must match. Overflow/underflow
+ * values are clipped to fit within the range.</p>
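+ * <p>For example, a minimal sketch of applying an identity transform with unit
+ * gains (assuming {@code builder} is a {@code CaptureRequest.Builder}, AWB is
+ * OFF, and the {@code android.hardware.camera2.params} classes are imported):</p>
+ * <pre><code>builder.set(CaptureRequest.COLOR_CORRECTION_MODE,
+ *     CaptureRequest.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
+ * builder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM,
+ *     new ColorSpaceTransform(new int[] {
+ *         1, 1, 0, 1, 0, 1,  // row 0: 1 0 0 (numerator/denominator pairs)
+ *         0, 1, 1, 1, 0, 1,  // row 1: 0 1 0
+ *         0, 1, 0, 1, 1, 1   // row 2: 0 0 1
+ *     }));
+ * builder.set(CaptureRequest.COLOR_CORRECTION_GAINS,
+ *     new RggbChannelVector(1f, 1f, 1f, 1f)); // [R, G_even, G_odd, B]
+ * </code></pre>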
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #COLOR_CORRECTION_MODE_TRANSFORM_MATRIX TRANSFORM_MATRIX}</li>
+ * <li>{@link #COLOR_CORRECTION_MODE_FAST FAST}</li>
+ * <li>{@link #COLOR_CORRECTION_MODE_HIGH_QUALITY HIGH_QUALITY}</li>
+ * </ul></p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CaptureRequest#COLOR_CORRECTION_GAINS
+ * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
+ * @see CaptureRequest#CONTROL_AWB_MODE
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see #COLOR_CORRECTION_MODE_TRANSFORM_MATRIX
+ * @see #COLOR_CORRECTION_MODE_FAST
+ * @see #COLOR_CORRECTION_MODE_HIGH_QUALITY
+ */
+ @PublicKey
+ public static final Key<Integer> COLOR_CORRECTION_MODE =
+ new Key<Integer>("android.colorCorrection.mode", int.class);
+
+ /**
+ * <p>A color transform matrix to use to transform
+ * from sensor RGB color space to output linear sRGB color space.</p>
+ * <p>This matrix is either set by the camera device when the request
+ * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is not TRANSFORM_MATRIX, or
+ * directly by the application in the request when the
+ * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is TRANSFORM_MATRIX.</p>
+ * <p>In the latter case, the camera device may round the matrix to account
+ * for precision issues; the final rounded matrix should be reported back
+ * in this matrix result metadata. The transform should keep the magnitude
+ * of the output color values within <code>[0, 1.0]</code> (assuming input color
+ * values are within the normalized range <code>[0, 1.0]</code>), or clipping may occur.</p>
+ * <p>The valid range of each matrix element varies on different devices, but
+ * values within [-1.5, 3.0] are guaranteed not to be clipped.</p>
+ * <p><b>Units</b>: Unitless scale factors</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CaptureRequest#COLOR_CORRECTION_MODE
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ */
+ @PublicKey
+ public static final Key<android.hardware.camera2.params.ColorSpaceTransform> COLOR_CORRECTION_TRANSFORM =
+ new Key<android.hardware.camera2.params.ColorSpaceTransform>("android.colorCorrection.transform", android.hardware.camera2.params.ColorSpaceTransform.class);
+
+ /**
+ * <p>Gains applying to Bayer raw color channels for
+ * white-balance.</p>
+ * <p>These per-channel gains are either set by the camera device
+ * when the request {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is not
+ * TRANSFORM_MATRIX, or directly by the application in the
+ * request when the {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is
+ * TRANSFORM_MATRIX.</p>
+ * <p>The gains in the result metadata are the gains actually
+ * applied by the camera device to the current frame.</p>
+ * <p>The valid range of gains varies on different devices, but gains
+ * between [1.0, 3.0] are guaranteed not to be clipped. Even if a given
+ * device allows gains below 1.0, this is usually not recommended because
+ * this can create color artifacts.</p>
+ * <p><b>Units</b>: Unitless gain factors</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CaptureRequest#COLOR_CORRECTION_MODE
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ */
+ @PublicKey
+ public static final Key<android.hardware.camera2.params.RggbChannelVector> COLOR_CORRECTION_GAINS =
+ new Key<android.hardware.camera2.params.RggbChannelVector>("android.colorCorrection.gains", android.hardware.camera2.params.RggbChannelVector.class);
+
+ /**
+ * <p>Mode of operation for the chromatic aberration correction algorithm.</p>
+ * <p>Chromatic (color) aberration is caused by the fact that different wavelengths of light
+ * can not focus on the same point after exiting from the lens. This metadata defines
+ * the high level control of chromatic aberration correction algorithm, which aims to
+ * minimize the chromatic artifacts that may occur along the object boundaries in an
+ * image.</p>
+ * <p>FAST/HIGH_QUALITY both mean that camera-device-determined aberration
+ * correction will be applied. HIGH_QUALITY mode indicates that the camera device will
+ * use the highest-quality aberration correction algorithms, even if it slows down
+ * capture rate. FAST means the camera device will not slow down capture rate when
+ * applying aberration correction.</p>
+ * <p>LEGACY devices will always be in FAST mode.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #COLOR_CORRECTION_ABERRATION_MODE_OFF OFF}</li>
+ * <li>{@link #COLOR_CORRECTION_ABERRATION_MODE_FAST FAST}</li>
+ * <li>{@link #COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY HIGH_QUALITY}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES android.colorCorrection.availableAberrationModes}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES
+ * @see #COLOR_CORRECTION_ABERRATION_MODE_OFF
+ * @see #COLOR_CORRECTION_ABERRATION_MODE_FAST
+ * @see #COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY
+ */
+ @PublicKey
+ public static final Key<Integer> COLOR_CORRECTION_ABERRATION_MODE =
+ new Key<Integer>("android.colorCorrection.aberrationMode", int.class);
+
+ /**
+ * <p>The desired setting for the camera device's auto-exposure
+ * algorithm's antibanding compensation.</p>
+ * <p>Some kinds of lighting fixtures, such as some fluorescent
+ * lights, flicker at the rate of the power supply frequency
+ * (60Hz or 50Hz, depending on country). While this is
+ * typically not noticeable to a person, it can be visible to
+ * a camera device. If a camera sets its exposure time to the
+ * wrong value, the flicker may become visible in the
+ * viewfinder as flicker or in a final captured image, as a
+ * set of variable-brightness bands across the image.</p>
+ * <p>Therefore, the auto-exposure routines of camera devices
+ * include antibanding routines that ensure that the chosen
+ * exposure value will not cause such banding. The choice of
+ * exposure time depends on the rate of flicker, which the
+ * camera device can detect automatically, or the expected
+ * rate can be selected by the application using this
+ * control.</p>
+ * <p>A given camera device may not support all of the possible
+ * options for the antibanding mode. The
+ * {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_ANTIBANDING_MODES android.control.aeAvailableAntibandingModes} key contains
+ * the available modes for a given camera device.</p>
+ * <p>AUTO mode is the default if it is available on the given
+ * camera device. When AUTO mode is not available, the
+ * default will be either 50HZ or 60HZ, and both 50HZ
+ * and 60HZ will be available.</p>
+ * <p>If manual exposure control is enabled (by setting
+ * {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} to OFF),
+ * then this setting has no effect, and the application must
+ * ensure it selects exposure times that do not cause banding
+ * issues. The {@link CaptureResult#STATISTICS_SCENE_FLICKER android.statistics.sceneFlicker} key can assist
+ * the application in this.</p>
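+ * <p>A minimal sketch (assuming {@code builder} is a {@code CaptureRequest.Builder}
+ * and AUTO is listed in
+ * {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_ANTIBANDING_MODES android.control.aeAvailableAntibandingModes}):</p>
+ * <pre><code>builder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE,
+ *     CaptureRequest.CONTROL_AE_ANTIBANDING_MODE_AUTO);
+ * </code></pre>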
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #CONTROL_AE_ANTIBANDING_MODE_OFF OFF}</li>
+ * <li>{@link #CONTROL_AE_ANTIBANDING_MODE_50HZ 50HZ}</li>
+ * <li>{@link #CONTROL_AE_ANTIBANDING_MODE_60HZ 60HZ}</li>
+ * <li>{@link #CONTROL_AE_ANTIBANDING_MODE_AUTO AUTO}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_ANTIBANDING_MODES android.control.aeAvailableAntibandingModes}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#CONTROL_AE_AVAILABLE_ANTIBANDING_MODES
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CaptureRequest#CONTROL_MODE
+ * @see CaptureResult#STATISTICS_SCENE_FLICKER
+ * @see #CONTROL_AE_ANTIBANDING_MODE_OFF
+ * @see #CONTROL_AE_ANTIBANDING_MODE_50HZ
+ * @see #CONTROL_AE_ANTIBANDING_MODE_60HZ
+ * @see #CONTROL_AE_ANTIBANDING_MODE_AUTO
+ */
+ @PublicKey
+ public static final Key<Integer> CONTROL_AE_ANTIBANDING_MODE =
+ new Key<Integer>("android.control.aeAntibandingMode", int.class);
+
+ /**
+ * <p>Adjustment to auto-exposure (AE) target image
+ * brightness.</p>
+ * <p>The adjustment is measured as a count of steps, with the
+ * step size defined by {@link CameraCharacteristics#CONTROL_AE_COMPENSATION_STEP android.control.aeCompensationStep} and the
+ * allowed range by {@link CameraCharacteristics#CONTROL_AE_COMPENSATION_RANGE android.control.aeCompensationRange}.</p>
+ * <p>For example, if the exposure value (EV) step is 0.333, '6'
+ * will mean an exposure compensation of +2 EV; -3 will mean an
+ * exposure compensation of -1 EV. One EV represents a doubling
+ * of image brightness. Note that this control will only be
+ * effective if {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} <code>!=</code> OFF. This control
+ * will take effect even when {@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} <code>== true</code>.</p>
+ * <p>When the exposure compensation value is changed, the camera device
+ * may take several frames to reach the newly requested exposure target.
+ * During that time, {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} field will be in the SEARCHING
+ * state. Once the new exposure target is reached, {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} will
+ * change from SEARCHING to either CONVERGED, LOCKED (if AE lock is enabled), or
+ * FLASH_REQUIRED (if the scene is too dark for still capture).</p>
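+     * <p>As an illustrative sketch (not required usage), the step arithmetic
+     * might look as follows; {@code characteristics} and {@code requestBuilder}
+     * are assumed to be in scope:</p>
+     * <pre><code>Rational step = characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
+     * Range&lt;Integer&gt; range = characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
+     * // Ask for roughly +1 EV: number of steps = 1 EV / step size
+     * int steps = Math.round(1.0f / step.floatValue());
+     * steps = range.clamp(steps); // stay inside the advertised range
+     * requestBuilder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, steps);
+     * </code></pre>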
+ * <p><b>Units</b>: Compensation steps</p>
+ * <p><b>Range of valid values:</b><br>
+ * {@link CameraCharacteristics#CONTROL_AE_COMPENSATION_RANGE android.control.aeCompensationRange}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#CONTROL_AE_COMPENSATION_RANGE
+ * @see CameraCharacteristics#CONTROL_AE_COMPENSATION_STEP
+ * @see CaptureRequest#CONTROL_AE_LOCK
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CaptureResult#CONTROL_AE_STATE
+ */
+ @PublicKey
+ public static final Key<Integer> CONTROL_AE_EXPOSURE_COMPENSATION =
+ new Key<Integer>("android.control.aeExposureCompensation", int.class);
+
+ /**
+ * <p>Whether auto-exposure (AE) is currently locked to its latest
+ * calculated values.</p>
+ * <p>When set to <code>true</code> (ON), the AE algorithm is locked to its latest parameters,
+ * and will not change exposure settings until the lock is set to <code>false</code> (OFF).</p>
+ * <p>Note that even when AE is locked, the flash may be fired if
+ * the {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is ON_AUTO_FLASH /
+ * ON_ALWAYS_FLASH / ON_AUTO_FLASH_REDEYE.</p>
+ * <p>When {@link CaptureRequest#CONTROL_AE_EXPOSURE_COMPENSATION android.control.aeExposureCompensation} is changed, even if the AE lock
+ * is ON, the camera device will still adjust its exposure value.</p>
+ * <p>If AE precapture is triggered (see {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger})
+ * when AE is already locked, the camera device will not change the exposure time
+ * ({@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime}) and sensitivity ({@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity})
+ * parameters. The flash may be fired if the {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode}
+ * is ON_AUTO_FLASH/ON_AUTO_FLASH_REDEYE and the scene is too dark. If the
+ * {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is ON_ALWAYS_FLASH, the scene may become overexposed.
+ * Similarly, AE precapture trigger CANCEL has no effect when AE is already locked.</p>
+     * <p>When an AE precapture sequence is triggered, AE unlock will not be able to unlock
+     * the AE if AE is locked by the camera device internally during the precapture metering
+     * sequence. In other words, submitting requests with AE unlock has no effect for an
+ * ongoing precapture metering sequence. Otherwise, the precapture metering sequence
+ * will never succeed in a sequence of preview requests where AE lock is always set
+ * to <code>false</code>.</p>
+ * <p>Since the camera device has a pipeline of in-flight requests, the settings that
+ * get locked do not necessarily correspond to the settings that were present in the
+ * latest capture result received from the camera device, since additional captures
+ * and AE updates may have occurred even before the result was sent out. If an
+ * application is switching between automatic and manual control and wishes to eliminate
+ * any flicker during the switch, the following procedure is recommended:</p>
+ * <ol>
+ * <li>Starting in auto-AE mode:</li>
+ * <li>Lock AE</li>
+ * <li>Wait for the first result to be output that has the AE locked</li>
+ * <li>Copy exposure settings from that result into a request, set the request to manual AE</li>
+ * <li>Submit the capture request, proceed to run manual AE as desired.</li>
+ * </ol>
+ * <p>See {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} for AE lock related state transition details.</p>
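+     * <p>A minimal sketch of steps 2-5 above, assuming a repeating preview with a
+     * hypothetical {@code previewBuilder}, a second {@code manualBuilder}, an open
+     * {@code session}, and a {@code handler}; exception handling is elided:</p>
+     * <pre><code>previewBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
+     * session.setRepeatingRequest(previewBuilder.build(),
+     *         new CameraCaptureSession.CaptureCallback() {
+     *     public void onCaptureCompleted(CameraCaptureSession s, CaptureRequest request,
+     *             TotalCaptureResult result) {
+     *         Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
+     *         if (aeState != null &amp;&amp; aeState == CameraMetadata.CONTROL_AE_STATE_LOCKED) {
+     *             // Copy the locked exposure settings into the manual request
+     *             manualBuilder.set(CaptureRequest.CONTROL_AE_MODE,
+     *                     CameraMetadata.CONTROL_AE_MODE_OFF);
+     *             manualBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME,
+     *                     result.get(CaptureResult.SENSOR_EXPOSURE_TIME));
+     *             manualBuilder.set(CaptureRequest.SENSOR_SENSITIVITY,
+     *                     result.get(CaptureResult.SENSOR_SENSITIVITY));
+     *             // Submit manualBuilder.build() and continue with manual AE as desired
+     *         }
+     *     }
+     * }, handler);
+     * </code></pre>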
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_EXPOSURE_COMPENSATION
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
+ * @see CaptureResult#CONTROL_AE_STATE
+ * @see CaptureRequest#SENSOR_EXPOSURE_TIME
+ * @see CaptureRequest#SENSOR_SENSITIVITY
+ */
+ @PublicKey
+ public static final Key<Boolean> CONTROL_AE_LOCK =
+ new Key<Boolean>("android.control.aeLock", boolean.class);
+
+ /**
+ * <p>The desired mode for the camera device's
+ * auto-exposure routine.</p>
+ * <p>This control is only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} is
+ * AUTO.</p>
+ * <p>When set to any of the ON modes, the camera device's
+ * auto-exposure routine is enabled, overriding the
+ * application's selected exposure time, sensor sensitivity,
+ * and frame duration ({@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime},
+ * {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}, and
+ * {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}). If one of the FLASH modes
+ * is selected, the camera device's flash unit controls are
+ * also overridden.</p>
+ * <p>The FLASH modes are only available if the camera device
+ * has a flash unit ({@link CameraCharacteristics#FLASH_INFO_AVAILABLE android.flash.info.available} is <code>true</code>).</p>
+ * <p>If flash TORCH mode is desired, this field must be set to
+ * ON or OFF, and {@link CaptureRequest#FLASH_MODE android.flash.mode} set to TORCH.</p>
+ * <p>When set to any of the ON modes, the values chosen by the
+ * camera device auto-exposure routine for the overridden
+ * fields for a given capture will be available in its
+ * CaptureResult.</p>
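+     * <p>For instance, a short sketch of keeping auto-exposure running while
+     * turning the torch on ({@code requestBuilder} assumed in scope; requires
+     * a flash unit):</p>
+     * <pre><code>requestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
+     *         CameraMetadata.CONTROL_AE_MODE_ON);
+     * requestBuilder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_TORCH);
+     * </code></pre>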
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #CONTROL_AE_MODE_OFF OFF}</li>
+ * <li>{@link #CONTROL_AE_MODE_ON ON}</li>
+ * <li>{@link #CONTROL_AE_MODE_ON_AUTO_FLASH ON_AUTO_FLASH}</li>
+ * <li>{@link #CONTROL_AE_MODE_ON_ALWAYS_FLASH ON_ALWAYS_FLASH}</li>
+ * <li>{@link #CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE ON_AUTO_FLASH_REDEYE}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_MODES android.control.aeAvailableModes}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#CONTROL_AE_AVAILABLE_MODES
+ * @see CaptureRequest#CONTROL_MODE
+ * @see CameraCharacteristics#FLASH_INFO_AVAILABLE
+ * @see CaptureRequest#FLASH_MODE
+ * @see CaptureRequest#SENSOR_EXPOSURE_TIME
+ * @see CaptureRequest#SENSOR_FRAME_DURATION
+ * @see CaptureRequest#SENSOR_SENSITIVITY
+ * @see #CONTROL_AE_MODE_OFF
+ * @see #CONTROL_AE_MODE_ON
+ * @see #CONTROL_AE_MODE_ON_AUTO_FLASH
+ * @see #CONTROL_AE_MODE_ON_ALWAYS_FLASH
+ * @see #CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE
+ */
+ @PublicKey
+ public static final Key<Integer> CONTROL_AE_MODE =
+ new Key<Integer>("android.control.aeMode", int.class);
+
+ /**
+ * <p>List of metering areas to use for auto-exposure adjustment.</p>
+ * <p>Not available if {@link CameraCharacteristics#CONTROL_MAX_REGIONS_AE android.control.maxRegionsAe} is 0.
+ * Otherwise will always be present.</p>
+ * <p>The maximum number of regions supported by the device is determined by the value
+ * of {@link CameraCharacteristics#CONTROL_MAX_REGIONS_AE android.control.maxRegionsAe}.</p>
+ * <p>The coordinate system is based on the active pixel array,
+ * with (0,0) being the top-left pixel in the active pixel array, and
+ * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1,
+ * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the
+ * bottom-right pixel in the active pixel array.</p>
+ * <p>The weight must be within <code>[0, 1000]</code>, and represents a weight
+ * for every pixel in the area. This means that a large metering area
+ * with the same weight as a smaller area will have more effect in
+ * the metering result. Metering areas can partially overlap and the
+ * camera device will add the weights in the overlap region.</p>
+ * <p>The weights are relative to weights of other exposure metering regions, so if only one
+ * region is used, all non-zero weights will have the same effect. A region with 0
+ * weight is ignored.</p>
+ * <p>If all regions have 0 weight, then no specific metering area needs to be used by the
+ * camera device.</p>
+ * <p>If the metering region is outside the used {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} returned in
+ * capture result metadata, the camera device will ignore the sections outside the crop
+ * region and output only the intersection rectangle as the metering region in the result
+ * metadata. If the region is entirely outside the crop region, it will be ignored and
+ * not reported in the result metadata.</p>
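+     * <p>As a sketch (assuming {@code characteristics} and {@code requestBuilder}
+     * are in scope), a single maximum-weight region covering the middle half of
+     * the active array might be constructed as:</p>
+     * <pre><code>Rect active = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+     * MeteringRectangle center = new MeteringRectangle(
+     *         active.width() / 4, active.height() / 4, // top-left, active-array coordinates
+     *         active.width() / 2, active.height() / 2, // width and height
+     *         MeteringRectangle.METERING_WEIGHT_MAX);  // weight 1000
+     * requestBuilder.set(CaptureRequest.CONTROL_AE_REGIONS,
+     *         new MeteringRectangle[] { center });
+     * </code></pre>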
+ * <p><b>Units</b>: Pixel coordinates within {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</p>
+ * <p><b>Range of valid values:</b><br>
+ * Coordinates must be between <code>[(0,0), (width, height))</code> of
+ * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CameraCharacteristics#CONTROL_MAX_REGIONS_AE
+ * @see CaptureRequest#SCALER_CROP_REGION
+ * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ */
+ @PublicKey
+ public static final Key<android.hardware.camera2.params.MeteringRectangle[]> CONTROL_AE_REGIONS =
+ new Key<android.hardware.camera2.params.MeteringRectangle[]>("android.control.aeRegions", android.hardware.camera2.params.MeteringRectangle[].class);
+
+ /**
+ * <p>Range over which the auto-exposure routine can
+ * adjust the capture frame rate to maintain good
+ * exposure.</p>
+ * <p>Only constrains auto-exposure (AE) algorithm, not
+ * manual control of {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime} and
+ * {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}.</p>
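+     * <p>For example, a sketch of requesting a fixed 30fps rate for video when the
+     * device advertises it ({@code characteristics} and {@code requestBuilder}
+     * assumed in scope):</p>
+     * <pre><code>Range&lt;Integer&gt;[] ranges = characteristics.get(
+     *         CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
+     * Range&lt;Integer&gt; target = new Range&lt;Integer&gt;(30, 30); // illustrative target
+     * for (Range&lt;Integer&gt; r : ranges) {
+     *     if (r.equals(target)) {
+     *         requestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, r);
+     *         break;
+     *     }
+     * }
+     * </code></pre>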
+ * <p><b>Units</b>: Frames per second (FPS)</p>
+ * <p><b>Range of valid values:</b><br>
+ * Any of the entries in {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES android.control.aeAvailableTargetFpsRanges}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES
+ * @see CaptureRequest#SENSOR_EXPOSURE_TIME
+ * @see CaptureRequest#SENSOR_FRAME_DURATION
+ */
+ @PublicKey
+ public static final Key<android.util.Range<Integer>> CONTROL_AE_TARGET_FPS_RANGE =
+ new Key<android.util.Range<Integer>>("android.control.aeTargetFpsRange", new TypeReference<android.util.Range<Integer>>() {{ }});
+
+ /**
+ * <p>Whether the camera device will trigger a precapture
+ * metering sequence when it processes this request.</p>
+ * <p>This entry is normally set to IDLE, or is not
+ * included at all in the request settings. When included and
+ * set to START, the camera device will trigger the auto-exposure (AE)
+ * precapture metering sequence.</p>
+ * <p>When set to CANCEL, the camera device will cancel any active
+ * precapture metering trigger, and return to its initial AE state.
+ * If a precapture metering sequence is already completed, and the camera
+ * device has implicitly locked the AE for subsequent still capture, the
+ * CANCEL trigger will unlock the AE and return to its initial AE state.</p>
+ * <p>The precapture sequence should be triggered before starting a
+ * high-quality still capture for final metering decisions to
+ * be made, and for firing pre-capture flash pulses to estimate
+ * scene brightness and required final capture flash power, when
+ * the flash is enabled.</p>
+ * <p>Normally, this entry should be set to START for only a
+ * single request, and the application should wait until the
+ * sequence completes before starting a new one.</p>
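+     * <p>A sketch of firing the sequence exactly once before a still capture
+     * ({@code previewBuilder}, {@code session}, {@code captureCallback}, and
+     * {@code handler} assumed in scope; exception handling elided):</p>
+     * <pre><code>previewBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
+     *         CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START);
+     * session.capture(previewBuilder.build(), captureCallback, handler);
+     * // Return the trigger to IDLE so later requests don't restart the sequence
+     * previewBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
+     *         CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
+     * </code></pre>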
+ * <p>When a precapture metering sequence is finished, the camera device
+ * may lock the auto-exposure routine internally to be able to accurately expose the
+ * subsequent still capture image (<code>{@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent} == STILL_CAPTURE</code>).
+     * In this case, the AE may not resume its normal scan if no subsequent still capture is
+ * submitted. To ensure that the AE routine restarts normal scan, the application should
+ * submit a request with <code>{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} == true</code>, followed by a request
+ * with <code>{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} == false</code>, if the application decides not to submit a
+ * still capture request after the precapture sequence completes. Alternatively, for
+     * API level 23 or newer devices, the CANCEL trigger can be used to unlock the camera
+     * device's internally locked AE if the application doesn't submit a still capture request
+     * after the AE precapture trigger. Note that CANCEL was added in API level 23, and must
+     * not be used on devices with earlier API levels.</p>
+ * <p>The exact effect of auto-exposure (AE) precapture trigger
+ * depends on the current AE mode and state; see
+ * {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} for AE precapture state transition
+ * details.</p>
+ * <p>On LEGACY-level devices, the precapture trigger is not supported;
+ * capturing a high-resolution JPEG image will automatically trigger a
+ * precapture sequence before the high-resolution capture, including
+ * potentially firing a pre-capture flash.</p>
+ * <p>Using the precapture trigger and the auto-focus trigger {@link CaptureRequest#CONTROL_AF_TRIGGER android.control.afTrigger}
+ * simultaneously is allowed. However, since these triggers often require cooperation between
+     * the auto-focus and auto-exposure routines (for example, the flash may need to be enabled for a
+ * focus sweep), the camera device may delay acting on a later trigger until the previous
+ * trigger has been fully handled. This may lead to longer intervals between the trigger and
+ * changes to {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} indicating the start of the precapture sequence, for
+ * example.</p>
+ * <p>If both the precapture and the auto-focus trigger are activated on the same request, then
+ * the camera device will complete them in the optimal order for that device.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #CONTROL_AE_PRECAPTURE_TRIGGER_IDLE IDLE}</li>
+ * <li>{@link #CONTROL_AE_PRECAPTURE_TRIGGER_START START}</li>
+ * <li>{@link #CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL CANCEL}</li>
+ * </ul></p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Limited capability</b> -
+ * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_LOCK
+ * @see CaptureResult#CONTROL_AE_STATE
+ * @see CaptureRequest#CONTROL_AF_TRIGGER
+ * @see CaptureRequest#CONTROL_CAPTURE_INTENT
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see #CONTROL_AE_PRECAPTURE_TRIGGER_IDLE
+ * @see #CONTROL_AE_PRECAPTURE_TRIGGER_START
+ * @see #CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
+ */
+ @PublicKey
+ public static final Key<Integer> CONTROL_AE_PRECAPTURE_TRIGGER =
+ new Key<Integer>("android.control.aePrecaptureTrigger", int.class);
+
+ /**
+ * <p>Whether auto-focus (AF) is currently enabled, and what
+ * mode it is set to.</p>
+ * <p>Only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} = AUTO and the lens is not fixed focus
+ * (i.e. <code>{@link CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE android.lens.info.minimumFocusDistance} &gt; 0</code>). Also note that
+ * when {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is OFF, the behavior of AF is device
+ * dependent. It is recommended to lock AF by using {@link CaptureRequest#CONTROL_AF_TRIGGER android.control.afTrigger} before
+ * setting {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} to OFF, or set AF mode to OFF when AE is OFF.</p>
+ * <p>If the lens is controlled by the camera device auto-focus algorithm,
+ * the camera device will report the current AF status in {@link CaptureResult#CONTROL_AF_STATE android.control.afState}
+ * in result metadata.</p>
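+     * <p>For example, a sketch of preferring CONTINUOUS_PICTURE when available
+     * ({@code characteristics} and {@code requestBuilder} assumed in scope):</p>
+     * <pre><code>int[] afModes = characteristics.get(CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
+     * int afMode = CameraMetadata.CONTROL_AF_MODE_AUTO; // illustrative fallback
+     * for (int mode : afModes) {
+     *     if (mode == CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE) {
+     *         afMode = mode;
+     *         break;
+     *     }
+     * }
+     * requestBuilder.set(CaptureRequest.CONTROL_AF_MODE, afMode);
+     * </code></pre>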
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #CONTROL_AF_MODE_OFF OFF}</li>
+ * <li>{@link #CONTROL_AF_MODE_AUTO AUTO}</li>
+ * <li>{@link #CONTROL_AF_MODE_MACRO MACRO}</li>
+ * <li>{@link #CONTROL_AF_MODE_CONTINUOUS_VIDEO CONTINUOUS_VIDEO}</li>
+ * <li>{@link #CONTROL_AF_MODE_CONTINUOUS_PICTURE CONTINUOUS_PICTURE}</li>
+ * <li>{@link #CONTROL_AF_MODE_EDOF EDOF}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#CONTROL_AF_AVAILABLE_MODES android.control.afAvailableModes}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CameraCharacteristics#CONTROL_AF_AVAILABLE_MODES
+ * @see CaptureResult#CONTROL_AF_STATE
+ * @see CaptureRequest#CONTROL_AF_TRIGGER
+ * @see CaptureRequest#CONTROL_MODE
+ * @see CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE
+ * @see #CONTROL_AF_MODE_OFF
+ * @see #CONTROL_AF_MODE_AUTO
+ * @see #CONTROL_AF_MODE_MACRO
+ * @see #CONTROL_AF_MODE_CONTINUOUS_VIDEO
+ * @see #CONTROL_AF_MODE_CONTINUOUS_PICTURE
+ * @see #CONTROL_AF_MODE_EDOF
+ */
+ @PublicKey
+ public static final Key<Integer> CONTROL_AF_MODE =
+ new Key<Integer>("android.control.afMode", int.class);
+
+ /**
+ * <p>List of metering areas to use for auto-focus.</p>
+ * <p>Not available if {@link CameraCharacteristics#CONTROL_MAX_REGIONS_AF android.control.maxRegionsAf} is 0.
+ * Otherwise will always be present.</p>
+ * <p>The maximum number of focus areas supported by the device is determined by the value
+ * of {@link CameraCharacteristics#CONTROL_MAX_REGIONS_AF android.control.maxRegionsAf}.</p>
+ * <p>The coordinate system is based on the active pixel array,
+ * with (0,0) being the top-left pixel in the active pixel array, and
+ * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1,
+ * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the
+ * bottom-right pixel in the active pixel array.</p>
+ * <p>The weight must be within <code>[0, 1000]</code>, and represents a weight
+ * for every pixel in the area. This means that a large metering area
+ * with the same weight as a smaller area will have more effect in
+ * the metering result. Metering areas can partially overlap and the
+ * camera device will add the weights in the overlap region.</p>
+ * <p>The weights are relative to weights of other metering regions, so if only one region
+ * is used, all non-zero weights will have the same effect. A region with 0 weight is
+ * ignored.</p>
+ * <p>If all regions have 0 weight, then no specific metering area needs to be used by the
+ * camera device.</p>
+ * <p>If the metering region is outside the used {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} returned in
+ * capture result metadata, the camera device will ignore the sections outside the crop
+ * region and output only the intersection rectangle as the metering region in the result
+ * metadata. If the region is entirely outside the crop region, it will be ignored and
+ * not reported in the result metadata.</p>
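+     * <p>As a simplified sketch, a normalized tap point {@code (nx, ny)} in
+     * {@code [0, 1]} might be mapped to a small focus region as follows; the
+     * crop region is ignored for brevity, and {@code characteristics} and
+     * {@code requestBuilder} are assumed to be in scope:</p>
+     * <pre><code>Rect active = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+     * int half = Math.min(active.width(), active.height()) / 10; // region half-size
+     * int x = Math.max(0, Math.min((int) (nx * active.width()) - half,
+     *         active.width() - 2 * half));
+     * int y = Math.max(0, Math.min((int) (ny * active.height()) - half,
+     *         active.height() - 2 * half));
+     * MeteringRectangle region = new MeteringRectangle(x, y, 2 * half, 2 * half, 500);
+     * requestBuilder.set(CaptureRequest.CONTROL_AF_REGIONS,
+     *         new MeteringRectangle[] { region });
+     * </code></pre>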
+ * <p><b>Units</b>: Pixel coordinates within {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</p>
+ * <p><b>Range of valid values:</b><br>
+ * Coordinates must be between <code>[(0,0), (width, height))</code> of
+ * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CameraCharacteristics#CONTROL_MAX_REGIONS_AF
+ * @see CaptureRequest#SCALER_CROP_REGION
+ * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ */
+ @PublicKey
+ public static final Key<android.hardware.camera2.params.MeteringRectangle[]> CONTROL_AF_REGIONS =
+ new Key<android.hardware.camera2.params.MeteringRectangle[]>("android.control.afRegions", android.hardware.camera2.params.MeteringRectangle[].class);
+
+ /**
+ * <p>Whether the camera device will trigger autofocus for this request.</p>
+ * <p>This entry is normally set to IDLE, or is not
+ * included at all in the request settings.</p>
+ * <p>When included and set to START, the camera device will trigger the
+ * autofocus algorithm. If autofocus is disabled, this trigger has no effect.</p>
+ * <p>When set to CANCEL, the camera device will cancel any active trigger,
+ * and return to its initial AF state.</p>
+ * <p>Generally, applications should set this entry to START or CANCEL for only a
+ * single capture, and then return it to IDLE (or not set at all). Specifying
+ * START for multiple captures in a row means restarting the AF operation over
+ * and over again.</p>
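+     * <p>A sketch of that single-capture pattern ({@code previewBuilder},
+     * {@code session}, {@code captureCallback}, and {@code handler} assumed in
+     * scope; exception handling elided):</p>
+     * <pre><code>previewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
+     *         CameraMetadata.CONTROL_AF_TRIGGER_START);
+     * session.capture(previewBuilder.build(), captureCallback, handler);
+     * // Return the trigger to IDLE so the scan is not restarted every frame
+     * previewBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
+     *         CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
+     * </code></pre>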
+ * <p>See {@link CaptureResult#CONTROL_AF_STATE android.control.afState} for what the trigger means for each AF mode.</p>
+ * <p>Using the autofocus trigger and the precapture trigger {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger}
+ * simultaneously is allowed. However, since these triggers often require cooperation between
+     * the auto-focus and auto-exposure routines (for example, the flash may need to be enabled for a
+ * focus sweep), the camera device may delay acting on a later trigger until the previous
+ * trigger has been fully handled. This may lead to longer intervals between the trigger and
+ * changes to {@link CaptureResult#CONTROL_AF_STATE android.control.afState}, for example.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #CONTROL_AF_TRIGGER_IDLE IDLE}</li>
+ * <li>{@link #CONTROL_AF_TRIGGER_START START}</li>
+ * <li>{@link #CONTROL_AF_TRIGGER_CANCEL CANCEL}</li>
+ * </ul></p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
+ * @see CaptureResult#CONTROL_AF_STATE
+ * @see #CONTROL_AF_TRIGGER_IDLE
+ * @see #CONTROL_AF_TRIGGER_START
+ * @see #CONTROL_AF_TRIGGER_CANCEL
+ */
+ @PublicKey
+ public static final Key<Integer> CONTROL_AF_TRIGGER =
+ new Key<Integer>("android.control.afTrigger", int.class);
+
+ /**
+ * <p>Whether auto-white balance (AWB) is currently locked to its
+ * latest calculated values.</p>
+ * <p>When set to <code>true</code> (ON), the AWB algorithm is locked to its latest parameters,
+ * and will not change color balance settings until the lock is set to <code>false</code> (OFF).</p>
+ * <p>Since the camera device has a pipeline of in-flight requests, the settings that
+ * get locked do not necessarily correspond to the settings that were present in the
+ * latest capture result received from the camera device, since additional captures
+ * and AWB updates may have occurred even before the result was sent out. If an
+ * application is switching between automatic and manual control and wishes to eliminate
+ * any flicker during the switch, the following procedure is recommended:</p>
+ * <ol>
+ * <li>Starting in auto-AWB mode:</li>
+ * <li>Lock AWB</li>
+ * <li>Wait for the first result to be output that has the AWB locked</li>
+ * <li>Copy AWB settings from that result into a request, set the request to manual AWB</li>
+ * <li>Submit the capture request, proceed to run manual AWB as desired.</li>
+ * </ol>
+ * <p>Note that AWB lock is only meaningful when
+ * {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} is in the AUTO mode; in other modes,
+ * AWB is already fixed to a specific setting.</p>
+ * <p>Some LEGACY devices may not support ON; the value is then overridden to OFF.</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_AWB_MODE
+ */
+ @PublicKey
+ public static final Key<Boolean> CONTROL_AWB_LOCK =
+ new Key<Boolean>("android.control.awbLock", boolean.class);
+
+ /**
+ * <p>Whether auto-white balance (AWB) is currently setting the color
+ * transform fields, and what its illumination target
+ * is.</p>
+ * <p>This control is only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} is AUTO.</p>
+ * <p>When set to the ON mode, the camera device's auto-white balance
+ * routine is enabled, overriding the application's selected
+ * {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}, {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} and
+ * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode}. Note that when {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode}
+     * is OFF, the behavior of AWB is device dependent. It is recommended to
+ * also set AWB mode to OFF or lock AWB by using {@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock} before
+ * setting AE mode to OFF.</p>
+ * <p>When set to the OFF mode, the camera device's auto-white balance
+ * routine is disabled. The application manually controls the white
+ * balance by {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}, {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains}
+ * and {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode}.</p>
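+     * <p>A sketch of fully manual white balance (the gain values are purely
+     * illustrative; {@code requestBuilder} is assumed to be in scope, and manual
+     * color correction must be supported by the device):</p>
+     * <pre><code>requestBuilder.set(CaptureRequest.CONTROL_AWB_MODE,
+     *         CameraMetadata.CONTROL_AWB_MODE_OFF);
+     * requestBuilder.set(CaptureRequest.COLOR_CORRECTION_MODE,
+     *         CameraMetadata.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
+     * requestBuilder.set(CaptureRequest.COLOR_CORRECTION_GAINS,
+     *         new RggbChannelVector(2.0f, 1.0f, 1.0f, 1.8f)); // R, G_even, G_odd, B
+     * requestBuilder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM,
+     *         new ColorSpaceTransform(new int[] {
+     *                 1, 1, 0, 1, 0, 1,   // identity matrix, packed as
+     *                 0, 1, 1, 1, 0, 1,   // numerator/denominator pairs
+     *                 0, 1, 0, 1, 1, 1 }));
+     * </code></pre>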
+ * <p>When set to any other modes, the camera device's auto-white
+ * balance routine is disabled. The camera device uses each
+ * particular illumination target for white balance
+ * adjustment. The application's values for
+ * {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform},
+ * {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} and
+ * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} are ignored.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #CONTROL_AWB_MODE_OFF OFF}</li>
+ * <li>{@link #CONTROL_AWB_MODE_AUTO AUTO}</li>
+ * <li>{@link #CONTROL_AWB_MODE_INCANDESCENT INCANDESCENT}</li>
+ * <li>{@link #CONTROL_AWB_MODE_FLUORESCENT FLUORESCENT}</li>
+ * <li>{@link #CONTROL_AWB_MODE_WARM_FLUORESCENT WARM_FLUORESCENT}</li>
+ * <li>{@link #CONTROL_AWB_MODE_DAYLIGHT DAYLIGHT}</li>
+ * <li>{@link #CONTROL_AWB_MODE_CLOUDY_DAYLIGHT CLOUDY_DAYLIGHT}</li>
+ * <li>{@link #CONTROL_AWB_MODE_TWILIGHT TWILIGHT}</li>
+ * <li>{@link #CONTROL_AWB_MODE_SHADE SHADE}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#CONTROL_AWB_AVAILABLE_MODES android.control.awbAvailableModes}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#COLOR_CORRECTION_GAINS
+ * @see CaptureRequest#COLOR_CORRECTION_MODE
+ * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CameraCharacteristics#CONTROL_AWB_AVAILABLE_MODES
+ * @see CaptureRequest#CONTROL_AWB_LOCK
+ * @see CaptureRequest#CONTROL_MODE
+ * @see #CONTROL_AWB_MODE_OFF
+ * @see #CONTROL_AWB_MODE_AUTO
+ * @see #CONTROL_AWB_MODE_INCANDESCENT
+ * @see #CONTROL_AWB_MODE_FLUORESCENT
+ * @see #CONTROL_AWB_MODE_WARM_FLUORESCENT
+ * @see #CONTROL_AWB_MODE_DAYLIGHT
+ * @see #CONTROL_AWB_MODE_CLOUDY_DAYLIGHT
+ * @see #CONTROL_AWB_MODE_TWILIGHT
+ * @see #CONTROL_AWB_MODE_SHADE
+ */
+ @PublicKey
+ public static final Key<Integer> CONTROL_AWB_MODE =
+ new Key<Integer>("android.control.awbMode", int.class);
+
+ /**
+ * <p>List of metering areas to use for auto-white-balance illuminant
+ * estimation.</p>
+ * <p>Not available if {@link CameraCharacteristics#CONTROL_MAX_REGIONS_AWB android.control.maxRegionsAwb} is 0.
+ * Otherwise will always be present.</p>
+ * <p>The maximum number of regions supported by the device is determined by the value
+ * of {@link CameraCharacteristics#CONTROL_MAX_REGIONS_AWB android.control.maxRegionsAwb}.</p>
+ * <p>The coordinate system is based on the active pixel array,
+ * with (0,0) being the top-left pixel in the active pixel array, and
+ * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1,
+ * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the
+ * bottom-right pixel in the active pixel array.</p>
+     * <p>The weight must be within <code>[0, 1000]</code>, and represents a weight
+ * for every pixel in the area. This means that a large metering area
+ * with the same weight as a smaller area will have more effect in
+ * the metering result. Metering areas can partially overlap and the
+ * camera device will add the weights in the overlap region.</p>
+ * <p>The weights are relative to weights of other white balance metering regions, so if
+ * only one region is used, all non-zero weights will have the same effect. A region with
+ * 0 weight is ignored.</p>
+ * <p>If all regions have 0 weight, then no specific metering area needs to be used by the
+ * camera device.</p>
+ * <p>If the metering region is outside the used {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} returned in
+ * capture result metadata, the camera device will ignore the sections outside the crop
+ * region and output only the intersection rectangle as the metering region in the result
+ * metadata. If the region is entirely outside the crop region, it will be ignored and
+ * not reported in the result metadata.</p>
+ * <p><b>Units</b>: Pixel coordinates within {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</p>
+ * <p><b>Range of valid values:</b><br>
+ * Coordinates must be between <code>[(0,0), (width, height))</code> of
+ * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CameraCharacteristics#CONTROL_MAX_REGIONS_AWB
+ * @see CaptureRequest#SCALER_CROP_REGION
+ * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ */
+ @PublicKey
+ public static final Key<android.hardware.camera2.params.MeteringRectangle[]> CONTROL_AWB_REGIONS =
+ new Key<android.hardware.camera2.params.MeteringRectangle[]>("android.control.awbRegions", android.hardware.camera2.params.MeteringRectangle[].class);
+
+ /**
+ * <p>Information to the camera device 3A (auto-exposure,
+ * auto-focus, auto-white balance) routines about the purpose
+ * of this capture, to help the camera device to decide optimal 3A
+ * strategy.</p>
+ * <p>This control (except for MANUAL) is only effective if
+ * <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} != OFF</code> and any 3A routine is active.</p>
+ * <p>ZERO_SHUTTER_LAG will be supported if {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities}
+ * contains PRIVATE_REPROCESSING or YUV_REPROCESSING. MANUAL will be supported if
+ * {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains MANUAL_SENSOR. Other intent values are
+ * always supported.</p>
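+     * <p>For instance, ZERO_SHUTTER_LAG support might be checked with a sketch
+     * like the following ({@code characteristics} assumed in scope):</p>
+     * <pre><code>int[] caps = characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
+     * boolean zslIntentSupported = false;
+     * for (int c : caps) {
+     *     if (c == CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING
+     *             || c == CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING) {
+     *         zslIntentSupported = true;
+     *     }
+     * }
+     * </code></pre>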
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #CONTROL_CAPTURE_INTENT_CUSTOM CUSTOM}</li>
+ * <li>{@link #CONTROL_CAPTURE_INTENT_PREVIEW PREVIEW}</li>
+ * <li>{@link #CONTROL_CAPTURE_INTENT_STILL_CAPTURE STILL_CAPTURE}</li>
+ * <li>{@link #CONTROL_CAPTURE_INTENT_VIDEO_RECORD VIDEO_RECORD}</li>
+ * <li>{@link #CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT VIDEO_SNAPSHOT}</li>
+ * <li>{@link #CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG ZERO_SHUTTER_LAG}</li>
+ * <li>{@link #CONTROL_CAPTURE_INTENT_MANUAL MANUAL}</li>
+ * </ul></p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_MODE
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ * @see #CONTROL_CAPTURE_INTENT_CUSTOM
+ * @see #CONTROL_CAPTURE_INTENT_PREVIEW
+ * @see #CONTROL_CAPTURE_INTENT_STILL_CAPTURE
+ * @see #CONTROL_CAPTURE_INTENT_VIDEO_RECORD
+ * @see #CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT
+ * @see #CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG
+ * @see #CONTROL_CAPTURE_INTENT_MANUAL
+ */
+ @PublicKey
+ public static final Key<Integer> CONTROL_CAPTURE_INTENT =
+ new Key<Integer>("android.control.captureIntent", int.class);
+
+ /**
+ * <p>A special color effect to apply.</p>
+ * <p>When this mode is set, a color effect will be applied
+ * to images produced by the camera device. The interpretation
+ * and implementation of these color effects is left to the
+ * implementor of the camera device, and should not be
+ * depended on to be consistent (or present) across all
+ * devices.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #CONTROL_EFFECT_MODE_OFF OFF}</li>
+ * <li>{@link #CONTROL_EFFECT_MODE_MONO MONO}</li>
+ * <li>{@link #CONTROL_EFFECT_MODE_NEGATIVE NEGATIVE}</li>
+ * <li>{@link #CONTROL_EFFECT_MODE_SOLARIZE SOLARIZE}</li>
+ * <li>{@link #CONTROL_EFFECT_MODE_SEPIA SEPIA}</li>
+ * <li>{@link #CONTROL_EFFECT_MODE_POSTERIZE POSTERIZE}</li>
+ * <li>{@link #CONTROL_EFFECT_MODE_WHITEBOARD WHITEBOARD}</li>
+ * <li>{@link #CONTROL_EFFECT_MODE_BLACKBOARD BLACKBOARD}</li>
+ * <li>{@link #CONTROL_EFFECT_MODE_AQUA AQUA}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#CONTROL_AVAILABLE_EFFECTS android.control.availableEffects}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#CONTROL_AVAILABLE_EFFECTS
+ * @see #CONTROL_EFFECT_MODE_OFF
+ * @see #CONTROL_EFFECT_MODE_MONO
+ * @see #CONTROL_EFFECT_MODE_NEGATIVE
+ * @see #CONTROL_EFFECT_MODE_SOLARIZE
+ * @see #CONTROL_EFFECT_MODE_SEPIA
+ * @see #CONTROL_EFFECT_MODE_POSTERIZE
+ * @see #CONTROL_EFFECT_MODE_WHITEBOARD
+ * @see #CONTROL_EFFECT_MODE_BLACKBOARD
+ * @see #CONTROL_EFFECT_MODE_AQUA
+ */
+ @PublicKey
+ public static final Key<Integer> CONTROL_EFFECT_MODE =
+ new Key<Integer>("android.control.effectMode", int.class);
+
+ /**
+ * <p>Overall mode of 3A (auto-exposure, auto-white-balance, auto-focus) control
+ * routines.</p>
+ * <p>This is a top-level 3A control switch. When set to OFF, all 3A control
+ * by the camera device is disabled. The application must set the fields for
+ * capture parameters itself.</p>
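+     * <p>A sketch of fully manual operation (the numeric values are illustrative
+     * only; {@code requestBuilder} is assumed to be in scope, and the
+     * MANUAL_SENSOR capability is required):</p>
+     * <pre><code>requestBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_OFF);
+     * requestBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 10000000L);   // 10 ms
+     * requestBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, 400);           // ISO 400
+     * requestBuilder.set(CaptureRequest.SENSOR_FRAME_DURATION, 33333333L);  // ~30 fps
+     * </code></pre>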
+ * <p>When set to AUTO, the individual algorithm controls in
+ * android.control.* are in effect, such as {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}.</p>
+ * <p>When set to USE_SCENE_MODE, the individual controls in
+ * android.control.* are mostly disabled, and the camera device implements
+ * one of the scene mode settings (such as ACTION, SUNSET, or PARTY)
+ * as it wishes. The camera device scene mode 3A settings are provided by
+ * {@link android.hardware.camera2.CaptureResult capture results}.</p>
+     * <p>OFF_KEEP_STATE is similar to OFF mode; the only difference
+     * is that this frame will not be used by the camera device's background 3A statistics
+     * update, as if the frame were never captured. This mode can be used when
+     * the application doesn't want a manual-control 3A capture to affect
+     * the subsequent auto 3A capture results.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #CONTROL_MODE_OFF OFF}</li>
+ * <li>{@link #CONTROL_MODE_AUTO AUTO}</li>
+ * <li>{@link #CONTROL_MODE_USE_SCENE_MODE USE_SCENE_MODE}</li>
+ * <li>{@link #CONTROL_MODE_OFF_KEEP_STATE OFF_KEEP_STATE}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#CONTROL_AVAILABLE_MODES android.control.availableModes}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_AF_MODE
+ * @see CameraCharacteristics#CONTROL_AVAILABLE_MODES
+ * @see #CONTROL_MODE_OFF
+ * @see #CONTROL_MODE_AUTO
+ * @see #CONTROL_MODE_USE_SCENE_MODE
+ * @see #CONTROL_MODE_OFF_KEEP_STATE
+ */
+ @PublicKey
+ public static final Key<Integer> CONTROL_MODE =
+ new Key<Integer>("android.control.mode", int.class);
+
+ /**
+ * <p>Control for which scene mode is currently active.</p>
+ * <p>Scene modes are custom camera modes optimized for a certain set of conditions and
+ * capture settings.</p>
+     * <p>This is the mode that is active when
+ * <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} == USE_SCENE_MODE</code>. Aside from FACE_PRIORITY, these modes will
+ * disable {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode}, {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}, and {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}
+ * while in use.</p>
+ * <p>The interpretation and implementation of these scene modes is left
+ * to the implementor of the camera device. Their behavior will not be
+ * consistent across all devices, and any given device may only implement
+ * a subset of these modes.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #CONTROL_SCENE_MODE_DISABLED DISABLED}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_FACE_PRIORITY FACE_PRIORITY}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_ACTION ACTION}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_PORTRAIT PORTRAIT}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_LANDSCAPE LANDSCAPE}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_NIGHT NIGHT}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_NIGHT_PORTRAIT NIGHT_PORTRAIT}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_THEATRE THEATRE}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_BEACH BEACH}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_SNOW SNOW}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_SUNSET SUNSET}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_STEADYPHOTO STEADYPHOTO}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_FIREWORKS FIREWORKS}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_SPORTS SPORTS}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_PARTY PARTY}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_CANDLELIGHT CANDLELIGHT}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_BARCODE BARCODE}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO HIGH_SPEED_VIDEO}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_HDR HDR}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#CONTROL_AVAILABLE_SCENE_MODES android.control.availableSceneModes}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CaptureRequest#CONTROL_AF_MODE
+ * @see CameraCharacteristics#CONTROL_AVAILABLE_SCENE_MODES
+ * @see CaptureRequest#CONTROL_AWB_MODE
+ * @see CaptureRequest#CONTROL_MODE
+ * @see #CONTROL_SCENE_MODE_DISABLED
+ * @see #CONTROL_SCENE_MODE_FACE_PRIORITY
+ * @see #CONTROL_SCENE_MODE_ACTION
+ * @see #CONTROL_SCENE_MODE_PORTRAIT
+ * @see #CONTROL_SCENE_MODE_LANDSCAPE
+ * @see #CONTROL_SCENE_MODE_NIGHT
+ * @see #CONTROL_SCENE_MODE_NIGHT_PORTRAIT
+ * @see #CONTROL_SCENE_MODE_THEATRE
+ * @see #CONTROL_SCENE_MODE_BEACH
+ * @see #CONTROL_SCENE_MODE_SNOW
+ * @see #CONTROL_SCENE_MODE_SUNSET
+ * @see #CONTROL_SCENE_MODE_STEADYPHOTO
+ * @see #CONTROL_SCENE_MODE_FIREWORKS
+ * @see #CONTROL_SCENE_MODE_SPORTS
+ * @see #CONTROL_SCENE_MODE_PARTY
+ * @see #CONTROL_SCENE_MODE_CANDLELIGHT
+ * @see #CONTROL_SCENE_MODE_BARCODE
+ * @see #CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO
+ * @see #CONTROL_SCENE_MODE_HDR
+ */
+ @PublicKey
+ public static final Key<Integer> CONTROL_SCENE_MODE =
+ new Key<Integer>("android.control.sceneMode", int.class);
+
+ /**
+ * <p>Whether video stabilization is
+ * active.</p>
+ * <p>Video stabilization automatically warps images from
+ * the camera in order to stabilize motion between consecutive frames.</p>
+ * <p>If enabled, video stabilization can modify the
+ * {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} to keep the video stream stabilized.</p>
+     * <p>Switching between different video stabilization modes may take several
+     * frames to initialize; the camera device will report the current mode
+     * in capture result metadata. For example, when "ON" mode is requested,
+     * the video stabilization mode in the first several capture results may
+     * still be "OFF", and it will become "ON" when the initialization is
+     * complete.</p>
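+     * <p>For example, a sketch of confirming in a capture {@code result}
+     * (assumed in scope) whether stabilization has actually engaged:</p>
+     * <pre><code>Integer mode = result.get(CaptureResult.CONTROL_VIDEO_STABILIZATION_MODE);
+     * boolean stabilized = (mode != null
+     *         &amp;&amp; mode == CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON);
+     * </code></pre>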
+ * <p>In addition, not all recording sizes or frame rates may be supported for
+ * stabilization by a device that reports stabilization support. It is guaranteed
+ * that an output targeting a MediaRecorder or MediaCodec will be stabilized if
+ * the recording resolution is less than or equal to 1920 x 1080 (width less than
+ * or equal to 1920, height less than or equal to 1080), and the recording
+ * frame rate is less than or equal to 30fps. At other sizes, the CaptureResult
+ * {@link CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE android.control.videoStabilizationMode} field will return
+ * OFF if the recording output is not stabilized, or if there are no output
+ * Surface types that can be stabilized.</p>
+ * <p>If a camera device supports both this mode and OIS
+ * ({@link CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE android.lens.opticalStabilizationMode}), turning both modes on may
+ * produce undesirable interaction, so it is recommended not to enable
+ * both at the same time.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #CONTROL_VIDEO_STABILIZATION_MODE_OFF OFF}</li>
+ * <li>{@link #CONTROL_VIDEO_STABILIZATION_MODE_ON ON}</li>
+ * </ul></p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE
+ * @see CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE
+ * @see CaptureRequest#SCALER_CROP_REGION
+ * @see #CONTROL_VIDEO_STABILIZATION_MODE_OFF
+ * @see #CONTROL_VIDEO_STABILIZATION_MODE_ON
+ */
+ @PublicKey
+ public static final Key<Integer> CONTROL_VIDEO_STABILIZATION_MODE =
+ new Key<Integer>("android.control.videoStabilizationMode", int.class);
+
+ /**
+ * <p>The amount of additional sensitivity boost applied to output images
+ * after RAW sensor data is captured.</p>
+ * <p>Some camera devices support additional digital sensitivity boosting in the
+ * camera processing pipeline after sensor RAW image is captured.
+ * Such a boost will be applied to YUV/JPEG format output images but will not
+ * have effect on RAW output formats like RAW_SENSOR, RAW10, RAW12 or RAW_OPAQUE.</p>
+ * <p>This key will be <code>null</code> for devices that do not support any RAW format
+     * outputs. For devices that do support RAW format outputs, this key will always
+     * be present, and if a device does not support post RAW sensitivity boost, it will
+ * list <code>100</code> in this key.</p>
+ * <p>If the camera device cannot apply the exact boost requested, it will reduce the
+ * boost to the nearest supported value.
+ * The final boost value used will be available in the output capture result.</p>
+ * <p>For devices that support post RAW sensitivity boost, the YUV/JPEG output images
+ * of such device will have the total sensitivity of
+     * <code>{@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity} * {@link CaptureRequest#CONTROL_POST_RAW_SENSITIVITY_BOOST android.control.postRawSensitivityBoost} / 100</code>.
+     * The sensitivity of RAW format images will always be <code>{@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}</code>.</p>
+ * <p>This control is only effective if {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} is set to
+ * OFF; otherwise the auto-exposure algorithm will override this value.</p>
+ * <p><b>Units</b>: ISO arithmetic units, the same as {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}</p>
+ * <p><b>Range of valid values:</b><br>
+ * {@link CameraCharacteristics#CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE android.control.postRawSensitivityBoostRange}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CaptureRequest#CONTROL_MODE
+ * @see CaptureRequest#CONTROL_POST_RAW_SENSITIVITY_BOOST
+ * @see CameraCharacteristics#CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE
+ * @see CaptureRequest#SENSOR_SENSITIVITY
+ */
+ @PublicKey
+ public static final Key<Integer> CONTROL_POST_RAW_SENSITIVITY_BOOST =
+ new Key<Integer>("android.control.postRawSensitivityBoost", int.class);
+
+ /**
+ * <p>Allow camera device to enable zero-shutter-lag mode for requests with
+ * {@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent} == STILL_CAPTURE.</p>
+ * <p>If enableZsl is <code>true</code>, the camera device may enable zero-shutter-lag mode for requests with
+ * STILL_CAPTURE capture intent. The camera device may use images captured in the past to
+ * produce output images for a zero-shutter-lag request. The result metadata including the
+ * {@link CaptureResult#SENSOR_TIMESTAMP android.sensor.timestamp} reflects the source frames used to produce output images.
+ * Therefore, the contents of the output images and the result metadata may be out of order
+ * compared to previous regular requests. enableZsl does not affect requests with other
+ * capture intents.</p>
+     * <p>For example, when requests are submitted in the following order:</p>
+     * <ul>
+     * <li>Request A: enableZsl is ON, {@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent} is PREVIEW</li>
+     * <li>Request B: enableZsl is ON, {@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent} is STILL_CAPTURE</li>
+     * </ul>
+ * <p>The output images for request B may have contents captured before the output images for
+ * request A, and the result metadata for request B may be older than the result metadata for
+ * request A.</p>
+ * <p>Note that when enableZsl is <code>true</code>, it is not guaranteed to get output images captured in
+ * the past for requests with STILL_CAPTURE capture intent.</p>
+ * <p>For applications targeting SDK versions O and newer, the value of enableZsl in
+ * TEMPLATE_STILL_CAPTURE template may be <code>true</code>. The value in other templates is always
+ * <code>false</code> if present.</p>
+ * <p>For applications targeting SDK versions older than O, the value of enableZsl in all
+ * capture templates is always <code>false</code> if present.</p>
+ * <p>For application-operated ZSL, use CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_CAPTURE_INTENT
+ * @see CaptureResult#SENSOR_TIMESTAMP
+ */
+ @PublicKey
+ public static final Key<Boolean> CONTROL_ENABLE_ZSL =
+ new Key<Boolean>("android.control.enableZsl", boolean.class);
+
+ /**
+ * <p>Operation mode for edge
+ * enhancement.</p>
+ * <p>Edge enhancement improves sharpness and details in the captured image. OFF means
+ * no enhancement will be applied by the camera device.</p>
+ * <p>FAST/HIGH_QUALITY both mean camera device determined enhancement
+ * will be applied. HIGH_QUALITY mode indicates that the
+ * camera device will use the highest-quality enhancement algorithms,
+ * even if it slows down capture rate. FAST means the camera device will
+ * not slow down capture rate when applying edge enhancement. FAST may be the same as OFF if
+ * edge enhancement will slow down capture rate. Every output stream will have a similar
+ * amount of enhancement applied.</p>
+ * <p>ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular
+ * buffer of high-resolution images during preview and reprocess image(s) from that buffer
+ * into a final capture when triggered by the user. In this mode, the camera device applies
+ * edge enhancement to low-resolution streams (below maximum recording resolution) to
+ * maximize preview quality, but does not apply edge enhancement to high-resolution streams,
+ * since those will be reprocessed later if necessary.</p>
+     * <p>For YUV_REPROCESSING, the FAST/HIGH_QUALITY modes both mean that the camera
+ * device will apply FAST/HIGH_QUALITY YUV-domain edge enhancement, respectively.
+ * The camera device may adjust its internal edge enhancement parameters for best
+ * image quality based on the {@link CaptureRequest#REPROCESS_EFFECTIVE_EXPOSURE_FACTOR android.reprocess.effectiveExposureFactor}, if it is set.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #EDGE_MODE_OFF OFF}</li>
+ * <li>{@link #EDGE_MODE_FAST FAST}</li>
+ * <li>{@link #EDGE_MODE_HIGH_QUALITY HIGH_QUALITY}</li>
+ * <li>{@link #EDGE_MODE_ZERO_SHUTTER_LAG ZERO_SHUTTER_LAG}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#EDGE_AVAILABLE_EDGE_MODES android.edge.availableEdgeModes}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#EDGE_AVAILABLE_EDGE_MODES
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CaptureRequest#REPROCESS_EFFECTIVE_EXPOSURE_FACTOR
+ * @see #EDGE_MODE_OFF
+ * @see #EDGE_MODE_FAST
+ * @see #EDGE_MODE_HIGH_QUALITY
+ * @see #EDGE_MODE_ZERO_SHUTTER_LAG
+ */
+ @PublicKey
+ public static final Key<Integer> EDGE_MODE =
+ new Key<Integer>("android.edge.mode", int.class);
+
+ /**
+     * <p>The desired mode for the camera device's flash control.</p>
+ * <p>This control is only effective when flash unit is available
+ * (<code>{@link CameraCharacteristics#FLASH_INFO_AVAILABLE android.flash.info.available} == true</code>).</p>
+ * <p>When this control is used, the {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} must be set to ON or OFF.
+ * Otherwise, the camera device auto-exposure related flash control (ON_AUTO_FLASH,
+ * ON_ALWAYS_FLASH, or ON_AUTO_FLASH_REDEYE) will override this control.</p>
+ * <p>When set to OFF, the camera device will not fire flash for this capture.</p>
+ * <p>When set to SINGLE, the camera device will fire flash regardless of the camera
+     * device's auto-exposure routine's result. In the still capture case, this
+     * control should be used along with the auto-exposure (AE) precapture metering sequence
+     * ({@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger}); otherwise, the image may be incorrectly exposed.</p>
+ * <p>When set to TORCH, the flash will be on continuously. This mode can be used
+ * for use cases such as preview, auto-focus assist, still capture, or video recording.</p>
+ * <p>The flash status will be reported by {@link CaptureResult#FLASH_STATE android.flash.state} in the capture result metadata.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #FLASH_MODE_OFF OFF}</li>
+ * <li>{@link #FLASH_MODE_SINGLE SINGLE}</li>
+ * <li>{@link #FLASH_MODE_TORCH TORCH}</li>
+ * </ul></p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
+ * @see CameraCharacteristics#FLASH_INFO_AVAILABLE
+ * @see CaptureResult#FLASH_STATE
+ * @see #FLASH_MODE_OFF
+ * @see #FLASH_MODE_SINGLE
+ * @see #FLASH_MODE_TORCH
+ */
+ @PublicKey
+ public static final Key<Integer> FLASH_MODE =
+ new Key<Integer>("android.flash.mode", int.class);
+
+ /**
+ * <p>Operational mode for hot pixel correction.</p>
+     * <p>Hot pixel correction interpolates out, or otherwise removes, pixels
+ * that do not accurately measure the incoming light (i.e. pixels that
+ * are stuck at an arbitrary value or are oversensitive).</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #HOT_PIXEL_MODE_OFF OFF}</li>
+ * <li>{@link #HOT_PIXEL_MODE_FAST FAST}</li>
+ * <li>{@link #HOT_PIXEL_MODE_HIGH_QUALITY HIGH_QUALITY}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES android.hotPixel.availableHotPixelModes}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CameraCharacteristics#HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES
+ * @see #HOT_PIXEL_MODE_OFF
+ * @see #HOT_PIXEL_MODE_FAST
+ * @see #HOT_PIXEL_MODE_HIGH_QUALITY
+ */
+ @PublicKey
+ public static final Key<Integer> HOT_PIXEL_MODE =
+ new Key<Integer>("android.hotPixel.mode", int.class);
+
+ /**
+ * <p>A location object to use when generating image GPS metadata.</p>
+ * <p>Setting a location object in a request will include the GPS coordinates of the location
+ * into any JPEG images captured based on the request. These coordinates can then be
+ * viewed by anyone who receives the JPEG image.</p>
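+     * <p>As a sketch, a last-known location might be attached like this
+     * ({@code locationManager} and {@code requestBuilder} assumed in scope;
+     * location permission is required):</p>
+     * <pre><code>Location loc =
+     *         locationManager.getLastKnownLocation(LocationManager.GPS_PROVIDER);
+     * if (loc != null) {
+     *     requestBuilder.set(CaptureRequest.JPEG_GPS_LOCATION, loc);
+     * }
+     * </code></pre>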
+ * <p>This key is available on all devices.</p>
+ */
+ @PublicKey
+ @SyntheticKey
+ public static final Key<android.location.Location> JPEG_GPS_LOCATION =
+ new Key<android.location.Location>("android.jpeg.gpsLocation", android.location.Location.class);
+
+ /**
+ * <p>GPS coordinates to include in output JPEG
+ * EXIF.</p>
+ * <p><b>Range of valid values:</b><br>
+     * (-180, 180], [-90, 90], [-inf, inf]</p>
+ * <p>This key is available on all devices.</p>
+ * @hide
+ */
+ public static final Key<double[]> JPEG_GPS_COORDINATES =
+ new Key<double[]>("android.jpeg.gpsCoordinates", double[].class);
+
+ /**
+     * <p>32 characters describing the GPS algorithm to
+ * include in EXIF.</p>
+ * <p><b>Units</b>: UTF-8 null-terminated string</p>
+ * <p>This key is available on all devices.</p>
+ * @hide
+ */
+ public static final Key<String> JPEG_GPS_PROCESSING_METHOD =
+ new Key<String>("android.jpeg.gpsProcessingMethod", String.class);
+
+ /**
+ * <p>Time GPS fix was made to include in
+ * EXIF.</p>
+ * <p><b>Units</b>: UTC in seconds since January 1, 1970</p>
+ * <p>This key is available on all devices.</p>
+ * @hide
+ */
+ public static final Key<Long> JPEG_GPS_TIMESTAMP =
+ new Key<Long>("android.jpeg.gpsTimestamp", long.class);
+
+ /**
+ * <p>The orientation for a JPEG image.</p>
+     * <p>The clockwise rotation angle in degrees, relative to the orientation
+     * of the camera, that the JPEG picture needs to be rotated by, to be viewed
+ * upright.</p>
+ * <p>Camera devices may either encode this value into the JPEG EXIF header, or
+ * rotate the image data to match this orientation. When the image data is rotated,
+ * the thumbnail data will also be rotated.</p>
+ * <p>Note that this orientation is relative to the orientation of the camera sensor, given
+ * by {@link CameraCharacteristics#SENSOR_ORIENTATION android.sensor.orientation}.</p>
+ * <p>To translate from the device orientation given by the Android sensor APIs, the following
+ * sample code may be used:</p>
+ * <pre><code>private int getJpegOrientation(CameraCharacteristics c, int deviceOrientation) {
+ * if (deviceOrientation == android.view.OrientationEventListener.ORIENTATION_UNKNOWN) return 0;
+ * int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);
+ *
+ * // Round device orientation to a multiple of 90
+ * deviceOrientation = (deviceOrientation + 45) / 90 * 90;
+ *
+ * // Reverse device orientation for front-facing cameras
+ * boolean facingFront = c.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT;
+ * if (facingFront) deviceOrientation = -deviceOrientation;
+ *
+ * // Calculate desired JPEG orientation relative to camera orientation to make
+ * // the image upright relative to the device orientation
+ * int jpegOrientation = (sensorOrientation + deviceOrientation + 360) % 360;
+ *
+ * return jpegOrientation;
+ * }
+ * </code></pre>
+ * <p><b>Units</b>: Degrees in multiples of 90</p>
+ * <p><b>Range of valid values:</b><br>
+ * 0, 90, 180, 270</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#SENSOR_ORIENTATION
+ */
+ @PublicKey
+ public static final Key<Integer> JPEG_ORIENTATION =
+ new Key<Integer>("android.jpeg.orientation", int.class);
+
+ /**
+ * <p>Compression quality of the final JPEG
+ * image.</p>
+     * <p>The typical usage range is 85-95.</p>
+ * <p><b>Range of valid values:</b><br>
+ * 1-100; larger is higher quality</p>
+ * <p>This key is available on all devices.</p>
+ */
+ @PublicKey
+ public static final Key<Byte> JPEG_QUALITY =
+ new Key<Byte>("android.jpeg.quality", byte.class);
+
+ /**
+ * <p>Compression quality of JPEG
+ * thumbnail.</p>
+ * <p><b>Range of valid values:</b><br>
+ * 1-100; larger is higher quality</p>
+ * <p>This key is available on all devices.</p>
+ */
+ @PublicKey
+ public static final Key<Byte> JPEG_THUMBNAIL_QUALITY =
+ new Key<Byte>("android.jpeg.thumbnailQuality", byte.class);
+
+ /**
+ * <p>Resolution of embedded JPEG thumbnail.</p>
+     * <p>When set to (0, 0), the JPEG EXIF will not contain a thumbnail,
+ * but the captured JPEG will still be a valid image.</p>
+ * <p>For best results, when issuing a request for a JPEG image, the thumbnail size selected
+ * should have the same aspect ratio as the main JPEG output.</p>
+ * <p>If the thumbnail image aspect ratio differs from the JPEG primary image aspect
+ * ratio, the camera device creates the thumbnail by cropping it from the primary image.
+     * For example, if the primary image has a 4:3 aspect ratio and the thumbnail image has
+     * a 16:9 aspect ratio, the primary image will be cropped vertically (letterboxed) to
+ * generate the thumbnail image. The thumbnail image will always have a smaller Field
+ * Of View (FOV) than the primary image when aspect ratios differ.</p>
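+     * <p>A minimal sketch of such a selection (illustrative only; <code>pickThumbnailSize</code>
+     * and its arguments are hypothetical, not part of this API):</p>
+     * <pre><code>private android.util.Size pickThumbnailSize(CameraCharacteristics c,
+     *         android.util.Size jpegSize) {
+     *     // All listed sizes are legal values for android.jpeg.thumbnailSize
+     *     android.util.Size[] sizes = c.get(CameraCharacteristics.JPEG_AVAILABLE_THUMBNAIL_SIZES);
+     *     for (android.util.Size s : sizes) {
+     *         if (s.getWidth() == 0) continue; // (0, 0) disables the thumbnail entirely
+     *         // Keep the first size whose aspect ratio matches the main JPEG output
+     *         if (s.getWidth() * jpegSize.getHeight() == s.getHeight() * jpegSize.getWidth()) {
+     *             return s;
+     *         }
+     *     }
+     *     return sizes[sizes.length - 1]; // fall back to the last listed size
+     * }
+     * </code></pre>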
+ * <p>When an {@link CaptureRequest#JPEG_ORIENTATION android.jpeg.orientation} of non-zero degree is requested,
+ * the camera device will handle thumbnail rotation in one of the following ways:</p>
+ * <ul>
+ * <li>Set the {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}
+ * and keep jpeg and thumbnail image data unrotated.</li>
+ * <li>Rotate the jpeg and thumbnail image data and not set
+ * {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}. In this
+     * case, LIMITED or FULL hardware level devices will report the rotated thumbnail size in
+     * the capture result, so the width and height will be interchanged if a 90 or 270 degree
+     * orientation is requested. LEGACY devices will always report the unrotated thumbnail
+ * size.</li>
+ * </ul>
+ * <p><b>Range of valid values:</b><br>
+ * {@link CameraCharacteristics#JPEG_AVAILABLE_THUMBNAIL_SIZES android.jpeg.availableThumbnailSizes}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#JPEG_AVAILABLE_THUMBNAIL_SIZES
+ * @see CaptureRequest#JPEG_ORIENTATION
+ */
+ @PublicKey
+ public static final Key<android.util.Size> JPEG_THUMBNAIL_SIZE =
+ new Key<android.util.Size>("android.jpeg.thumbnailSize", android.util.Size.class);
+
+ /**
+ * <p>The desired lens aperture size, as a ratio of lens focal length to the
+ * effective aperture diameter.</p>
+ * <p>Setting this value is only supported on the camera devices that have a variable
+ * aperture lens.</p>
+ * <p>When this is supported and {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is OFF,
+ * this can be set along with {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime},
+ * {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}, and {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}
+ * to achieve manual exposure control.</p>
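+     * <p>As an illustrative sketch only (assuming a request <code>builder</code> created with
+     * {@link CameraDevice#createCaptureRequest} and example values appropriate for the
+     * device):</p>
+     * <pre><code>builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
+     * builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 10000000L); // 10 ms, in nanoseconds
+     * builder.set(CaptureRequest.SENSOR_SENSITIVITY, 100);         // ISO 100
+     * builder.set(CaptureRequest.LENS_APERTURE, 2.0f);             // must be a listed available aperture
+     * </code></pre>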
+ * <p>The requested aperture value may take several frames to reach the
+ * requested value; the camera device will report the current (intermediate)
+ * aperture size in capture result metadata while the aperture is changing.
+ * While the aperture is still changing, {@link CaptureResult#LENS_STATE android.lens.state} will be set to MOVING.</p>
+ * <p>When this is supported and {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is one of
+ * the ON modes, this will be overridden by the camera device
+     * auto-exposure algorithm; the overridden values are then provided
+ * back to the user in the corresponding result.</p>
+ * <p><b>Units</b>: The f-number (f/N)</p>
+ * <p><b>Range of valid values:</b><br>
+ * {@link CameraCharacteristics#LENS_INFO_AVAILABLE_APERTURES android.lens.info.availableApertures}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#LENS_INFO_AVAILABLE_APERTURES
+ * @see CaptureResult#LENS_STATE
+ * @see CaptureRequest#SENSOR_EXPOSURE_TIME
+ * @see CaptureRequest#SENSOR_FRAME_DURATION
+ * @see CaptureRequest#SENSOR_SENSITIVITY
+ */
+ @PublicKey
+ public static final Key<Float> LENS_APERTURE =
+ new Key<Float>("android.lens.aperture", float.class);
+
+ /**
+ * <p>The desired setting for the lens neutral density filter(s).</p>
+ * <p>This control will not be supported on most camera devices.</p>
+ * <p>Lens filters are typically used to lower the amount of light the
+ * sensor is exposed to (measured in steps of EV). As used here, an EV
+     * step is the standard logarithmic representation; values are
+     * non-negative and inversely proportional to the amount of light
+     * hitting the sensor. For example, setting this to 0 would result
+ * in no reduction of the incoming light, and setting this to 2 would
+ * mean that the filter is set to reduce incoming light by two stops
+ * (allowing 1/4 of the prior amount of light to the sensor).</p>
+ * <p>It may take several frames before the lens filter density changes
+ * to the requested value. While the filter density is still changing,
+ * {@link CaptureResult#LENS_STATE android.lens.state} will be set to MOVING.</p>
+ * <p><b>Units</b>: Exposure Value (EV)</p>
+ * <p><b>Range of valid values:</b><br>
+ * {@link CameraCharacteristics#LENS_INFO_AVAILABLE_FILTER_DENSITIES android.lens.info.availableFilterDensities}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#LENS_INFO_AVAILABLE_FILTER_DENSITIES
+ * @see CaptureResult#LENS_STATE
+ */
+ @PublicKey
+ public static final Key<Float> LENS_FILTER_DENSITY =
+ new Key<Float>("android.lens.filterDensity", float.class);
+
+ /**
+ * <p>The desired lens focal length; used for optical zoom.</p>
+ * <p>This setting controls the physical focal length of the camera
+ * device's lens. Changing the focal length changes the field of
+ * view of the camera device, and is usually used for optical zoom.</p>
+ * <p>Like {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance} and {@link CaptureRequest#LENS_APERTURE android.lens.aperture}, this
+ * setting won't be applied instantaneously, and it may take several
+ * frames before the lens can change to the requested focal length.
+ * While the focal length is still changing, {@link CaptureResult#LENS_STATE android.lens.state} will
+ * be set to MOVING.</p>
+ * <p>Optical zoom will not be supported on most devices.</p>
+ * <p><b>Units</b>: Millimeters</p>
+ * <p><b>Range of valid values:</b><br>
+ * {@link CameraCharacteristics#LENS_INFO_AVAILABLE_FOCAL_LENGTHS android.lens.info.availableFocalLengths}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#LENS_APERTURE
+ * @see CaptureRequest#LENS_FOCUS_DISTANCE
+ * @see CameraCharacteristics#LENS_INFO_AVAILABLE_FOCAL_LENGTHS
+ * @see CaptureResult#LENS_STATE
+ */
+ @PublicKey
+ public static final Key<Float> LENS_FOCAL_LENGTH =
+ new Key<Float>("android.lens.focalLength", float.class);
+
+ /**
+ * <p>Desired distance to plane of sharpest focus,
+ * measured from frontmost surface of the lens.</p>
+ * <p>This control can be used for setting manual focus, on devices that support
+ * the MANUAL_SENSOR capability and have a variable-focus lens (see
+ * {@link CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE android.lens.info.minimumFocusDistance}).</p>
+ * <p>A value of <code>0.0f</code> means infinity focus. The value set will be clamped to
+ * <code>[0.0f, {@link CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE android.lens.info.minimumFocusDistance}]</code>.</p>
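+     * <p>A minimal manual-focus sketch (assuming <code>builder</code> is a request builder,
+     * <code>c</code> is this device's {@link CameraCharacteristics}, and the device reports a
+     * non-zero {@link CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE android.lens.info.minimumFocusDistance}):</p>
+     * <pre><code>builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_OFF);
+     * float minFocusDistance = c.get(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE);
+     * // Request focus at 1.0 in the device's distance units (nominally ~1 m when calibrated
+     * // in diopters), staying within the valid [0.0f, minFocusDistance] range
+     * builder.set(CaptureRequest.LENS_FOCUS_DISTANCE, Math.min(1.0f, minFocusDistance));
+     * </code></pre>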
+ * <p>Like {@link CaptureRequest#LENS_FOCAL_LENGTH android.lens.focalLength}, this setting won't be applied
+ * instantaneously, and it may take several frames before the lens
+ * can move to the requested focus distance. While the lens is still moving,
+ * {@link CaptureResult#LENS_STATE android.lens.state} will be set to MOVING.</p>
+ * <p>LEGACY devices support at most setting this to <code>0.0f</code>
+ * for infinity focus.</p>
+ * <p><b>Units</b>: See {@link CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION android.lens.info.focusDistanceCalibration} for details</p>
+ * <p><b>Range of valid values:</b><br>
+ * &gt;= 0</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CaptureRequest#LENS_FOCAL_LENGTH
+ * @see CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION
+ * @see CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE
+ * @see CaptureResult#LENS_STATE
+ */
+ @PublicKey
+ public static final Key<Float> LENS_FOCUS_DISTANCE =
+ new Key<Float>("android.lens.focusDistance", float.class);
+
+ /**
+ * <p>Sets whether the camera device uses optical image stabilization (OIS)
+ * when capturing images.</p>
+ * <p>OIS is used to compensate for motion blur due to small
+ * movements of the camera during capture. Unlike digital image
+ * stabilization ({@link CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE android.control.videoStabilizationMode}), OIS
+ * makes use of mechanical elements to stabilize the camera
+ * sensor, and thus allows for longer exposure times before
+ * camera shake becomes apparent.</p>
+ * <p>Switching between different optical stabilization modes may take several
+     * frames to initialize; the camera device will report the current mode in
+     * capture result metadata. For example, when "ON" mode is requested, the
+     * optical stabilization mode in the first several capture results may still
+ * be "OFF", and it will become "ON" when the initialization is done.</p>
+ * <p>If a camera device supports both OIS and digital image stabilization
+ * ({@link CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE android.control.videoStabilizationMode}), turning both modes on may produce undesirable
+ * interaction, so it is recommended not to enable both at the same time.</p>
+ * <p>Not all devices will support OIS; see
+ * {@link CameraCharacteristics#LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION android.lens.info.availableOpticalStabilization} for
+ * available controls.</p>
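+     * <p>A sketch of such an availability check (assuming <code>c</code> is this device's
+     * {@link CameraCharacteristics}, <code>builder</code> is a request builder, and the
+     * availability key is non-null on this device):</p>
+     * <pre><code>int[] modes = c.get(CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION);
+     * for (int mode : modes) {
+     *     if (mode == CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE_ON) {
+     *         // ON is listed, so it is safe to request it
+     *         builder.set(CaptureRequest.LENS_OPTICAL_STABILIZATION_MODE, mode);
+     *         break;
+     *     }
+     * }
+     * </code></pre>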
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #LENS_OPTICAL_STABILIZATION_MODE_OFF OFF}</li>
+ * <li>{@link #LENS_OPTICAL_STABILIZATION_MODE_ON ON}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION android.lens.info.availableOpticalStabilization}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Limited capability</b> -
+ * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION
+ * @see #LENS_OPTICAL_STABILIZATION_MODE_OFF
+ * @see #LENS_OPTICAL_STABILIZATION_MODE_ON
+ */
+ @PublicKey
+ public static final Key<Integer> LENS_OPTICAL_STABILIZATION_MODE =
+ new Key<Integer>("android.lens.opticalStabilizationMode", int.class);
+
+ /**
+ * <p>Mode of operation for the noise reduction algorithm.</p>
+ * <p>The noise reduction algorithm attempts to improve image quality by removing
+ * excessive noise added by the capture process, especially in dark conditions.</p>
+ * <p>OFF means no noise reduction will be applied by the camera device, for both raw and
+ * YUV domain.</p>
+     * <p>MINIMAL means that only basic sensor raw domain noise reduction is enabled, to remove
+     * demosaicing or other processing artifacts. For YUV_REPROCESSING, MINIMAL is the same as OFF.
+     * This mode is optional and may not be supported by all devices. The application should check
+ * {@link CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES android.noiseReduction.availableNoiseReductionModes} before using it.</p>
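+     * <p>For illustration, such a check could be sketched as follows (assuming <code>c</code>
+     * is this device's {@link CameraCharacteristics} and the key is non-null):</p>
+     * <pre><code>boolean hasMinimal = false;
+     * int[] modes = c.get(CameraCharacteristics.NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES);
+     * for (int mode : modes) {
+     *     if (mode == CaptureRequest.NOISE_REDUCTION_MODE_MINIMAL) {
+     *         hasMinimal = true; // MINIMAL may be requested on this device
+     *         break;
+     *     }
+     * }
+     * </code></pre>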
+ * <p>FAST/HIGH_QUALITY both mean camera device determined noise filtering
+ * will be applied. HIGH_QUALITY mode indicates that the camera device
+ * will use the highest-quality noise filtering algorithms,
+ * even if it slows down capture rate. FAST means the camera device will not
+ * slow down capture rate when applying noise filtering. FAST may be the same as MINIMAL if
+ * MINIMAL is listed, or the same as OFF if any noise filtering will slow down capture rate.
+ * Every output stream will have a similar amount of enhancement applied.</p>
+ * <p>ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular
+ * buffer of high-resolution images during preview and reprocess image(s) from that buffer
+ * into a final capture when triggered by the user. In this mode, the camera device applies
+ * noise reduction to low-resolution streams (below maximum recording resolution) to maximize
+ * preview quality, but does not apply noise reduction to high-resolution streams, since
+ * those will be reprocessed later if necessary.</p>
+ * <p>For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera device
+ * will apply FAST/HIGH_QUALITY YUV domain noise reduction, respectively. The camera device
+ * may adjust the noise reduction parameters for best image quality based on the
+ * {@link CaptureRequest#REPROCESS_EFFECTIVE_EXPOSURE_FACTOR android.reprocess.effectiveExposureFactor} if it is set.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #NOISE_REDUCTION_MODE_OFF OFF}</li>
+ * <li>{@link #NOISE_REDUCTION_MODE_FAST FAST}</li>
+ * <li>{@link #NOISE_REDUCTION_MODE_HIGH_QUALITY HIGH_QUALITY}</li>
+ * <li>{@link #NOISE_REDUCTION_MODE_MINIMAL MINIMAL}</li>
+ * <li>{@link #NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG ZERO_SHUTTER_LAG}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES android.noiseReduction.availableNoiseReductionModes}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES
+ * @see CaptureRequest#REPROCESS_EFFECTIVE_EXPOSURE_FACTOR
+ * @see #NOISE_REDUCTION_MODE_OFF
+ * @see #NOISE_REDUCTION_MODE_FAST
+ * @see #NOISE_REDUCTION_MODE_HIGH_QUALITY
+ * @see #NOISE_REDUCTION_MODE_MINIMAL
+ * @see #NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG
+ */
+ @PublicKey
+ public static final Key<Integer> NOISE_REDUCTION_MODE =
+ new Key<Integer>("android.noiseReduction.mode", int.class);
+
+ /**
+ * <p>An application-specified ID for the current
+     * request. It must be maintained unchanged in the
+     * output frame.</p>
+ * <p><b>Units</b>: arbitrary integer assigned by application</p>
+ * <p><b>Range of valid values:</b><br>
+ * Any int</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * @hide
+ */
+ public static final Key<Integer> REQUEST_ID =
+ new Key<Integer>("android.request.id", int.class);
+
+ /**
+ * <p>The desired region of the sensor to read out for this capture.</p>
+ * <p>This control can be used to implement digital zoom.</p>
+ * <p>The crop region coordinate system is based off
+ * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}, with <code>(0, 0)</code> being the
+ * top-left corner of the sensor active array.</p>
+ * <p>Output streams use this rectangle to produce their output,
+ * cropping to a smaller region if necessary to maintain the
+ * stream's aspect ratio, then scaling the sensor input to
+ * match the output's configured resolution.</p>
+ * <p>The crop region is applied after the RAW to other color
+ * space (e.g. YUV) conversion. Since raw streams
+ * (e.g. RAW16) don't have the conversion stage, they are not
+ * croppable. The crop region will be ignored by raw streams.</p>
+ * <p>For non-raw streams, any additional per-stream cropping will
+ * be done to maximize the final pixel area of the stream.</p>
+ * <p>For example, if the crop region is set to a 4:3 aspect
+ * ratio, then 4:3 streams will use the exact crop
+ * region. 16:9 streams will further crop vertically
+ * (letterbox).</p>
+     * <p>Conversely, if the crop region is set to a 16:9 aspect ratio, then 4:3
+ * outputs will crop horizontally (pillarbox), and 16:9
+ * streams will match exactly. These additional crops will
+ * be centered within the crop region.</p>
+ * <p>The width and height of the crop region cannot
+ * be set to be smaller than
+ * <code>floor( activeArraySize.width / {@link CameraCharacteristics#SCALER_AVAILABLE_MAX_DIGITAL_ZOOM android.scaler.availableMaxDigitalZoom} )</code> and
+ * <code>floor( activeArraySize.height / {@link CameraCharacteristics#SCALER_AVAILABLE_MAX_DIGITAL_ZOOM android.scaler.availableMaxDigitalZoom} )</code>, respectively.</p>
+ * <p>The camera device may adjust the crop region to account
+ * for rounding and other hardware requirements; the final
+ * crop region used will be included in the output capture
+ * result.</p>
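+     * <p>As a sketch only, a centered crop region for a digital zoom factor
+     * <code>zoom &gt;= 1.0f</code> (a hypothetical variable; <code>c</code> is this device's
+     * {@link CameraCharacteristics} and <code>builder</code> is a request builder) could be
+     * computed as:</p>
+     * <pre><code>android.graphics.Rect active = c.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+     * int cropW = (int) (active.width() / zoom);
+     * int cropH = (int) (active.height() / zoom);
+     * // (0, 0) is the top-left corner of the active array in this coordinate system
+     * int left = (active.width() - cropW) / 2;
+     * int top = (active.height() - cropH) / 2;
+     * builder.set(CaptureRequest.SCALER_CROP_REGION,
+     *         new android.graphics.Rect(left, top, left + cropW, top + cropH));
+     * </code></pre>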
+ * <p><b>Units</b>: Pixel coordinates relative to
+ * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#SCALER_AVAILABLE_MAX_DIGITAL_ZOOM
+ * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ */
+ @PublicKey
+ public static final Key<android.graphics.Rect> SCALER_CROP_REGION =
+ new Key<android.graphics.Rect>("android.scaler.cropRegion", android.graphics.Rect.class);
+
+ /**
+ * <p>Duration each pixel is exposed to
+ * light.</p>
+     * <p>If the sensor can't expose for this exact duration, it will shorten the
+     * duration to the nearest possible supported value (rather than expose longer).
+ * The final exposure time used will be available in the output capture result.</p>
+ * <p>This control is only effective if {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} is set to
+ * OFF; otherwise the auto-exposure algorithm will override this value.</p>
+ * <p><b>Units</b>: Nanoseconds</p>
+ * <p><b>Range of valid values:</b><br>
+ * {@link CameraCharacteristics#SENSOR_INFO_EXPOSURE_TIME_RANGE android.sensor.info.exposureTimeRange}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CaptureRequest#CONTROL_MODE
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#SENSOR_INFO_EXPOSURE_TIME_RANGE
+ */
+ @PublicKey
+ public static final Key<Long> SENSOR_EXPOSURE_TIME =
+ new Key<Long>("android.sensor.exposureTime", long.class);
+
+ /**
+ * <p>Duration from start of frame exposure to
+ * start of next frame exposure.</p>
+ * <p>The maximum frame rate that can be supported by a camera subsystem is
+ * a function of many factors:</p>
+ * <ul>
+ * <li>Requested resolutions of output image streams</li>
+ * <li>Availability of binning / skipping modes on the imager</li>
+ * <li>The bandwidth of the imager interface</li>
+ * <li>The bandwidth of the various ISP processing blocks</li>
+ * </ul>
+ * <p>Since these factors can vary greatly between different ISPs and
+ * sensors, the camera abstraction tries to represent the bandwidth
+ * restrictions with as simple a model as possible.</p>
+ * <p>The model presented has the following characteristics:</p>
+ * <ul>
+ * <li>The image sensor is always configured to output the smallest
+ * resolution possible given the application's requested output stream
+ * sizes. The smallest resolution is defined as being at least as large
+ * as the largest requested output stream size; the camera pipeline must
+ * never digitally upsample sensor data when the crop region covers the
+ * whole sensor. In general, this means that if only small output stream
+ * resolutions are configured, the sensor can provide a higher frame
+ * rate.</li>
+     * <li>Since any request may use any or all of the currently configured
+ * output streams, the sensor and ISP must be configured to support
+ * scaling a single capture to all the streams at the same time. This
+ * means the camera pipeline must be ready to produce the largest
+ * requested output size without any delay. Therefore, the overall
+ * frame rate of a given configured stream set is governed only by the
+ * largest requested stream resolution.</li>
+ * <li>Using more than one output stream in a request does not affect the
+ * frame duration.</li>
+ * <li>Certain format-streams may need to do additional background processing
+ * before data is consumed/produced by that stream. These processors
+ * can run concurrently to the rest of the camera pipeline, but
+ * cannot process more than 1 capture at a time.</li>
+ * </ul>
+ * <p>The necessary information for the application, given the model above,
+ * is provided via the {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} field using
+ * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration }.
+ * These are used to determine the maximum frame rate / minimum frame
+ * duration that is possible for a given stream configuration.</p>
+ * <p>Specifically, the application can use the following rules to
+ * determine the minimum frame duration it can request from the camera
+     * device (a code sketch follows the list):</p>
+ * <ol>
+ * <li>Let the set of currently configured input/output streams
+ * be called <code>S</code>.</li>
+     * <li>Find the minimum frame duration for each stream in <code>S</code>, by looking
+ * it up in {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} using {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration }
+ * (with its respective size/format). Let this set of frame durations be
+ * called <code>F</code>.</li>
+ * <li>For any given request <code>R</code>, the minimum frame duration allowed
+ * for <code>R</code> is the maximum out of all values in <code>F</code>. Let the streams
+ * used in <code>R</code> be called <code>S_r</code>.</li>
+ * </ol>
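+     * <p>A sketch of these rules (assuming <code>map</code> is this device's
+     * {@link android.hardware.camera2.params.StreamConfigurationMap}, and hypothetical
+     * parallel arrays <code>formats</code> and <code>sizes</code> describe the configured
+     * streams):</p>
+     * <pre><code>long minFrameDuration = 0; // rule 3: the maximum over the set F
+     * for (int i = 0; i &lt; formats.length; i++) {
+     *     // rule 2: per-stream minimum frame duration, in nanoseconds
+     *     long d = map.getOutputMinFrameDuration(formats[i], sizes[i]);
+     *     minFrameDuration = Math.max(minFrameDuration, d);
+     * }
+     * </code></pre>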
+ * <p>If none of the streams in <code>S_r</code> have a stall time (listed in {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration }
+ * using its respective size/format), then the frame duration in <code>F</code>
+ * determines the steady state frame rate that the application will get
+ * if it uses <code>R</code> as a repeating request. Let this special kind of
+ * request be called <code>Rsimple</code>.</p>
+ * <p>A repeating request <code>Rsimple</code> can be <em>occasionally</em> interleaved
+ * by a single capture of a new request <code>Rstall</code> (which has at least
+ * one in-use stream with a non-0 stall time) and if <code>Rstall</code> has the
+ * same minimum frame duration this will not cause a frame rate loss
+ * if all buffers from the previous <code>Rstall</code> have already been
+ * delivered.</p>
+ * <p>For more details about stalling, see
+ * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration }.</p>
+ * <p>This control is only effective if {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} is set to
+ * OFF; otherwise the auto-exposure algorithm will override this value.</p>
+ * <p><b>Units</b>: Nanoseconds</p>
+ * <p><b>Range of valid values:</b><br>
+ * See {@link CameraCharacteristics#SENSOR_INFO_MAX_FRAME_DURATION android.sensor.info.maxFrameDuration},
+ * {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap}. The duration
+ * is capped to <code>max(duration, exposureTime + overhead)</code>.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CaptureRequest#CONTROL_MODE
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
+ * @see CameraCharacteristics#SENSOR_INFO_MAX_FRAME_DURATION
+ */
+ @PublicKey
+ public static final Key<Long> SENSOR_FRAME_DURATION =
+ new Key<Long>("android.sensor.frameDuration", long.class);
+
+ /**
+ * <p>The amount of gain applied to sensor data
+ * before processing.</p>
+ * <p>The sensitivity is the standard ISO sensitivity value,
+ * as defined in ISO 12232:2006.</p>
+ * <p>The sensitivity must be within {@link CameraCharacteristics#SENSOR_INFO_SENSITIVITY_RANGE android.sensor.info.sensitivityRange}, and
+     * if it is less than {@link CameraCharacteristics#SENSOR_MAX_ANALOG_SENSITIVITY android.sensor.maxAnalogSensitivity}, the camera device
+ * is guaranteed to use only analog amplification for applying the gain.</p>
+ * <p>If the camera device cannot apply the exact sensitivity
+ * requested, it will reduce the gain to the nearest supported
+ * value. The final sensitivity used will be available in the
+ * output capture result.</p>
+ * <p>This control is only effective if {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} is set to
+ * OFF; otherwise the auto-exposure algorithm will override this value.</p>
+ * <p><b>Units</b>: ISO arithmetic units</p>
+ * <p><b>Range of valid values:</b><br>
+ * {@link CameraCharacteristics#SENSOR_INFO_SENSITIVITY_RANGE android.sensor.info.sensitivityRange}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CaptureRequest#CONTROL_MODE
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#SENSOR_INFO_SENSITIVITY_RANGE
+ * @see CameraCharacteristics#SENSOR_MAX_ANALOG_SENSITIVITY
+ */
+ @PublicKey
+ public static final Key<Integer> SENSOR_SENSITIVITY =
+ new Key<Integer>("android.sensor.sensitivity", int.class);
+
+ /**
+ * <p>A pixel <code>[R, G_even, G_odd, B]</code> that supplies the test pattern
+ * when {@link CaptureRequest#SENSOR_TEST_PATTERN_MODE android.sensor.testPatternMode} is SOLID_COLOR.</p>
+ * <p>Each color channel is treated as an unsigned 32-bit integer.
+ * The camera device then uses the most significant X bits
+ * that correspond to how many bits are in its Bayer raw sensor
+ * output.</p>
+ * <p>For example, a sensor with RAW10 Bayer output would use the
+ * 10 most significant bits from each color channel.</p>
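+     * <p>A sketch of requesting a solid green test pattern (assuming <code>builder</code> is a
+     * request builder and SOLID_COLOR is listed as available on this device):</p>
+     * <pre><code>builder.set(CaptureRequest.SENSOR_TEST_PATTERN_MODE,
+     *         CaptureRequest.SENSOR_TEST_PATTERN_MODE_SOLID_COLOR);
+     * // [R, G_even, G_odd, B]: zero red/blue, full-scale greens (treated as unsigned 32-bit)
+     * builder.set(CaptureRequest.SENSOR_TEST_PATTERN_DATA,
+     *         new int[] { 0, 0xFFFFFFFF, 0xFFFFFFFF, 0 });
+     * </code></pre>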
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CaptureRequest#SENSOR_TEST_PATTERN_MODE
+ */
+ @PublicKey
+ public static final Key<int[]> SENSOR_TEST_PATTERN_DATA =
+ new Key<int[]>("android.sensor.testPatternData", int[].class);
+
+ /**
+ * <p>When enabled, the sensor sends a test pattern instead of
+ * doing a real exposure from the camera.</p>
+ * <p>When a test pattern is enabled, all manual sensor controls specified
+ * by android.sensor.* will be ignored. All other controls should
+ * work as normal.</p>
+ * <p>For example, if manual flash is enabled, flash firing should still
+     * occur (and the test pattern will remain unmodified, since the flash
+ * would not actually affect it).</p>
+ * <p>Defaults to OFF.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #SENSOR_TEST_PATTERN_MODE_OFF OFF}</li>
+ * <li>{@link #SENSOR_TEST_PATTERN_MODE_SOLID_COLOR SOLID_COLOR}</li>
+ * <li>{@link #SENSOR_TEST_PATTERN_MODE_COLOR_BARS COLOR_BARS}</li>
+ * <li>{@link #SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY COLOR_BARS_FADE_TO_GRAY}</li>
+ * <li>{@link #SENSOR_TEST_PATTERN_MODE_PN9 PN9}</li>
+ * <li>{@link #SENSOR_TEST_PATTERN_MODE_CUSTOM1 CUSTOM1}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#SENSOR_AVAILABLE_TEST_PATTERN_MODES android.sensor.availableTestPatternModes}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CameraCharacteristics#SENSOR_AVAILABLE_TEST_PATTERN_MODES
+ * @see #SENSOR_TEST_PATTERN_MODE_OFF
+ * @see #SENSOR_TEST_PATTERN_MODE_SOLID_COLOR
+ * @see #SENSOR_TEST_PATTERN_MODE_COLOR_BARS
+ * @see #SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY
+ * @see #SENSOR_TEST_PATTERN_MODE_PN9
+ * @see #SENSOR_TEST_PATTERN_MODE_CUSTOM1
+ */
+ @PublicKey
+ public static final Key<Integer> SENSOR_TEST_PATTERN_MODE =
+ new Key<Integer>("android.sensor.testPatternMode", int.class);
+
+ /**
+ * <p>Quality of lens shading correction applied
+ * to the image data.</p>
+ * <p>When set to OFF mode, no lens shading correction will be applied by the
+ * camera device, and an identity lens shading map data will be provided
+ * if <code>{@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE android.statistics.lensShadingMapMode} == ON</code>. For example, for lens
+ * shading map with size of <code>[ 4, 3 ]</code>,
+ * the output {@link CaptureResult#STATISTICS_LENS_SHADING_CORRECTION_MAP android.statistics.lensShadingCorrectionMap} for this case will be an identity
+ * map shown below:</p>
+ * <pre><code>[ 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
+ * 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
+ * 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
+ * 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
+ * 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
+ * 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ]
+ * </code></pre>
+ * <p>When set to other modes, lens shading correction will be applied by the camera
+ * device. Applications can request lens shading map data by setting
+ * {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE android.statistics.lensShadingMapMode} to ON, and then the camera device will provide lens
+ * shading map data in {@link CaptureResult#STATISTICS_LENS_SHADING_CORRECTION_MAP android.statistics.lensShadingCorrectionMap}; the returned shading map
+ * data will be the one applied by the camera device for this capture request.</p>
+ * <p>The shading map data may depend on the auto-exposure (AE) and AWB statistics, therefore
+ * the reliability of the map data may be affected by the AE and AWB algorithms. When AE and
+     * AWB are in AUTO modes ({@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} <code>!=</code> OFF and {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} <code>!=</code>
+     * OFF), to get the best results, it is recommended that applications wait for the AE and AWB
+ * to be converged before using the returned shading map data.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #SHADING_MODE_OFF OFF}</li>
+ * <li>{@link #SHADING_MODE_FAST FAST}</li>
+ * <li>{@link #SHADING_MODE_HIGH_QUALITY HIGH_QUALITY}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#SHADING_AVAILABLE_MODES android.shading.availableModes}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CaptureRequest#CONTROL_AWB_MODE
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#SHADING_AVAILABLE_MODES
+ * @see CaptureResult#STATISTICS_LENS_SHADING_CORRECTION_MAP
+ * @see CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE
+ * @see #SHADING_MODE_OFF
+ * @see #SHADING_MODE_FAST
+ * @see #SHADING_MODE_HIGH_QUALITY
+ */
+ @PublicKey
+ public static final Key<Integer> SHADING_MODE =
+ new Key<Integer>("android.shading.mode", int.class);
+
+ /**
+ * <p>Operating mode for the face detector
+ * unit.</p>
+ * <p>Whether face detection is enabled, and whether it
+ * should output just the basic fields or the full set of
+ * fields.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #STATISTICS_FACE_DETECT_MODE_OFF OFF}</li>
+ * <li>{@link #STATISTICS_FACE_DETECT_MODE_SIMPLE SIMPLE}</li>
+ * <li>{@link #STATISTICS_FACE_DETECT_MODE_FULL FULL}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES android.statistics.info.availableFaceDetectModes}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES
+ * @see #STATISTICS_FACE_DETECT_MODE_OFF
+ * @see #STATISTICS_FACE_DETECT_MODE_SIMPLE
+ * @see #STATISTICS_FACE_DETECT_MODE_FULL
+ */
+ @PublicKey
+ public static final Key<Integer> STATISTICS_FACE_DETECT_MODE =
+ new Key<Integer>("android.statistics.faceDetectMode", int.class);
+
+ /**
+ * <p>Operating mode for hot pixel map generation.</p>
+ * <p>If set to <code>true</code>, a hot pixel map is returned in {@link CaptureResult#STATISTICS_HOT_PIXEL_MAP android.statistics.hotPixelMap}.
+ * If set to <code>false</code>, no hot pixel map will be returned.</p>
+ * <p><b>Range of valid values:</b><br>
+ * {@link CameraCharacteristics#STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES android.statistics.info.availableHotPixelMapModes}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CaptureResult#STATISTICS_HOT_PIXEL_MAP
+ * @see CameraCharacteristics#STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES
+ */
+ @PublicKey
+ public static final Key<Boolean> STATISTICS_HOT_PIXEL_MAP_MODE =
+ new Key<Boolean>("android.statistics.hotPixelMapMode", boolean.class);
+
+ /**
+ * <p>Whether the camera device will output the lens
+ * shading map in output result metadata.</p>
+ * <p>When set to ON,
+ * android.statistics.lensShadingMap will be provided in
+ * the output result metadata.</p>
+ * <p>ON is always supported on devices with the RAW capability.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #STATISTICS_LENS_SHADING_MAP_MODE_OFF OFF}</li>
+ * <li>{@link #STATISTICS_LENS_SHADING_MAP_MODE_ON ON}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES android.statistics.info.availableLensShadingMapModes}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES
+ * @see #STATISTICS_LENS_SHADING_MAP_MODE_OFF
+ * @see #STATISTICS_LENS_SHADING_MAP_MODE_ON
+ */
+ @PublicKey
+ public static final Key<Integer> STATISTICS_LENS_SHADING_MAP_MODE =
+ new Key<Integer>("android.statistics.lensShadingMapMode", int.class);
+
+ /**
+ * <p>Tonemapping / contrast / gamma curve for the blue
+ * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
+ * CONTRAST_CURVE.</p>
+ * <p>See android.tonemap.curveRed for more details.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CaptureRequest#TONEMAP_MODE
+ * @hide
+ */
+ public static final Key<float[]> TONEMAP_CURVE_BLUE =
+ new Key<float[]>("android.tonemap.curveBlue", float[].class);
+
+ /**
+ * <p>Tonemapping / contrast / gamma curve for the green
+ * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
+ * CONTRAST_CURVE.</p>
+ * <p>See android.tonemap.curveRed for more details.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CaptureRequest#TONEMAP_MODE
+ * @hide
+ */
+ public static final Key<float[]> TONEMAP_CURVE_GREEN =
+ new Key<float[]>("android.tonemap.curveGreen", float[].class);
+
+ /**
+ * <p>Tonemapping / contrast / gamma curve for the red
+ * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
+ * CONTRAST_CURVE.</p>
+ * <p>Each channel's curve is defined by an array of control points:</p>
+ * <pre><code>android.tonemap.curveRed =
+ * [ P0in, P0out, P1in, P1out, P2in, P2out, P3in, P3out, ..., PNin, PNout ]
+ * 2 &lt;= N &lt;= {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}</code></pre>
+ * <p>These are sorted in order of increasing <code>Pin</code>; it is
+ * required that input values 0.0 and 1.0 are included in the list to
+ * define a complete mapping. For input values between control points,
+ * the camera device must linearly interpolate between the control
+ * points.</p>
+ * <p>Each curve can have an independent number of points, and the number
+ * of points can be less than max (that is, the request doesn't have to
+ * always provide a curve with number of points equivalent to
+ * {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}).</p>
+ * <p>A few examples, and their corresponding graphical mappings; these
+ * only specify the red channel and the precision is limited to 4
+ * digits, for conciseness.</p>
+ * <p>Linear mapping:</p>
+ * <pre><code>android.tonemap.curveRed = [ 0, 0, 1.0, 1.0 ]
+ * </code></pre>
+ * <p><img alt="Linear mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png" /></p>
+ * <p>Invert mapping:</p>
+ * <pre><code>android.tonemap.curveRed = [ 0, 1.0, 1.0, 0 ]
+ * </code></pre>
+ * <p><img alt="Inverting mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png" /></p>
+ * <p>Gamma 1/2.2 mapping, with 16 control points:</p>
+ * <pre><code>android.tonemap.curveRed = [
+ * 0.0000, 0.0000, 0.0667, 0.2920, 0.1333, 0.4002, 0.2000, 0.4812,
+ * 0.2667, 0.5484, 0.3333, 0.6069, 0.4000, 0.6594, 0.4667, 0.7072,
+ * 0.5333, 0.7515, 0.6000, 0.7928, 0.6667, 0.8317, 0.7333, 0.8685,
+ * 0.8000, 0.9035, 0.8667, 0.9370, 0.9333, 0.9691, 1.0000, 1.0000 ]
+ * </code></pre>
+ * <p><img alt="Gamma = 1/2.2 tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png" /></p>
+ * <p>Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:</p>
+ * <pre><code>android.tonemap.curveRed = [
+ * 0.0000, 0.0000, 0.0667, 0.2864, 0.1333, 0.4007, 0.2000, 0.4845,
+ * 0.2667, 0.5532, 0.3333, 0.6125, 0.4000, 0.6652, 0.4667, 0.7130,
+ * 0.5333, 0.7569, 0.6000, 0.7977, 0.6667, 0.8360, 0.7333, 0.8721,
+ * 0.8000, 0.9063, 0.8667, 0.9389, 0.9333, 0.9701, 1.0000, 1.0000 ]
+ * </code></pre>
+ * <p><img alt="sRGB tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
+ * <p><b>Range of valid values:</b><br>
+ * 0-1 on both input and output coordinates, normalized
+ * as a floating-point value such that 0 == black and 1 == white.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS
+ * @see CaptureRequest#TONEMAP_MODE
+ * @hide
+ */
+ public static final Key<float[]> TONEMAP_CURVE_RED =
+ new Key<float[]>("android.tonemap.curveRed", float[].class);
+
+ /**
+ * <p>Tonemapping / contrast / gamma curve to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode}
+ * is CONTRAST_CURVE.</p>
+     * <p>The tonemapCurve consists of three curves, one each for the red, green, and blue
+     * channels. The following example uses the red channel; the same logic
+     * applies to the green and blue channels.
+ * Each channel's curve is defined by an array of control points:</p>
+ * <pre><code>curveRed =
+ * [ P0(in, out), P1(in, out), P2(in, out), P3(in, out), ..., PN(in, out) ]
+ * 2 &lt;= N &lt;= {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}</code></pre>
+ * <p>These are sorted in order of increasing <code>Pin</code>; it is always
+ * guaranteed that input values 0.0 and 1.0 are included in the list to
+ * define a complete mapping. For input values between control points,
+ * the camera device must linearly interpolate between the control
+ * points.</p>
+ * <p>Each curve can have an independent number of points, and the number
+ * of points can be less than max (that is, the request doesn't have to
+ * always provide a curve with number of points equivalent to
+ * {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}).</p>
+ * <p>A few examples, and their corresponding graphical mappings; these
+ * only specify the red channel and the precision is limited to 4
+ * digits, for conciseness.</p>
+ * <p>Linear mapping:</p>
+ * <pre><code>curveRed = [ (0, 0), (1.0, 1.0) ]
+ * </code></pre>
+ * <p><img alt="Linear mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png" /></p>
+ * <p>Invert mapping:</p>
+ * <pre><code>curveRed = [ (0, 1.0), (1.0, 0) ]
+ * </code></pre>
+ * <p><img alt="Inverting mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png" /></p>
+ * <p>Gamma 1/2.2 mapping, with 16 control points:</p>
+ * <pre><code>curveRed = [
+ * (0.0000, 0.0000), (0.0667, 0.2920), (0.1333, 0.4002), (0.2000, 0.4812),
+ * (0.2667, 0.5484), (0.3333, 0.6069), (0.4000, 0.6594), (0.4667, 0.7072),
+ * (0.5333, 0.7515), (0.6000, 0.7928), (0.6667, 0.8317), (0.7333, 0.8685),
+ * (0.8000, 0.9035), (0.8667, 0.9370), (0.9333, 0.9691), (1.0000, 1.0000) ]
+ * </code></pre>
+ * <p><img alt="Gamma = 1/2.2 tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png" /></p>
+ * <p>Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:</p>
+ * <pre><code>curveRed = [
+ * (0.0000, 0.0000), (0.0667, 0.2864), (0.1333, 0.4007), (0.2000, 0.4845),
+ * (0.2667, 0.5532), (0.3333, 0.6125), (0.4000, 0.6652), (0.4667, 0.7130),
+ * (0.5333, 0.7569), (0.6000, 0.7977), (0.6667, 0.8360), (0.7333, 0.8721),
+ * (0.8000, 0.9063), (0.8667, 0.9389), (0.9333, 0.9701), (1.0000, 1.0000) ]
+ * </code></pre>
+ * <p><img alt="sRGB tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
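+     * <p>A sketch applying the linear mapping above to all three channels (assuming
+     * <code>builder</code> is a request builder):</p>
+     * <pre><code>float[] linear = { 0.0f, 0.0f, 1.0f, 1.0f }; // flattened (Pin, Pout) pairs
+     * builder.set(CaptureRequest.TONEMAP_MODE, CaptureRequest.TONEMAP_MODE_CONTRAST_CURVE);
+     * builder.set(CaptureRequest.TONEMAP_CURVE,
+     *         new android.hardware.camera2.params.TonemapCurve(linear, linear, linear));
+     * </code></pre>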
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS
+ * @see CaptureRequest#TONEMAP_MODE
+ */
+ @PublicKey
+ @SyntheticKey
+ public static final Key<android.hardware.camera2.params.TonemapCurve> TONEMAP_CURVE =
+ new Key<android.hardware.camera2.params.TonemapCurve>("android.tonemap.curve", android.hardware.camera2.params.TonemapCurve.class);
+
+ /**
+ * <p>High-level global contrast/gamma/tonemapping control.</p>
+ * <p>When switching to an application-defined contrast curve by setting
+ * {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} to CONTRAST_CURVE, the curve is defined
+ * per-channel with a set of <code>(in, out)</code> points that specify the
+ * mapping from input high-bit-depth pixel value to the output
+ * low-bit-depth value. Since the actual pixel ranges of both input
+ * and output may change depending on the camera pipeline, the values
+ * are specified by normalized floating-point numbers.</p>
+ * <p>More-complex color mapping operations such as 3D color look-up
+ * tables, selective chroma enhancement, or other non-linear color
+ * transforms will be disabled when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
+ * CONTRAST_CURVE.</p>
+ * <p>When using either FAST or HIGH_QUALITY, the camera device will
+ * emit its own tonemap curve in {@link CaptureRequest#TONEMAP_CURVE android.tonemap.curve}.
+ * These values are always available, and as close as possible to the
+ * actually used nonlinear/nonglobal transforms.</p>
+ * <p>If a request is sent with CONTRAST_CURVE with the camera device's
+ * provided curve in FAST or HIGH_QUALITY, the image's tonemap will be
+ * roughly the same.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #TONEMAP_MODE_CONTRAST_CURVE CONTRAST_CURVE}</li>
+ * <li>{@link #TONEMAP_MODE_FAST FAST}</li>
+ * <li>{@link #TONEMAP_MODE_HIGH_QUALITY HIGH_QUALITY}</li>
+ * <li>{@link #TONEMAP_MODE_GAMMA_VALUE GAMMA_VALUE}</li>
+ * <li>{@link #TONEMAP_MODE_PRESET_CURVE PRESET_CURVE}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#TONEMAP_AVAILABLE_TONE_MAP_MODES android.tonemap.availableToneMapModes}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#TONEMAP_AVAILABLE_TONE_MAP_MODES
+ * @see CaptureRequest#TONEMAP_CURVE
+ * @see CaptureRequest#TONEMAP_MODE
+ * @see #TONEMAP_MODE_CONTRAST_CURVE
+ * @see #TONEMAP_MODE_FAST
+ * @see #TONEMAP_MODE_HIGH_QUALITY
+ * @see #TONEMAP_MODE_GAMMA_VALUE
+ * @see #TONEMAP_MODE_PRESET_CURVE
+ */
+ @PublicKey
+ public static final Key<Integer> TONEMAP_MODE =
+ new Key<Integer>("android.tonemap.mode", int.class);
+
+ /**
+ * <p>Tonemapping curve to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
+ * GAMMA_VALUE</p>
+     * <p>The tonemap curve will be defined by the following formula:</p>
+     * <pre><code>OUT = pow(IN, 1.0 / gamma)
+     * </code></pre>
+     * <p>where IN and OUT are the input and output pixel values scaled to the range
+     * [0.0, 1.0], pow is the power function, and gamma is the gamma value specified by
+     * this key.</p>
+ * <p>The same curve will be applied to all color channels. The camera device
+ * may clip the input gamma value to its supported range. The actual applied
+ * value will be returned in capture result.</p>
+     * <p>The valid range of gamma value varies across different devices, but values
+ * within [1.0, 5.0] are guaranteed not to be clipped.</p>
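+     * <p>For example (a sketch only, assuming <code>builder</code> is a request builder), a
+     * standard 2.2 gamma curve falls inside that guaranteed range:</p>
+     * <pre><code>builder.set(CaptureRequest.TONEMAP_MODE, CaptureRequest.TONEMAP_MODE_GAMMA_VALUE);
+     * builder.set(CaptureRequest.TONEMAP_GAMMA, 2.2f);
+     * </code></pre>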
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CaptureRequest#TONEMAP_MODE
+ */
+ @PublicKey
+ public static final Key<Float> TONEMAP_GAMMA =
+ new Key<Float>("android.tonemap.gamma", float.class);
+
+ /**
+ * <p>Tonemapping curve to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
+ * PRESET_CURVE</p>
+     * <p>The tonemap curve will be defined by the specified standard.</p>
+ * <p>sRGB (approximated by 16 control points):</p>
+ * <p><img alt="sRGB tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
+ * <p>Rec. 709 (approximated by 16 control points):</p>
+ * <p><img alt="Rec. 709 tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/rec709_tonemap.png" /></p>
+     * <p>Note that the above figures show a 16-control-point approximation of the preset
+     * curves. Camera devices may apply a different approximation to the curve.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #TONEMAP_PRESET_CURVE_SRGB SRGB}</li>
+ * <li>{@link #TONEMAP_PRESET_CURVE_REC709 REC709}</li>
+ * </ul></p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CaptureRequest#TONEMAP_MODE
+ * @see #TONEMAP_PRESET_CURVE_SRGB
+ * @see #TONEMAP_PRESET_CURVE_REC709
+ */
+ @PublicKey
+ public static final Key<Integer> TONEMAP_PRESET_CURVE =
+ new Key<Integer>("android.tonemap.presetCurve", int.class);
+
+ /**
+ * <p>This LED is nominally used to indicate to the user
+ * that the camera is powered on and may be streaming images back to the
+ * Application Processor. In certain rare circumstances, the OS may
+ * disable this when video is processed locally and not transmitted to
+ * any untrusted applications.</p>
+ * <p>In particular, the LED <em>must</em> always be on when the data could be
+ * transmitted off the device. The LED <em>should</em> always be on whenever
+ * data is stored locally on the device.</p>
+ * <p>The LED <em>may</em> be off if a trusted application is using the data that
+ * doesn't violate the above rules.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * @hide
+ */
+ public static final Key<Boolean> LED_TRANSMIT =
+ new Key<Boolean>("android.led.transmit", boolean.class);
+
+ /**
+ * <p>Whether black-level compensation is locked
+ * to its current values, or is free to vary.</p>
+ * <p>When set to <code>true</code> (ON), the values used for black-level
+ * compensation will not change until the lock is set to
+ * <code>false</code> (OFF).</p>
+ * <p>Since changes to certain capture parameters (such as
+ * exposure time) may require resetting of black level
+ * compensation, the camera device must report whether setting
+ * the black level lock was successful in the output result
+ * metadata.</p>
+ * <p>For example, if a sequence of requests is as follows:</p>
+ * <ul>
+ * <li>Request 1: Exposure = 10ms, Black level lock = OFF</li>
+ * <li>Request 2: Exposure = 10ms, Black level lock = ON</li>
+ * <li>Request 3: Exposure = 10ms, Black level lock = ON</li>
+ * <li>Request 4: Exposure = 20ms, Black level lock = ON</li>
+ * <li>Request 5: Exposure = 20ms, Black level lock = ON</li>
+ * <li>Request 6: Exposure = 20ms, Black level lock = ON</li>
+ * </ul>
+ * <p>And the exposure change in Request 4 requires the camera
+ * device to reset the black level offsets, then the output
+ * result metadata is expected to be:</p>
+ * <ul>
+ * <li>Result 1: Exposure = 10ms, Black level lock = OFF</li>
+ * <li>Result 2: Exposure = 10ms, Black level lock = ON</li>
+ * <li>Result 3: Exposure = 10ms, Black level lock = ON</li>
+ * <li>Result 4: Exposure = 20ms, Black level lock = OFF</li>
+ * <li>Result 5: Exposure = 20ms, Black level lock = ON</li>
+ * <li>Result 6: Exposure = 20ms, Black level lock = ON</li>
+ * </ul>
+ * <p>This indicates to the application that on frame 4, black
+ * levels were reset due to exposure value changes, and pixel
+ * values may not be consistent across captures.</p>
+ * <p>The camera device will maintain the lock to the extent
+ * possible, only overriding the lock to OFF when changes to
+ * other request parameters require a black level recalculation
+ * or reset.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ */
+ @PublicKey
+ public static final Key<Boolean> BLACK_LEVEL_LOCK =
+ new Key<Boolean>("android.blackLevel.lock", boolean.class);
+
+ /**
+     * <p>The exposure time increase factor applied to the original output
+     * frame by the application's processing before it is sent for reprocessing.</p>
+ * <p>This is optional, and will be supported if the camera device supports YUV_REPROCESSING
+ * capability ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains YUV_REPROCESSING).</p>
+ * <p>For some YUV reprocessing use cases, the application may choose to filter the original
+ * output frames to effectively reduce the noise to the same level as a frame that was
+ * captured with longer exposure time. To be more specific, assuming the original captured
+ * images were captured with a sensitivity of S and an exposure time of T, the model in
+ * the camera device is that the amount of noise in the image would be approximately what
+ * would be expected if the original capture parameters had been a sensitivity of
+ * S/effectiveExposureFactor and an exposure time of T*effectiveExposureFactor, rather
+ * than S and T respectively. If the captured images were processed by the application
+ * before being sent for reprocessing, then the application may have used image processing
+ * algorithms and/or multi-frame image fusion to reduce the noise in the
+ * application-processed images (input images). By using the effectiveExposureFactor
+ * control, the application can communicate to the camera device the actual noise level
+ * improvement in the application-processed image. With this information, the camera
+ * device can select appropriate noise reduction and edge enhancement parameters to avoid
+ * excessive noise reduction ({@link CaptureRequest#NOISE_REDUCTION_MODE android.noiseReduction.mode}) and insufficient edge
+ * enhancement ({@link CaptureRequest#EDGE_MODE android.edge.mode}) being applied to the reprocessed frames.</p>
+     * <p>For example, for a multi-frame image fusion use case, the application may fuse
+     * multiple output frames together into a final frame for reprocessing. When N images are
+     * fused into 1 image for reprocessing, the exposure time increase factor could be up to
+     * the square root of N (based on a simple photon shot noise model). The camera device will
+ * adjust the reprocessing noise reduction and edge enhancement parameters accordingly to
+ * produce the best quality images.</p>
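+     * <p>Under that simple model, a sketch of setting this control after fusing a hypothetical
+     * <code>numFusedFrames</code> input frames (assuming <code>builder</code> is a reprocess
+     * request builder) might be:</p>
+     * <pre><code>builder.set(CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR,
+     *         (float) Math.sqrt(numFusedFrames));
+     * </code></pre>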
+ * <p>This is relative factor, 1.0 indicates the application hasn't processed the input
+ * buffer in a way that affects its effective exposure time.</p>
+     * <p>This control is only effective for YUV reprocessing capture requests. For noise
+ * reduction reprocessing, it is only effective when <code>{@link CaptureRequest#NOISE_REDUCTION_MODE android.noiseReduction.mode} != OFF</code>.
+ * Similarly, for edge enhancement reprocessing, it is only effective when
+ * <code>{@link CaptureRequest#EDGE_MODE android.edge.mode} != OFF</code>.</p>
+ * <p><b>Units</b>: Relative exposure time increase factor.</p>
+ * <p><b>Range of valid values:</b><br>
+ * &gt;= 1.0</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Limited capability</b> -
+ * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CaptureRequest#EDGE_MODE
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CaptureRequest#NOISE_REDUCTION_MODE
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ */
+ @PublicKey
+ public static final Key<Float> REPROCESS_EFFECTIVE_EXPOSURE_FACTOR =
+ new Key<Float>("android.reprocess.effectiveExposureFactor", float.class);
+
+ /*~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
+ * End generated code
+ *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~O@*/
+
+
+
+}
diff --git a/android/hardware/camera2/CaptureResult.java b/android/hardware/camera2/CaptureResult.java
new file mode 100644
index 00000000..6d80c20a
--- /dev/null
+++ b/android/hardware/camera2/CaptureResult.java
@@ -0,0 +1,4304 @@
+/*
+ * Copyright (C) 2012 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2;
+
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.hardware.camera2.impl.CameraMetadataNative;
+import android.hardware.camera2.impl.CaptureResultExtras;
+import android.hardware.camera2.impl.PublicKey;
+import android.hardware.camera2.impl.SyntheticKey;
+import android.hardware.camera2.utils.TypeReference;
+import android.util.Log;
+import android.util.Rational;
+
+import java.util.List;
+
+/**
+ * <p>The subset of the results of a single image capture from the image sensor.</p>
+ *
+ * <p>Contains a subset of the final configuration for the capture hardware (sensor, lens,
+ * flash), the processing pipeline, the control algorithms, and the output
+ * buffers.</p>
+ *
+ * <p>CaptureResults are produced by a {@link CameraDevice} after processing a
+ * {@link CaptureRequest}. All properties listed for capture requests can also
+ * be queried on the capture result, to determine the final values used for
+ * capture. The result also includes additional metadata about the state of the
+ * camera device during the capture.</p>
+ *
+ * <p>Not all properties returned by {@link CameraCharacteristics#getAvailableCaptureResultKeys()}
+ * are necessarily available. Some results are {@link CaptureResult partial} and will
+ * not have every key set. Only {@link TotalCaptureResult total} results are guaranteed to have
+ * every key available that was enabled by the request.</p>
+ *
+ * <p>{@link CaptureResult} objects are immutable.</p>
+ *
+ */
+public class CaptureResult extends CameraMetadata<CaptureResult.Key<?>> {
+
+ private static final String TAG = "CaptureResult";
+ private static final boolean VERBOSE = false;
+
+ /**
+ * A {@code Key} is used to do capture result field lookups with
+ * {@link CaptureResult#get}.
+ *
+ * <p>For example, to get the timestamp corresponding to the exposure of the first row:
+ * <code><pre>
+ * long timestamp = captureResult.get(CaptureResult.SENSOR_TIMESTAMP);
+ * </pre></code>
+ * </p>
+ *
+ * <p>To enumerate over all possible keys for {@link CaptureResult}, see
+ * {@link CameraCharacteristics#getAvailableCaptureResultKeys}.</p>
+ *
+ * @see CaptureResult#get
+ * @see CameraCharacteristics#getAvailableCaptureResultKeys
+ */
+ public final static class Key<T> {
+ private final CameraMetadataNative.Key<T> mKey;
+
+ /**
+ * Visible for testing and vendor extensions only.
+ *
+ * @hide
+ */
+ public Key(String name, Class<T> type, long vendorId) {
+ mKey = new CameraMetadataNative.Key<T>(name, type, vendorId);
+ }
+
+ /**
+ * Visible for testing and vendor extensions only.
+ *
+ * @hide
+ */
+ public Key(String name, Class<T> type) {
+ mKey = new CameraMetadataNative.Key<T>(name, type);
+ }
+
+ /**
+ * Visible for testing and vendor extensions only.
+ *
+ * @hide
+ */
+ public Key(String name, TypeReference<T> typeReference) {
+ mKey = new CameraMetadataNative.Key<T>(name, typeReference);
+ }
+
+ /**
+ * Return a camelCase, period separated name formatted like:
+ * {@code "root.section[.subsections].name"}.
+ *
+ * <p>Built-in keys exposed by the Android SDK are always prefixed with {@code "android."};
+ * keys that are device/platform-specific are prefixed with {@code "com."}.</p>
+ *
+ * <p>For example, {@code CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP} would
+ * have a name of {@code "android.scaler.streamConfigurationMap"}; whereas a device
+ * specific key might look like {@code "com.google.nexus.data.private"}.</p>
+ *
+ * @return String representation of the key name
+ */
+ @NonNull
+ public String getName() {
+ return mKey.getName();
+ }
+
+ /**
+ * Return vendor tag id.
+ *
+ * @hide
+ */
+ public long getVendorId() {
+ return mKey.getVendorId();
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public final int hashCode() {
+ return mKey.hashCode();
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @SuppressWarnings("unchecked")
+ @Override
+ public final boolean equals(Object o) {
+ return o instanceof Key && ((Key<T>)o).mKey.equals(mKey);
+ }
+
+ /**
+ * Return this {@link Key} as a string representation.
+ *
+ * <p>{@code "CaptureResult.Key(%s)"}, where {@code %s} represents
+ * the name of this key as returned by {@link #getName}.</p>
+ *
+ * @return string representation of {@link Key}
+ */
+ @NonNull
+ @Override
+ public String toString() {
+ return String.format("CaptureResult.Key(%s)", mKey.getName());
+ }
+
+ /**
+ * Visible for CameraMetadataNative implementation only; do not use.
+ *
+ * TODO: Make this private or remove it altogether.
+ *
+ * @hide
+ */
+ public CameraMetadataNative.Key<T> getNativeKey() {
+ return mKey;
+ }
+
+ @SuppressWarnings({ "unchecked" })
+ /*package*/ Key(CameraMetadataNative.Key<?> nativeKey) {
+ mKey = (CameraMetadataNative.Key<T>) nativeKey;
+ }
+ }
+
+ private final CameraMetadataNative mResults;
+ private final CaptureRequest mRequest;
+ private final int mSequenceId;
+ private final long mFrameNumber;
+
+ /**
+ * Takes ownership of the passed-in properties object
+ *
+ * <p>For internal use only</p>
+ * @hide
+ */
+ public CaptureResult(CameraMetadataNative results, CaptureRequest parent,
+ CaptureResultExtras extras) {
+ if (results == null) {
+ throw new IllegalArgumentException("results was null");
+ }
+
+ if (parent == null) {
+ throw new IllegalArgumentException("parent was null");
+ }
+
+ if (extras == null) {
+ throw new IllegalArgumentException("extras was null");
+ }
+
+ mResults = CameraMetadataNative.move(results);
+ if (mResults.isEmpty()) {
+ throw new AssertionError("Results must not be empty");
+ }
+ setNativeInstance(mResults);
+ mRequest = parent;
+ mSequenceId = extras.getRequestId();
+ mFrameNumber = extras.getFrameNumber();
+ }
+
+ /**
+ * Returns a copy of the underlying {@link CameraMetadataNative}.
+ * @hide
+ */
+ public CameraMetadataNative getNativeCopy() {
+ return new CameraMetadataNative(mResults);
+ }
+
+ /**
+ * Creates a request-less result.
+ *
+ * <p><strong>For testing only.</strong></p>
+ * @hide
+ */
+ public CaptureResult(CameraMetadataNative results, int sequenceId) {
+ if (results == null) {
+ throw new IllegalArgumentException("results was null");
+ }
+
+ mResults = CameraMetadataNative.move(results);
+ if (mResults.isEmpty()) {
+ throw new AssertionError("Results must not be empty");
+ }
+
+ setNativeInstance(mResults);
+ mRequest = null;
+ mSequenceId = sequenceId;
+ mFrameNumber = -1;
+ }
+
+ /**
+ * Get a capture result field value.
+ *
+ * <p>The field definitions can be found in {@link CaptureResult}.</p>
+ *
+     * <p>Querying the value for the same key more than once will return a value
+     * which is equal to the previously queried value.</p>
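+     * <p>For example, a minimal sketch of reading the final exposure time, which is only
+     * set if the device reports it (assuming {@code result} is a received capture result):</p>
+     * <pre><code>
+     * Long exposureNs = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
+     * if (exposureNs != null) {
+     *     // Use the actual exposure time for this frame, in nanoseconds.
+     * }
+     * </code></pre>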
+ *
+ * @throws IllegalArgumentException if the key was not valid
+ *
+ * @param key The result field to read.
+ * @return The value of that key, or {@code null} if the field is not set.
+ */
+ @Nullable
+ public <T> T get(Key<T> key) {
+ T value = mResults.get(key);
+ if (VERBOSE) Log.v(TAG, "#get for Key = " + key.getName() + ", returned value = " + value);
+ return value;
+ }
+
+ /**
+ * {@inheritDoc}
+ * @hide
+ */
+ @SuppressWarnings("unchecked")
+ @Override
+ protected <T> T getProtected(Key<?> key) {
+ return (T) mResults.get(key);
+ }
+
+ /**
+ * {@inheritDoc}
+ * @hide
+ */
+ @SuppressWarnings("unchecked")
+ @Override
+ protected Class<Key<?>> getKeyClass() {
+ Object thisClass = Key.class;
+ return (Class<Key<?>>)thisClass;
+ }
+
+ /**
+ * Dumps the native metadata contents to logcat.
+ *
+ * <p>Visibility for testing/debugging only. The results will not
+ * include any synthesized keys, as they are invisible to the native layer.</p>
+ *
+ * @hide
+ */
+ public void dumpToLog() {
+ mResults.dumpToLog();
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ @NonNull
+ public List<Key<?>> getKeys() {
+ // Force the javadoc for this function to show up on the CaptureResult page
+ return super.getKeys();
+ }
+
+ /**
+ * Get the request associated with this result.
+ *
+ * <p>Whenever a request has been fully or partially captured, with
+ * {@link CameraCaptureSession.CaptureCallback#onCaptureCompleted} or
+ * {@link CameraCaptureSession.CaptureCallback#onCaptureProgressed}, the {@code result}'s
+ * {@code getRequest()} will return that {@code request}.
+ * </p>
+ *
+ * <p>For example,
+ * <code><pre>cameraDevice.capture(someRequest, new CaptureCallback() {
+ * {@literal @}Override
+ * void onCaptureCompleted(CaptureRequest myRequest, CaptureResult myResult) {
+ * assert(myResult.getRequest.equals(myRequest) == true);
+ * }
+ * }, null);
+ * </code></pre>
+ * </p>
+ *
+ * @return The request associated with this result. Never {@code null}.
+ */
+ @NonNull
+ public CaptureRequest getRequest() {
+ return mRequest;
+ }
+
+ /**
+ * Get the frame number associated with this result.
+ *
+ * <p>Whenever a request has been processed, regardless of failure or success,
+ * it gets a unique frame number assigned to its future result/failure.</p>
+ *
+ * <p>For the same type of request (capturing from the camera device or reprocessing), this
+ * value monotonically increments, starting with 0, for every new result or failure and the
+ * scope is the lifetime of the {@link CameraDevice}. Between different types of requests,
+ * the frame number may not monotonically increment. For example, the frame number of a newer
+ * reprocess result may be smaller than the frame number of an older result of capturing new
+ * images from the camera device, but the frame number of a newer reprocess result will never be
+ * smaller than the frame number of an older reprocess result.</p>
+ *
+ * @return The frame number
+ *
+ * @see CameraDevice#createCaptureRequest
+ * @see CameraDevice#createReprocessCaptureRequest
+ */
+ public long getFrameNumber() {
+ return mFrameNumber;
+ }
+
+ /**
+     * The sequence ID for this capture result that was returned by the
+ * {@link CameraCaptureSession#capture} family of functions.
+ *
+ * <p>The sequence ID is a unique monotonically increasing value starting from 0,
+ * incremented every time a new group of requests is submitted to the CameraDevice.</p>
+ *
+ * @return int The ID for the sequence of requests that this capture result is a part of
+ *
+ * @see CameraDevice.CaptureCallback#onCaptureSequenceCompleted
+ * @see CameraDevice.CaptureCallback#onCaptureSequenceAborted
+ */
+ public int getSequenceId() {
+ return mSequenceId;
+ }
+
+ /*@O~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
+ * The key entries below this point are generated from metadata
+ * definitions in /system/media/camera/docs. Do not modify by hand or
+ * modify the comment blocks at the start or end.
+ *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~*/
+
+ /**
+ * <p>The mode control selects how the image data is converted from the
+ * sensor's native color into linear sRGB color.</p>
+ * <p>When auto-white balance (AWB) is enabled with {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}, this
+ * control is overridden by the AWB routine. When AWB is disabled, the
+ * application controls how the color mapping is performed.</p>
+     * <p>We define the expected processing pipeline below. For consistency
+     * across devices, this pipeline always applies when the mode is TRANSFORM_MATRIX.</p>
+ * <p>When either FULL or HIGH_QUALITY is used, the camera device may
+ * do additional processing but {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} and
+ * {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform} will still be provided by the
+ * camera device (in the results) and be roughly correct.</p>
+ * <p>Switching to TRANSFORM_MATRIX and using the data provided from
+ * FAST or HIGH_QUALITY will yield a picture with the same white point
+ * as what was produced by the camera device in the earlier frame.</p>
+ * <p>The expected processing pipeline is as follows:</p>
+ * <p><img alt="White balance processing pipeline" src="../../../../images/camera2/metadata/android.colorCorrection.mode/processing_pipeline.png" /></p>
+ * <p>The white balance is encoded by two values, a 4-channel white-balance
+ * gain vector (applied in the Bayer domain), and a 3x3 color transform
+ * matrix (applied after demosaic).</p>
+ * <p>The 4-channel white-balance gains are defined as:</p>
+ * <pre><code>{@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} = [ R G_even G_odd B ]
+ * </code></pre>
+ * <p>where <code>G_even</code> is the gain for green pixels on even rows of the
+ * output, and <code>G_odd</code> is the gain for green pixels on the odd rows.
+ * These may be identical for a given camera device implementation; if
+ * the camera device does not support a separate gain for even/odd green
+ * channels, it will use the <code>G_even</code> value, and write <code>G_odd</code> equal to
+ * <code>G_even</code> in the output result metadata.</p>
+ * <p>The matrices for color transforms are defined as a 9-entry vector:</p>
+ * <pre><code>{@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform} = [ I0 I1 I2 I3 I4 I5 I6 I7 I8 ]
+ * </code></pre>
+ * <p>which define a transform from input sensor colors, <code>P_in = [ r g b ]</code>,
+ * to output linear sRGB, <code>P_out = [ r' g' b' ]</code>,</p>
+ * <p>with colors as follows:</p>
+ * <pre><code>r' = I0r + I1g + I2b
+ * g' = I3r + I4g + I5b
+ * b' = I6r + I7g + I8b
+ * </code></pre>
+ * <p>Both the input and output value ranges must match. Overflow/underflow
+ * values are clipped to fit within the range.</p>
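+     * <p>A minimal sketch of requesting an identity transform with unity gains (assuming
+     * {@code builder} is a request builder; the values only apply when
+     * {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} is OFF):</p>
+     * <pre><code>
+     * builder.set(CaptureRequest.COLOR_CORRECTION_MODE,
+     *         CaptureRequest.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
+     * builder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM,
+     *         new ColorSpaceTransform(new int[] {
+     *                 1, 1,  0, 1,  0, 1,   // 1/1, 0/1, 0/1
+     *                 0, 1,  1, 1,  0, 1,   // 0/1, 1/1, 0/1
+     *                 0, 1,  0, 1,  1, 1    // 0/1, 0/1, 1/1
+     *         }));
+     * builder.set(CaptureRequest.COLOR_CORRECTION_GAINS,
+     *         new RggbChannelVector(1.0f, 1.0f, 1.0f, 1.0f));
+     * </code></pre>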
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #COLOR_CORRECTION_MODE_TRANSFORM_MATRIX TRANSFORM_MATRIX}</li>
+ * <li>{@link #COLOR_CORRECTION_MODE_FAST FAST}</li>
+ * <li>{@link #COLOR_CORRECTION_MODE_HIGH_QUALITY HIGH_QUALITY}</li>
+ * </ul></p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CaptureRequest#COLOR_CORRECTION_GAINS
+ * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
+ * @see CaptureRequest#CONTROL_AWB_MODE
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see #COLOR_CORRECTION_MODE_TRANSFORM_MATRIX
+ * @see #COLOR_CORRECTION_MODE_FAST
+ * @see #COLOR_CORRECTION_MODE_HIGH_QUALITY
+ */
+ @PublicKey
+ public static final Key<Integer> COLOR_CORRECTION_MODE =
+ new Key<Integer>("android.colorCorrection.mode", int.class);
+
+ /**
+ * <p>A color transform matrix to use to transform
+ * from sensor RGB color space to output linear sRGB color space.</p>
+ * <p>This matrix is either set by the camera device when the request
+ * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is not TRANSFORM_MATRIX, or
+ * directly by the application in the request when the
+ * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is TRANSFORM_MATRIX.</p>
+ * <p>In the latter case, the camera device may round the matrix to account
+ * for precision issues; the final rounded matrix should be reported back
+ * in this matrix result metadata. The transform should keep the magnitude
+     * of the output color values within <code>[0, 1.0]</code> (assuming input color
+     * values are within the normalized range <code>[0, 1.0]</code>), or clipping may occur.</p>
+ * <p>The valid range of each matrix element varies on different devices, but
+ * values within [-1.5, 3.0] are guaranteed not to be clipped.</p>
+ * <p><b>Units</b>: Unitless scale factors</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CaptureRequest#COLOR_CORRECTION_MODE
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ */
+ @PublicKey
+ public static final Key<android.hardware.camera2.params.ColorSpaceTransform> COLOR_CORRECTION_TRANSFORM =
+ new Key<android.hardware.camera2.params.ColorSpaceTransform>("android.colorCorrection.transform", android.hardware.camera2.params.ColorSpaceTransform.class);
+
+ /**
+ * <p>Gains applying to Bayer raw color channels for
+ * white-balance.</p>
+ * <p>These per-channel gains are either set by the camera device
+ * when the request {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is not
+ * TRANSFORM_MATRIX, or directly by the application in the
+ * request when the {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} is
+ * TRANSFORM_MATRIX.</p>
+ * <p>The gains in the result metadata are the gains actually
+ * applied by the camera device to the current frame.</p>
+ * <p>The valid range of gains varies on different devices, but gains
+ * between [1.0, 3.0] are guaranteed not to be clipped. Even if a given
+ * device allows gains below 1.0, this is usually not recommended because
+ * this can create color artifacts.</p>
+ * <p><b>Units</b>: Unitless gain factors</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CaptureRequest#COLOR_CORRECTION_MODE
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ */
+ @PublicKey
+ public static final Key<android.hardware.camera2.params.RggbChannelVector> COLOR_CORRECTION_GAINS =
+ new Key<android.hardware.camera2.params.RggbChannelVector>("android.colorCorrection.gains", android.hardware.camera2.params.RggbChannelVector.class);
+
+ /**
+ * <p>Mode of operation for the chromatic aberration correction algorithm.</p>
+     * <p>Chromatic (color) aberration is caused by the fact that different wavelengths of light
+     * cannot focus on the same point after exiting from the lens. This metadata defines
+ * the high level control of chromatic aberration correction algorithm, which aims to
+ * minimize the chromatic artifacts that may occur along the object boundaries in an
+ * image.</p>
+ * <p>FAST/HIGH_QUALITY both mean that camera device determined aberration
+ * correction will be applied. HIGH_QUALITY mode indicates that the camera device will
+ * use the highest-quality aberration correction algorithms, even if it slows down
+ * capture rate. FAST means the camera device will not slow down capture rate when
+ * applying aberration correction.</p>
+ * <p>LEGACY devices will always be in FAST mode.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #COLOR_CORRECTION_ABERRATION_MODE_OFF OFF}</li>
+ * <li>{@link #COLOR_CORRECTION_ABERRATION_MODE_FAST FAST}</li>
+ * <li>{@link #COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY HIGH_QUALITY}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES android.colorCorrection.availableAberrationModes}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES
+ * @see #COLOR_CORRECTION_ABERRATION_MODE_OFF
+ * @see #COLOR_CORRECTION_ABERRATION_MODE_FAST
+ * @see #COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY
+ */
+ @PublicKey
+ public static final Key<Integer> COLOR_CORRECTION_ABERRATION_MODE =
+ new Key<Integer>("android.colorCorrection.aberrationMode", int.class);
+
+ /**
+ * <p>The desired setting for the camera device's auto-exposure
+ * algorithm's antibanding compensation.</p>
+ * <p>Some kinds of lighting fixtures, such as some fluorescent
+ * lights, flicker at the rate of the power supply frequency
+ * (60Hz or 50Hz, depending on country). While this is
+ * typically not noticeable to a person, it can be visible to
+ * a camera device. If a camera sets its exposure time to the
+ * wrong value, the flicker may become visible in the
+ * viewfinder as flicker or in a final captured image, as a
+ * set of variable-brightness bands across the image.</p>
+ * <p>Therefore, the auto-exposure routines of camera devices
+ * include antibanding routines that ensure that the chosen
+ * exposure value will not cause such banding. The choice of
+ * exposure time depends on the rate of flicker, which the
+ * camera device can detect automatically, or the expected
+ * rate can be selected by the application using this
+ * control.</p>
+ * <p>A given camera device may not support all of the possible
+ * options for the antibanding mode. The
+ * {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_ANTIBANDING_MODES android.control.aeAvailableAntibandingModes} key contains
+ * the available modes for a given camera device.</p>
+     * <p>AUTO mode is the default if it is available on a given
+     * camera device. When AUTO mode is not available, the
+ * default will be either 50HZ or 60HZ, and both 50HZ
+ * and 60HZ will be available.</p>
+ * <p>If manual exposure control is enabled (by setting
+ * {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} to OFF),
+ * then this setting has no effect, and the application must
+ * ensure it selects exposure times that do not cause banding
+ * issues. The {@link CaptureResult#STATISTICS_SCENE_FLICKER android.statistics.sceneFlicker} key can assist
+ * the application in this.</p>
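+     * <p>A minimal sketch of forcing 50Hz antibanding when it is known to be supported
+     * (assuming {@code builder} is a request builder):</p>
+     * <pre><code>
+     * builder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE,
+     *         CaptureRequest.CONTROL_AE_ANTIBANDING_MODE_50HZ);
+     * </code></pre>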
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #CONTROL_AE_ANTIBANDING_MODE_OFF OFF}</li>
+ * <li>{@link #CONTROL_AE_ANTIBANDING_MODE_50HZ 50HZ}</li>
+ * <li>{@link #CONTROL_AE_ANTIBANDING_MODE_60HZ 60HZ}</li>
+ * <li>{@link #CONTROL_AE_ANTIBANDING_MODE_AUTO AUTO}</li>
+ * </ul></p>
+     * <p><b>Available values for this device:</b><br>
+     * {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_ANTIBANDING_MODES android.control.aeAvailableAntibandingModes}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#CONTROL_AE_AVAILABLE_ANTIBANDING_MODES
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CaptureRequest#CONTROL_MODE
+ * @see CaptureResult#STATISTICS_SCENE_FLICKER
+ * @see #CONTROL_AE_ANTIBANDING_MODE_OFF
+ * @see #CONTROL_AE_ANTIBANDING_MODE_50HZ
+ * @see #CONTROL_AE_ANTIBANDING_MODE_60HZ
+ * @see #CONTROL_AE_ANTIBANDING_MODE_AUTO
+ */
+ @PublicKey
+ public static final Key<Integer> CONTROL_AE_ANTIBANDING_MODE =
+ new Key<Integer>("android.control.aeAntibandingMode", int.class);
+
+ /**
+ * <p>Adjustment to auto-exposure (AE) target image
+ * brightness.</p>
+ * <p>The adjustment is measured as a count of steps, with the
+ * step size defined by {@link CameraCharacteristics#CONTROL_AE_COMPENSATION_STEP android.control.aeCompensationStep} and the
+ * allowed range by {@link CameraCharacteristics#CONTROL_AE_COMPENSATION_RANGE android.control.aeCompensationRange}.</p>
+ * <p>For example, if the exposure value (EV) step is 0.333, '6'
+ * will mean an exposure compensation of +2 EV; -3 will mean an
+ * exposure compensation of -1 EV. One EV represents a doubling
+ * of image brightness. Note that this control will only be
+ * effective if {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} <code>!=</code> OFF. This control
+ * will take effect even when {@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} <code>== true</code>.</p>
+     * <p>When the exposure compensation value is changed, the camera device
+     * may take several frames to reach the newly requested exposure target.
+     * During that time, the {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} field will be in the SEARCHING
+ * state. Once the new exposure target is reached, {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} will
+ * change from SEARCHING to either CONVERGED, LOCKED (if AE lock is enabled), or
+ * FLASH_REQUIRED (if the scene is too dark for still capture).</p>
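+     * <p>A minimal sketch of requesting +1 EV (assuming {@code characteristics} and
+     * {@code builder} are the device characteristics and a request builder):</p>
+     * <pre><code>
+     * Rational step = characteristics.get(
+     *         CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP);
+     * // E.g. 3 steps when the step size is 1/3 EV.
+     * int steps = Math.round(1.0f / step.floatValue());
+     * builder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, steps);
+     * </code></pre>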
+ * <p><b>Units</b>: Compensation steps</p>
+ * <p><b>Range of valid values:</b><br>
+ * {@link CameraCharacteristics#CONTROL_AE_COMPENSATION_RANGE android.control.aeCompensationRange}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#CONTROL_AE_COMPENSATION_RANGE
+ * @see CameraCharacteristics#CONTROL_AE_COMPENSATION_STEP
+ * @see CaptureRequest#CONTROL_AE_LOCK
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CaptureResult#CONTROL_AE_STATE
+ */
+ @PublicKey
+ public static final Key<Integer> CONTROL_AE_EXPOSURE_COMPENSATION =
+ new Key<Integer>("android.control.aeExposureCompensation", int.class);
+
+ /**
+ * <p>Whether auto-exposure (AE) is currently locked to its latest
+ * calculated values.</p>
+ * <p>When set to <code>true</code> (ON), the AE algorithm is locked to its latest parameters,
+ * and will not change exposure settings until the lock is set to <code>false</code> (OFF).</p>
+ * <p>Note that even when AE is locked, the flash may be fired if
+ * the {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is ON_AUTO_FLASH /
+ * ON_ALWAYS_FLASH / ON_AUTO_FLASH_REDEYE.</p>
+ * <p>When {@link CaptureRequest#CONTROL_AE_EXPOSURE_COMPENSATION android.control.aeExposureCompensation} is changed, even if the AE lock
+ * is ON, the camera device will still adjust its exposure value.</p>
+ * <p>If AE precapture is triggered (see {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger})
+ * when AE is already locked, the camera device will not change the exposure time
+ * ({@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime}) and sensitivity ({@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity})
+ * parameters. The flash may be fired if the {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode}
+ * is ON_AUTO_FLASH/ON_AUTO_FLASH_REDEYE and the scene is too dark. If the
+ * {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is ON_ALWAYS_FLASH, the scene may become overexposed.
+ * Similarly, AE precapture trigger CANCEL has no effect when AE is already locked.</p>
+     * <p>When an AE precapture sequence is triggered, AE unlock will not be able to unlock
+     * the AE if AE is locked by the camera device internally during the precapture metering
+     * sequence. In other words, submitting requests with AE unlock has no effect for an
+     * ongoing precapture metering sequence. Otherwise, the precapture metering sequence
+ * will never succeed in a sequence of preview requests where AE lock is always set
+ * to <code>false</code>.</p>
+ * <p>Since the camera device has a pipeline of in-flight requests, the settings that
+ * get locked do not necessarily correspond to the settings that were present in the
+ * latest capture result received from the camera device, since additional captures
+ * and AE updates may have occurred even before the result was sent out. If an
+ * application is switching between automatic and manual control and wishes to eliminate
+ * any flicker during the switch, the following procedure is recommended:</p>
+ * <ol>
+ * <li>Starting in auto-AE mode:</li>
+ * <li>Lock AE</li>
+ * <li>Wait for the first result to be output that has the AE locked</li>
+ * <li>Copy exposure settings from that result into a request, set the request to manual AE</li>
+ * <li>Submit the capture request, proceed to run manual AE as desired.</li>
+ * </ol>
+ * <p>See {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} for AE lock related state transition details.</p>
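+     * <p>A minimal sketch of the locking step above (assuming {@code previewBuilder}
+     * and {@code session} are the active preview request builder and capture session):</p>
+     * <pre><code>
+     * previewBuilder.set(CaptureRequest.CONTROL_AE_LOCK, true);
+     * session.setRepeatingRequest(previewBuilder.build(), callback, handler);
+     * // Wait in the callback for a result with
+     * // CONTROL_AE_STATE == CONTROL_AE_STATE_LOCKED, then copy its
+     * // SENSOR_EXPOSURE_TIME and SENSOR_SENSITIVITY into a manual request.
+     * </code></pre>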
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_EXPOSURE_COMPENSATION
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
+ * @see CaptureResult#CONTROL_AE_STATE
+ * @see CaptureRequest#SENSOR_EXPOSURE_TIME
+ * @see CaptureRequest#SENSOR_SENSITIVITY
+ */
+ @PublicKey
+ public static final Key<Boolean> CONTROL_AE_LOCK =
+ new Key<Boolean>("android.control.aeLock", boolean.class);
+
+ /**
+ * <p>The desired mode for the camera device's
+ * auto-exposure routine.</p>
+ * <p>This control is only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} is
+ * AUTO.</p>
+ * <p>When set to any of the ON modes, the camera device's
+ * auto-exposure routine is enabled, overriding the
+ * application's selected exposure time, sensor sensitivity,
+ * and frame duration ({@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime},
+ * {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}, and
+ * {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}). If one of the FLASH modes
+ * is selected, the camera device's flash unit controls are
+ * also overridden.</p>
+ * <p>The FLASH modes are only available if the camera device
+ * has a flash unit ({@link CameraCharacteristics#FLASH_INFO_AVAILABLE android.flash.info.available} is <code>true</code>).</p>
+ * <p>If flash TORCH mode is desired, this field must be set to
+ * ON or OFF, and {@link CaptureRequest#FLASH_MODE android.flash.mode} set to TORCH.</p>
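+     * <p>A minimal sketch of enabling torch while keeping auto-exposure running
+     * (assuming {@code builder} is a request builder):</p>
+     * <pre><code>
+     * builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON);
+     * builder.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_TORCH);
+     * </code></pre>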
+ * <p>When set to any of the ON modes, the values chosen by the
+ * camera device auto-exposure routine for the overridden
+ * fields for a given capture will be available in its
+ * CaptureResult.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #CONTROL_AE_MODE_OFF OFF}</li>
+ * <li>{@link #CONTROL_AE_MODE_ON ON}</li>
+ * <li>{@link #CONTROL_AE_MODE_ON_AUTO_FLASH ON_AUTO_FLASH}</li>
+ * <li>{@link #CONTROL_AE_MODE_ON_ALWAYS_FLASH ON_ALWAYS_FLASH}</li>
+ * <li>{@link #CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE ON_AUTO_FLASH_REDEYE}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_MODES android.control.aeAvailableModes}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#CONTROL_AE_AVAILABLE_MODES
+ * @see CaptureRequest#CONTROL_MODE
+ * @see CameraCharacteristics#FLASH_INFO_AVAILABLE
+ * @see CaptureRequest#FLASH_MODE
+ * @see CaptureRequest#SENSOR_EXPOSURE_TIME
+ * @see CaptureRequest#SENSOR_FRAME_DURATION
+ * @see CaptureRequest#SENSOR_SENSITIVITY
+ * @see #CONTROL_AE_MODE_OFF
+ * @see #CONTROL_AE_MODE_ON
+ * @see #CONTROL_AE_MODE_ON_AUTO_FLASH
+ * @see #CONTROL_AE_MODE_ON_ALWAYS_FLASH
+ * @see #CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE
+ */
+ @PublicKey
+ public static final Key<Integer> CONTROL_AE_MODE =
+ new Key<Integer>("android.control.aeMode", int.class);
+
+ /**
+ * <p>List of metering areas to use for auto-exposure adjustment.</p>
+ * <p>Not available if {@link CameraCharacteristics#CONTROL_MAX_REGIONS_AE android.control.maxRegionsAe} is 0.
+ * Otherwise will always be present.</p>
+ * <p>The maximum number of regions supported by the device is determined by the value
+ * of {@link CameraCharacteristics#CONTROL_MAX_REGIONS_AE android.control.maxRegionsAe}.</p>
+ * <p>The coordinate system is based on the active pixel array,
+ * with (0,0) being the top-left pixel in the active pixel array, and
+ * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1,
+ * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the
+ * bottom-right pixel in the active pixel array.</p>
+ * <p>The weight must be within <code>[0, 1000]</code>, and represents a weight
+ * for every pixel in the area. This means that a large metering area
+ * with the same weight as a smaller area will have more effect in
+ * the metering result. Metering areas can partially overlap and the
+ * camera device will add the weights in the overlap region.</p>
+ * <p>The weights are relative to weights of other exposure metering regions, so if only one
+ * region is used, all non-zero weights will have the same effect. A region with 0
+ * weight is ignored.</p>
+ * <p>If all regions have 0 weight, then no specific metering area needs to be used by the
+ * camera device.</p>
+ * <p>If the metering region is outside the used {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} returned in
+ * capture result metadata, the camera device will ignore the sections outside the crop
+ * region and output only the intersection rectangle as the metering region in the result
+ * metadata. If the region is entirely outside the crop region, it will be ignored and
+ * not reported in the result metadata.</p>
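+     * <p>A minimal sketch of metering on the center quarter of the active array
+     * (assuming {@code activeArray} was read from
+     * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE}):</p>
+     * <pre><code>
+     * MeteringRectangle region = new MeteringRectangle(
+     *         activeArray.width() / 4, activeArray.height() / 4,
+     *         activeArray.width() / 2, activeArray.height() / 2,
+     *         MeteringRectangle.METERING_WEIGHT_MAX);
+     * builder.set(CaptureRequest.CONTROL_AE_REGIONS,
+     *         new MeteringRectangle[] { region });
+     * </code></pre>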
+ * <p><b>Units</b>: Pixel coordinates within {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</p>
+ * <p><b>Range of valid values:</b><br>
+ * Coordinates must be between <code>[(0,0), (width, height))</code> of
+ * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CameraCharacteristics#CONTROL_MAX_REGIONS_AE
+ * @see CaptureRequest#SCALER_CROP_REGION
+ * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ */
+ @PublicKey
+ public static final Key<android.hardware.camera2.params.MeteringRectangle[]> CONTROL_AE_REGIONS =
+ new Key<android.hardware.camera2.params.MeteringRectangle[]>("android.control.aeRegions", android.hardware.camera2.params.MeteringRectangle[].class);
+
+ /**
+ * <p>Range over which the auto-exposure routine can
+ * adjust the capture frame rate to maintain good
+ * exposure.</p>
+ * <p>Only constrains auto-exposure (AE) algorithm, not
+ * manual control of {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime} and
+ * {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}.</p>
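+     * <p>A minimal sketch of requesting a fixed 30fps rate (assuming the range is
+     * listed in {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES}):</p>
+     * <pre><code>
+     * builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
+     *         new android.util.Range&lt;Integer&gt;(30, 30));
+     * </code></pre>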
+ * <p><b>Units</b>: Frames per second (FPS)</p>
+ * <p><b>Range of valid values:</b><br>
+ * Any of the entries in {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES android.control.aeAvailableTargetFpsRanges}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES
+ * @see CaptureRequest#SENSOR_EXPOSURE_TIME
+ * @see CaptureRequest#SENSOR_FRAME_DURATION
+ */
+ @PublicKey
+ public static final Key<android.util.Range<Integer>> CONTROL_AE_TARGET_FPS_RANGE =
+ new Key<android.util.Range<Integer>>("android.control.aeTargetFpsRange", new TypeReference<android.util.Range<Integer>>() {{ }});
+
+ /**
+ * <p>Whether the camera device will trigger a precapture
+ * metering sequence when it processes this request.</p>
+ * <p>This entry is normally set to IDLE, or is not
+ * included at all in the request settings. When included and
+ * set to START, the camera device will trigger the auto-exposure (AE)
+ * precapture metering sequence.</p>
+ * <p>When set to CANCEL, the camera device will cancel any active
+ * precapture metering trigger, and return to its initial AE state.
+ * If a precapture metering sequence is already completed, and the camera
+ * device has implicitly locked the AE for subsequent still capture, the
+ * CANCEL trigger will unlock the AE and return to its initial AE state.</p>
+ * <p>The precapture sequence should be triggered before starting a
+ * high-quality still capture for final metering decisions to
+ * be made, and for firing pre-capture flash pulses to estimate
+ * scene brightness and required final capture flash power, when
+ * the flash is enabled.</p>
+ * <p>Normally, this entry should be set to START for only a
+ * single request, and the application should wait until the
+ * sequence completes before starting a new one.</p>
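+     * <p>A minimal sketch of firing the trigger once (assuming {@code builder} holds
+     * the current preview settings and {@code session} is the capture session):</p>
+     * <pre><code>
+     * builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
+     *         CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
+     * session.capture(builder.build(), callback, handler);
+     * // Return the trigger to IDLE so later requests don't restart the sequence.
+     * builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
+     *         CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
+     * </code></pre>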
+ * <p>When a precapture metering sequence is finished, the camera device
+ * may lock the auto-exposure routine internally to be able to accurately expose the
+ * subsequent still capture image (<code>{@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent} == STILL_CAPTURE</code>).
+ * For this case, the AE may not resume normal scan if no subsequent still capture is
+ * submitted. To ensure that the AE routine restarts normal scan, the application should
+ * submit a request with <code>{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} == true</code>, followed by a request
+ * with <code>{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} == false</code>, if the application decides not to submit a
+     * still capture request after the precapture sequence completes. Alternatively, on
+     * devices running API level 23 or newer, CANCEL can be used to unlock the AE that the
+     * camera device locked internally if the application doesn't submit a still capture
+     * request after the AE precapture trigger. Note that CANCEL was added in API level 23
+     * and must not be used on devices with earlier API levels.</p>
+ * <p>The exact effect of auto-exposure (AE) precapture trigger
+ * depends on the current AE mode and state; see
+ * {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} for AE precapture state transition
+ * details.</p>
+ * <p>On LEGACY-level devices, the precapture trigger is not supported;
+ * capturing a high-resolution JPEG image will automatically trigger a
+ * precapture sequence before the high-resolution capture, including
+ * potentially firing a pre-capture flash.</p>
+ * <p>Using the precapture trigger and the auto-focus trigger {@link CaptureRequest#CONTROL_AF_TRIGGER android.control.afTrigger}
+ * simultaneously is allowed. However, since these triggers often require cooperation between
+     * the auto-focus and auto-exposure routines (for example, the flash may need to be
+     * enabled for a focus sweep), the camera device may delay acting on a later trigger until the previous
+ * trigger has been fully handled. This may lead to longer intervals between the trigger and
+ * changes to {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} indicating the start of the precapture sequence, for
+ * example.</p>
+ * <p>If both the precapture and the auto-focus trigger are activated on the same request, then
+ * the camera device will complete them in the optimal order for that device.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #CONTROL_AE_PRECAPTURE_TRIGGER_IDLE IDLE}</li>
+ * <li>{@link #CONTROL_AE_PRECAPTURE_TRIGGER_START START}</li>
+ * <li>{@link #CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL CANCEL}</li>
+ * </ul></p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Limited capability</b> -
+ * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_LOCK
+ * @see CaptureResult#CONTROL_AE_STATE
+ * @see CaptureRequest#CONTROL_AF_TRIGGER
+ * @see CaptureRequest#CONTROL_CAPTURE_INTENT
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see #CONTROL_AE_PRECAPTURE_TRIGGER_IDLE
+ * @see #CONTROL_AE_PRECAPTURE_TRIGGER_START
+ * @see #CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
+ */
+ @PublicKey
+ public static final Key<Integer> CONTROL_AE_PRECAPTURE_TRIGGER =
+ new Key<Integer>("android.control.aePrecaptureTrigger", int.class);
+
+ /**
+ * <p>Current state of the auto-exposure (AE) algorithm.</p>
+ * <p>Switching between or enabling AE modes ({@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode}) always
+ * resets the AE state to INACTIVE. Similarly, switching between {@link CaptureRequest#CONTROL_MODE android.control.mode},
+ * or {@link CaptureRequest#CONTROL_SCENE_MODE android.control.sceneMode} if <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} == USE_SCENE_MODE</code> resets all
+ * the algorithm states to INACTIVE.</p>
+ * <p>The camera device can do several state transitions between two results, if it is
+ * allowed by the state transition table. For example: INACTIVE may never actually be
+ * seen in a result.</p>
+ * <p>The state in the result is the state for this image (in sync with this image): if
+ * AE state becomes CONVERGED, then the image data associated with this result should
+ * be good to use.</p>
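+     * <p>A minimal sketch of gating a still capture on convergence (inside a capture
+     * callback, with {@code result} a completed result):</p>
+     * <pre><code>
+     * Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
+     * if (aeState != null &amp;&amp; aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED) {
+     *     // This frame was well exposed; safe to issue the still capture.
+     * }
+     * </code></pre>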
+     * <p>Below are state transition tables for different AE modes.</p>
+     * <p>When {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is AE_MODE_OFF:</p>
+ * <table>
+ * <thead>
+ * <tr>
+ * <th align="center">State</th>
+ * <th align="center">Transition Cause</th>
+ * <th align="center">New State</th>
+ * <th align="center">Notes</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center"></td>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Camera device auto exposure algorithm is disabled</td>
+ * </tr>
+ * </tbody>
+ * </table>
+ * <p>When {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is AE_MODE_ON_*:</p>
+ * <table>
+ * <thead>
+ * <tr>
+ * <th align="center">State</th>
+ * <th align="center">Transition Cause</th>
+ * <th align="center">New State</th>
+ * <th align="center">Notes</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Camera device initiates AE scan</td>
+ * <td align="center">SEARCHING</td>
+ * <td align="center">Values changing</td>
+ * </tr>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is ON</td>
+ * <td align="center">LOCKED</td>
+ * <td align="center">Values locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">SEARCHING</td>
+ * <td align="center">Camera device finishes AE scan</td>
+ * <td align="center">CONVERGED</td>
+ * <td align="center">Good values, not changing</td>
+ * </tr>
+ * <tr>
+ * <td align="center">SEARCHING</td>
+ * <td align="center">Camera device finishes AE scan</td>
+ * <td align="center">FLASH_REQUIRED</td>
+ * <td align="center">Converged but too dark w/o flash</td>
+ * </tr>
+ * <tr>
+ * <td align="center">SEARCHING</td>
+ * <td align="center">{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is ON</td>
+ * <td align="center">LOCKED</td>
+ * <td align="center">Values locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">CONVERGED</td>
+ * <td align="center">Camera device initiates AE scan</td>
+ * <td align="center">SEARCHING</td>
+ * <td align="center">Values changing</td>
+ * </tr>
+ * <tr>
+ * <td align="center">CONVERGED</td>
+ * <td align="center">{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is ON</td>
+ * <td align="center">LOCKED</td>
+ * <td align="center">Values locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">FLASH_REQUIRED</td>
+ * <td align="center">Camera device initiates AE scan</td>
+ * <td align="center">SEARCHING</td>
+ * <td align="center">Values changing</td>
+ * </tr>
+ * <tr>
+ * <td align="center">FLASH_REQUIRED</td>
+ * <td align="center">{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is ON</td>
+ * <td align="center">LOCKED</td>
+ * <td align="center">Values locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">LOCKED</td>
+ * <td align="center">{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is OFF</td>
+ * <td align="center">SEARCHING</td>
+ * <td align="center">Values not good after unlock</td>
+ * </tr>
+ * <tr>
+ * <td align="center">LOCKED</td>
+ * <td align="center">{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is OFF</td>
+ * <td align="center">CONVERGED</td>
+ * <td align="center">Values good after unlock</td>
+ * </tr>
+ * <tr>
+ * <td align="center">LOCKED</td>
+ * <td align="center">{@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is OFF</td>
+ * <td align="center">FLASH_REQUIRED</td>
+ * <td align="center">Exposure good, but too dark</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PRECAPTURE</td>
+ * <td align="center">Sequence done. {@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is OFF</td>
+ * <td align="center">CONVERGED</td>
+ * <td align="center">Ready for high-quality capture</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PRECAPTURE</td>
+ * <td align="center">Sequence done. {@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock} is ON</td>
+ * <td align="center">LOCKED</td>
+ * <td align="center">Ready for high-quality capture</td>
+ * </tr>
+ * <tr>
+ * <td align="center">LOCKED</td>
+ * <td align="center">aeLock is ON and aePrecaptureTrigger is START</td>
+ * <td align="center">LOCKED</td>
+ * <td align="center">Precapture trigger is ignored when AE is already locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">LOCKED</td>
+ * <td align="center">aeLock is ON and aePrecaptureTrigger is CANCEL</td>
+ * <td align="center">LOCKED</td>
+ * <td align="center">Precapture trigger is ignored when AE is already locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">Any state (excluding LOCKED)</td>
+ * <td align="center">{@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} is START</td>
+ * <td align="center">PRECAPTURE</td>
+ * <td align="center">Start AE precapture metering sequence</td>
+ * </tr>
+ * <tr>
+ * <td align="center">Any state (excluding LOCKED)</td>
+ * <td align="center">{@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} is CANCEL</td>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Currently active precapture metering sequence is canceled</td>
+ * </tr>
+ * </tbody>
+ * </table>
+ * <p>For the above table, the camera device may skip reporting any state changes that happen
+ * without application intervention (i.e. mode switch, trigger, locking). Any state that
+ * can be skipped in that manner is called a transient state.</p>
+     * <p>For example, for the above AE modes (AE_MODE_ON_*), in addition to the state transitions
+     * listed in the above table, it is also legal for the camera device to skip one or more
+     * transient states between two results. See the table below for examples:</p>
+ * <table>
+ * <thead>
+ * <tr>
+ * <th align="center">State</th>
+ * <th align="center">Transition Cause</th>
+ * <th align="center">New State</th>
+ * <th align="center">Notes</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Camera device finished AE scan</td>
+ * <td align="center">CONVERGED</td>
+ * <td align="center">Values are already good, transient states are skipped by camera device.</td>
+ * </tr>
+ * <tr>
+ * <td align="center">Any state (excluding LOCKED)</td>
+ * <td align="center">{@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} is START, sequence done</td>
+ * <td align="center">FLASH_REQUIRED</td>
+ * <td align="center">Converged but too dark w/o flash after a precapture sequence, transient states are skipped by camera device.</td>
+ * </tr>
+ * <tr>
+ * <td align="center">Any state (excluding LOCKED)</td>
+ * <td align="center">{@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} is START, sequence done</td>
+ * <td align="center">CONVERGED</td>
+ * <td align="center">Converged after a precapture sequence, transient states are skipped by camera device.</td>
+ * </tr>
+ * <tr>
+ * <td align="center">Any state (excluding LOCKED)</td>
+ * <td align="center">{@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} is CANCEL, converged</td>
+ * <td align="center">FLASH_REQUIRED</td>
+ * <td align="center">Converged but too dark w/o flash after a precapture sequence is canceled, transient states are skipped by camera device.</td>
+ * </tr>
+ * <tr>
+ * <td align="center">Any state (excluding LOCKED)</td>
+ * <td align="center">{@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} is CANCEL, converged</td>
+ * <td align="center">CONVERGED</td>
+ * <td align="center">Converged after a precapture sequenceis canceled, transient states are skipped by camera device.</td>
+ * </tr>
+ * <tr>
+ * <td align="center">CONVERGED</td>
+ * <td align="center">Camera device finished AE scan</td>
+ * <td align="center">FLASH_REQUIRED</td>
+ * <td align="center">Converged but too dark w/o flash after a new scan, transient states are skipped by camera device.</td>
+ * </tr>
+ * <tr>
+ * <td align="center">FLASH_REQUIRED</td>
+ * <td align="center">Camera device finished AE scan</td>
+ * <td align="center">CONVERGED</td>
+ * <td align="center">Converged after a new scan, transient states are skipped by camera device.</td>
+ * </tr>
+ * </tbody>
+ * </table>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #CONTROL_AE_STATE_INACTIVE INACTIVE}</li>
+ * <li>{@link #CONTROL_AE_STATE_SEARCHING SEARCHING}</li>
+ * <li>{@link #CONTROL_AE_STATE_CONVERGED CONVERGED}</li>
+ * <li>{@link #CONTROL_AE_STATE_LOCKED LOCKED}</li>
+ * <li>{@link #CONTROL_AE_STATE_FLASH_REQUIRED FLASH_REQUIRED}</li>
+ * <li>{@link #CONTROL_AE_STATE_PRECAPTURE PRECAPTURE}</li>
+ * </ul></p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Limited capability</b> -
+ * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_LOCK
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
+ * @see CaptureRequest#CONTROL_MODE
+ * @see CaptureRequest#CONTROL_SCENE_MODE
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see #CONTROL_AE_STATE_INACTIVE
+ * @see #CONTROL_AE_STATE_SEARCHING
+ * @see #CONTROL_AE_STATE_CONVERGED
+ * @see #CONTROL_AE_STATE_LOCKED
+ * @see #CONTROL_AE_STATE_FLASH_REQUIRED
+ * @see #CONTROL_AE_STATE_PRECAPTURE
+ */
+ @PublicKey
+ public static final Key<Integer> CONTROL_AE_STATE =
+ new Key<Integer>("android.control.aeState", int.class);
+
+ /**
+ * <p>Whether auto-focus (AF) is currently enabled, and what
+ * mode it is set to.</p>
+ * <p>Only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} = AUTO and the lens is not fixed focus
+ * (i.e. <code>{@link CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE android.lens.info.minimumFocusDistance} &gt; 0</code>). Also note that
+ * when {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is OFF, the behavior of AF is device
+     * dependent. It is recommended to lock AF by using {@link CaptureRequest#CONTROL_AF_TRIGGER android.control.afTrigger} before
+     * setting {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} to OFF, or to set the AF mode to OFF when AE is OFF.</p>
+ * <p>If the lens is controlled by the camera device auto-focus algorithm,
+ * the camera device will report the current AF status in {@link CaptureResult#CONTROL_AF_STATE android.control.afState}
+ * in result metadata.</p>
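+     * <p>A minimal sketch of enabling continuous AF for a still-capture preview
+     * (assuming CONTINUOUS_PICTURE is listed in
+     * {@link CameraCharacteristics#CONTROL_AF_AVAILABLE_MODES android.control.afAvailableModes}):</p>
+     * <pre><code>
+     * builder.set(CaptureRequest.CONTROL_AF_MODE,
+     *         CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
+     * </code></pre>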
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #CONTROL_AF_MODE_OFF OFF}</li>
+ * <li>{@link #CONTROL_AF_MODE_AUTO AUTO}</li>
+ * <li>{@link #CONTROL_AF_MODE_MACRO MACRO}</li>
+ * <li>{@link #CONTROL_AF_MODE_CONTINUOUS_VIDEO CONTINUOUS_VIDEO}</li>
+ * <li>{@link #CONTROL_AF_MODE_CONTINUOUS_PICTURE CONTINUOUS_PICTURE}</li>
+ * <li>{@link #CONTROL_AF_MODE_EDOF EDOF}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#CONTROL_AF_AVAILABLE_MODES android.control.afAvailableModes}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CameraCharacteristics#CONTROL_AF_AVAILABLE_MODES
+ * @see CaptureResult#CONTROL_AF_STATE
+ * @see CaptureRequest#CONTROL_AF_TRIGGER
+ * @see CaptureRequest#CONTROL_MODE
+ * @see CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE
+ * @see #CONTROL_AF_MODE_OFF
+ * @see #CONTROL_AF_MODE_AUTO
+ * @see #CONTROL_AF_MODE_MACRO
+ * @see #CONTROL_AF_MODE_CONTINUOUS_VIDEO
+ * @see #CONTROL_AF_MODE_CONTINUOUS_PICTURE
+ * @see #CONTROL_AF_MODE_EDOF
+ */
+ @PublicKey
+ public static final Key<Integer> CONTROL_AF_MODE =
+ new Key<Integer>("android.control.afMode", int.class);
+
+ /**
+ * <p>List of metering areas to use for auto-focus.</p>
+ * <p>Not available if {@link CameraCharacteristics#CONTROL_MAX_REGIONS_AF android.control.maxRegionsAf} is 0.
+ * Otherwise will always be present.</p>
+ * <p>The maximum number of focus areas supported by the device is determined by the value
+ * of {@link CameraCharacteristics#CONTROL_MAX_REGIONS_AF android.control.maxRegionsAf}.</p>
+ * <p>The coordinate system is based on the active pixel array,
+ * with (0,0) being the top-left pixel in the active pixel array, and
+ * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1,
+ * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the
+ * bottom-right pixel in the active pixel array.</p>
+ * <p>The weight must be within <code>[0, 1000]</code>, and represents a weight
+ * for every pixel in the area. This means that a large metering area
+ * with the same weight as a smaller area will have more effect in
+ * the metering result. Metering areas can partially overlap and the
+ * camera device will add the weights in the overlap region.</p>
+ * <p>The weights are relative to weights of other metering regions, so if only one region
+ * is used, all non-zero weights will have the same effect. A region with 0 weight is
+ * ignored.</p>
+ * <p>If all regions have 0 weight, then no specific metering area needs to be used by the
+ * camera device.</p>
+ * <p>If the metering region is outside the used {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} returned in
+ * capture result metadata, the camera device will ignore the sections outside the crop
+ * region and output only the intersection rectangle as the metering region in the result
+ * metadata. If the region is entirely outside the crop region, it will be ignored and
+ * not reported in the result metadata.</p>
+ * <p><b>Units</b>: Pixel coordinates within {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</p>
+ * <p><b>Range of valid values:</b><br>
+ * Coordinates must be between <code>[(0,0), (width, height))</code> of
+ * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CameraCharacteristics#CONTROL_MAX_REGIONS_AF
+ * @see CaptureRequest#SCALER_CROP_REGION
+ * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ */
+ @PublicKey
+ public static final Key<android.hardware.camera2.params.MeteringRectangle[]> CONTROL_AF_REGIONS =
+ new Key<android.hardware.camera2.params.MeteringRectangle[]>("android.control.afRegions", android.hardware.camera2.params.MeteringRectangle[].class);
+
+ /**
+ * <p>Whether the camera device will trigger autofocus for this request.</p>
+ * <p>This entry is normally set to IDLE, or is not
+ * included at all in the request settings.</p>
+ * <p>When included and set to START, the camera device will trigger the
+ * autofocus algorithm. If autofocus is disabled, this trigger has no effect.</p>
+ * <p>When set to CANCEL, the camera device will cancel any active trigger,
+ * and return to its initial AF state.</p>
+ * <p>Generally, applications should set this entry to START or CANCEL for only a
+ * single capture, and then return it to IDLE (or not set at all). Specifying
+ * START for multiple captures in a row means restarting the AF operation over
+ * and over again.</p>
+ * <p>See {@link CaptureResult#CONTROL_AF_STATE android.control.afState} for what the trigger means for each AF mode.</p>
+ * <p>Using the autofocus trigger and the precapture trigger {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger}
+ * simultaneously is allowed. However, since these triggers often require cooperation between
+ * the auto-focus and auto-exposure routines (for example, the flash may need to be enabled for a
+ * focus sweep), the camera device may delay acting on a later trigger until the previous
+ * trigger has been fully handled. This may lead to longer intervals between the trigger and
+ * changes to {@link CaptureResult#CONTROL_AF_STATE android.control.afState}, for example.</p>
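+ * <p>A minimal sketch (assuming a running repeating preview request; error handling
+ * omitted) of a one-shot trigger that is then returned to IDLE:</p>
+ * <pre><code>// Fire the trigger in exactly one capture...
+ * builder.set(CaptureRequest.CONTROL_AF_TRIGGER,
+ *         CameraMetadata.CONTROL_AF_TRIGGER_START);
+ * session.capture(builder.build(), captureCallback, handler);
+ * // ...then clear it so the repeating request does not re-trigger AF.
+ * builder.set(CaptureRequest.CONTROL_AF_TRIGGER,
+ *         CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
+ * session.setRepeatingRequest(builder.build(), captureCallback, handler);
+ * </code></pre>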
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #CONTROL_AF_TRIGGER_IDLE IDLE}</li>
+ * <li>{@link #CONTROL_AF_TRIGGER_START START}</li>
+ * <li>{@link #CONTROL_AF_TRIGGER_CANCEL CANCEL}</li>
+ * </ul></p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
+ * @see CaptureResult#CONTROL_AF_STATE
+ * @see #CONTROL_AF_TRIGGER_IDLE
+ * @see #CONTROL_AF_TRIGGER_START
+ * @see #CONTROL_AF_TRIGGER_CANCEL
+ */
+ @PublicKey
+ public static final Key<Integer> CONTROL_AF_TRIGGER =
+ new Key<Integer>("android.control.afTrigger", int.class);
+
+ /**
+ * <p>Current state of auto-focus (AF) algorithm.</p>
+ * <p>Switching between or enabling AF modes ({@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}) always
+ * resets the AF state to INACTIVE. Similarly, switching between {@link CaptureRequest#CONTROL_MODE android.control.mode},
+ * or {@link CaptureRequest#CONTROL_SCENE_MODE android.control.sceneMode} if <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} == USE_SCENE_MODE</code> resets all
+ * the algorithm states to INACTIVE.</p>
+ * <p>The camera device can do several state transitions between two results, if it is
+ * allowed by the state transition table. For example, INACTIVE may never actually be
+ * seen in a result.</p>
+ * <p>The state in the result is the state for this image (in sync with this image): if
+ * AF state becomes FOCUSED, then the image data associated with this result should
+ * be sharp.</p>
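+ * <p>A minimal sketch of watching this state from a capture callback
+ * (<code>onFocusLocked</code> is a hypothetical application hook):</p>
+ * <pre><code>CameraCaptureSession.CaptureCallback callback =
+ *         new CameraCaptureSession.CaptureCallback() {
+ *     &#64;Override
+ *     public void onCaptureCompleted(CameraCaptureSession session,
+ *             CaptureRequest request, TotalCaptureResult result) {
+ *         Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
+ *         if (afState != null
+ *                 &amp;&amp; afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED) {
+ *             onFocusLocked();
+ *         }
+ *     }
+ * };
+ * </code></pre>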
+ * <p>Below are state transition tables for different AF modes.</p>
+ * <p>When {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode} is AF_MODE_OFF or AF_MODE_EDOF:</p>
+ * <table>
+ * <thead>
+ * <tr>
+ * <th align="center">State</th>
+ * <th align="center">Transition Cause</th>
+ * <th align="center">New State</th>
+ * <th align="center">Notes</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center"></td>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Never changes</td>
+ * </tr>
+ * </tbody>
+ * </table>
+ * <p>When {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode} is AF_MODE_AUTO or AF_MODE_MACRO:</p>
+ * <table>
+ * <thead>
+ * <tr>
+ * <th align="center">State</th>
+ * <th align="center">Transition Cause</th>
+ * <th align="center">New State</th>
+ * <th align="center">Notes</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">ACTIVE_SCAN</td>
+ * <td align="center">Start AF sweep, Lens now moving</td>
+ * </tr>
+ * <tr>
+ * <td align="center">ACTIVE_SCAN</td>
+ * <td align="center">AF sweep done</td>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">Focused, Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">ACTIVE_SCAN</td>
+ * <td align="center">AF sweep done</td>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">Not focused, Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">ACTIVE_SCAN</td>
+ * <td align="center">AF_CANCEL</td>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Cancel/reset AF, Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">AF_CANCEL</td>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Cancel/reset AF</td>
+ * </tr>
+ * <tr>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">ACTIVE_SCAN</td>
+ * <td align="center">Start new sweep, Lens now moving</td>
+ * </tr>
+ * <tr>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">AF_CANCEL</td>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Cancel/reset AF</td>
+ * </tr>
+ * <tr>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">ACTIVE_SCAN</td>
+ * <td align="center">Start new sweep, Lens now moving</td>
+ * </tr>
+ * <tr>
+ * <td align="center">Any state</td>
+ * <td align="center">Mode change</td>
+ * <td align="center">INACTIVE</td>
+ * <td align="center"></td>
+ * </tr>
+ * </tbody>
+ * </table>
+ * <p>For the above table, the camera device may skip reporting any state changes that happen
+ * without application intervention (i.e. mode switch, trigger, locking). Any state that
+ * can be skipped in that manner is called a transient state.</p>
+ * <p>For example, for these AF modes (AF_MODE_AUTO and AF_MODE_MACRO), in addition to the
+ * state transitions listed in the above table, it is also legal for the camera device to skip
+ * one or more transient states between two results. See the table below for examples:</p>
+ * <table>
+ * <thead>
+ * <tr>
+ * <th align="center">State</th>
+ * <th align="center">Transition Cause</th>
+ * <th align="center">New State</th>
+ * <th align="center">Notes</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">Focus is already good or good after a scan, lens is now locked.</td>
+ * </tr>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">Focus failed after a scan, lens is now locked.</td>
+ * </tr>
+ * <tr>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">Focus is already good or good after a scan, lens is now locked.</td>
+ * </tr>
+ * <tr>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">Focus is good after a scan, lens is not locked.</td>
+ * </tr>
+ * </tbody>
+ * </table>
+ * <p>When {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode} is AF_MODE_CONTINUOUS_VIDEO:</p>
+ * <table>
+ * <thead>
+ * <tr>
+ * <th align="center">State</th>
+ * <th align="center">Transition Cause</th>
+ * <th align="center">New State</th>
+ * <th align="center">Notes</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Camera device initiates new scan</td>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">Start AF scan, Lens now moving</td>
+ * </tr>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">AF state query, Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">Camera device completes current scan</td>
+ * <td align="center">PASSIVE_FOCUSED</td>
+ * <td align="center">End AF scan, Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">Camera device fails current scan</td>
+ * <td align="center">PASSIVE_UNFOCUSED</td>
+ * <td align="center">End AF scan, Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">Immediate transition, if focus is good. Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">Immediate transition, if focus is bad. Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">AF_CANCEL</td>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Reset lens position, Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_FOCUSED</td>
+ * <td align="center">Camera device initiates new scan</td>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">Start AF scan, Lens now moving</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_UNFOCUSED</td>
+ * <td align="center">Camera device initiates new scan</td>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">Start AF scan, Lens now moving</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_FOCUSED</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">Immediate transition, lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_UNFOCUSED</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">Immediate transition, lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">No effect</td>
+ * </tr>
+ * <tr>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">AF_CANCEL</td>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Restart AF scan</td>
+ * </tr>
+ * <tr>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">No effect</td>
+ * </tr>
+ * <tr>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">AF_CANCEL</td>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Restart AF scan</td>
+ * </tr>
+ * </tbody>
+ * </table>
+ * <p>When {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode} is AF_MODE_CONTINUOUS_PICTURE:</p>
+ * <table>
+ * <thead>
+ * <tr>
+ * <th align="center">State</th>
+ * <th align="center">Transition Cause</th>
+ * <th align="center">New State</th>
+ * <th align="center">Notes</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Camera device initiates new scan</td>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">Start AF scan, Lens now moving</td>
+ * </tr>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">AF state query, Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">Camera device completes current scan</td>
+ * <td align="center">PASSIVE_FOCUSED</td>
+ * <td align="center">End AF scan, Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">Camera device fails current scan</td>
+ * <td align="center">PASSIVE_UNFOCUSED</td>
+ * <td align="center">End AF scan, Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">Eventual transition once the focus is good. Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">Eventual transition if cannot find focus. Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">AF_CANCEL</td>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Reset lens position, Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_FOCUSED</td>
+ * <td align="center">Camera device initiates new scan</td>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">Start AF scan, Lens now moving</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_UNFOCUSED</td>
+ * <td align="center">Camera device initiates new scan</td>
+ * <td align="center">PASSIVE_SCAN</td>
+ * <td align="center">Start AF scan, Lens now moving</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_FOCUSED</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">Immediate trans. Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">PASSIVE_UNFOCUSED</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">Immediate trans. Lens now locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">No effect</td>
+ * </tr>
+ * <tr>
+ * <td align="center">FOCUSED_LOCKED</td>
+ * <td align="center">AF_CANCEL</td>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Restart AF scan</td>
+ * </tr>
+ * <tr>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">AF_TRIGGER</td>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">No effect</td>
+ * </tr>
+ * <tr>
+ * <td align="center">NOT_FOCUSED_LOCKED</td>
+ * <td align="center">AF_CANCEL</td>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Restart AF scan</td>
+ * </tr>
+ * </tbody>
+ * </table>
+ * <p>When switching between AF_MODE_CONTINUOUS_* (CAF modes) and AF_MODE_AUTO/AF_MODE_MACRO
+ * (AUTO modes), the initial INACTIVE or PASSIVE_SCAN states may be skipped by the
+ * camera device. When a trigger is included in a mode switch request, the trigger
+ * will be evaluated in the context of the new mode in the request.
+ * See the table below for examples:</p>
+ * <table>
+ * <thead>
+ * <tr>
+ * <th align="center">State</th>
+ * <th align="center">Transition Cause</th>
+ * <th align="center">New State</th>
+ * <th align="center">Notes</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td align="center">any state</td>
+ * <td align="center">CAF--&gt;AUTO mode switch</td>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Mode switch without trigger, initial state must be INACTIVE</td>
+ * </tr>
+ * <tr>
+ * <td align="center">any state</td>
+ * <td align="center">CAF--&gt;AUTO mode switch with AF_TRIGGER</td>
+ * <td align="center">trigger-reachable states from INACTIVE</td>
+ * <td align="center">Mode switch with trigger, INACTIVE is skipped</td>
+ * </tr>
+ * <tr>
+ * <td align="center">any state</td>
+ * <td align="center">AUTO--&gt;CAF mode switch</td>
+ * <td align="center">passively reachable states from INACTIVE</td>
+ * <td align="center">Mode switch without trigger, passive transient state is skipped</td>
+ * </tr>
+ * </tbody>
+ * </table>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #CONTROL_AF_STATE_INACTIVE INACTIVE}</li>
+ * <li>{@link #CONTROL_AF_STATE_PASSIVE_SCAN PASSIVE_SCAN}</li>
+ * <li>{@link #CONTROL_AF_STATE_PASSIVE_FOCUSED PASSIVE_FOCUSED}</li>
+ * <li>{@link #CONTROL_AF_STATE_ACTIVE_SCAN ACTIVE_SCAN}</li>
+ * <li>{@link #CONTROL_AF_STATE_FOCUSED_LOCKED FOCUSED_LOCKED}</li>
+ * <li>{@link #CONTROL_AF_STATE_NOT_FOCUSED_LOCKED NOT_FOCUSED_LOCKED}</li>
+ * <li>{@link #CONTROL_AF_STATE_PASSIVE_UNFOCUSED PASSIVE_UNFOCUSED}</li>
+ * </ul></p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_AF_MODE
+ * @see CaptureRequest#CONTROL_MODE
+ * @see CaptureRequest#CONTROL_SCENE_MODE
+ * @see #CONTROL_AF_STATE_INACTIVE
+ * @see #CONTROL_AF_STATE_PASSIVE_SCAN
+ * @see #CONTROL_AF_STATE_PASSIVE_FOCUSED
+ * @see #CONTROL_AF_STATE_ACTIVE_SCAN
+ * @see #CONTROL_AF_STATE_FOCUSED_LOCKED
+ * @see #CONTROL_AF_STATE_NOT_FOCUSED_LOCKED
+ * @see #CONTROL_AF_STATE_PASSIVE_UNFOCUSED
+ */
+ @PublicKey
+ public static final Key<Integer> CONTROL_AF_STATE =
+ new Key<Integer>("android.control.afState", int.class);
+
+ /**
+ * <p>Whether auto-white balance (AWB) is currently locked to its
+ * latest calculated values.</p>
+ * <p>When set to <code>true</code> (ON), the AWB algorithm is locked to its latest parameters,
+ * and will not change color balance settings until the lock is set to <code>false</code> (OFF).</p>
+ * <p>Since the camera device has a pipeline of in-flight requests, the settings that
+ * get locked do not necessarily correspond to the settings that were present in the
+ * latest capture result received from the camera device, since additional captures
+ * and AWB updates may have occurred even before the result was sent out. If an
+ * application is switching between automatic and manual control and wishes to eliminate
+ * any flicker during the switch, the following procedure is recommended (a code sketch follows the list):</p>
+ * <ol>
+ * <li>Starting in auto-AWB mode:</li>
+ * <li>Lock AWB</li>
+ * <li>Wait for the first result to be output that has the AWB locked</li>
+ * <li>Copy AWB settings from that result into a request, set the request to manual AWB</li>
+ * <li>Submit the capture request, proceed to run manual AWB as desired.</li>
+ * </ol>
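+ * <p>A minimal sketch of this procedure (error handling omitted; the second half
+ * runs inside a capture callback once a <code>result</code> arrives):</p>
+ * <pre><code>// Steps 1-2: starting from auto-AWB, lock AWB.
+ * builder.set(CaptureRequest.CONTROL_AWB_LOCK, true);
+ * session.setRepeatingRequest(builder.build(), callback, handler);
+ *
+ * // Steps 3-5: once a result reports the lock, copy its color correction
+ * // settings into a manual-AWB request.
+ * Integer awbState = result.get(CaptureResult.CONTROL_AWB_STATE);
+ * if (awbState != null
+ *         &amp;&amp; awbState == CaptureResult.CONTROL_AWB_STATE_LOCKED) {
+ *     builder.set(CaptureRequest.CONTROL_AWB_MODE,
+ *             CameraMetadata.CONTROL_AWB_MODE_OFF);
+ *     builder.set(CaptureRequest.COLOR_CORRECTION_MODE,
+ *             CameraMetadata.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
+ *     builder.set(CaptureRequest.COLOR_CORRECTION_GAINS,
+ *             result.get(CaptureResult.COLOR_CORRECTION_GAINS));
+ *     builder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM,
+ *             result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM));
+ *     session.setRepeatingRequest(builder.build(), callback, handler);
+ * }
+ * </code></pre>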
+ * <p>Note that AWB lock is only meaningful when
+ * {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} is in the AUTO mode; in other modes,
+ * AWB is already fixed to a specific setting.</p>
+ * <p>Some LEGACY devices may not support ON; the value is then overridden to OFF.</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_AWB_MODE
+ */
+ @PublicKey
+ public static final Key<Boolean> CONTROL_AWB_LOCK =
+ new Key<Boolean>("android.control.awbLock", boolean.class);
+
+ /**
+ * <p>Whether auto-white balance (AWB) is currently setting the color
+ * transform fields, and what its illumination target
+ * is.</p>
+ * <p>This control is only effective if {@link CaptureRequest#CONTROL_MODE android.control.mode} is AUTO.</p>
+ * <p>When set to the ON mode, the camera device's auto-white balance
+ * routine is enabled, overriding the application's selected
+ * {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}, {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} and
+ * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode}. Note that when {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode}
+ * is OFF, the behavior of AWB is device dependent. It is recommended to
+ * also set AWB mode to OFF or lock AWB by using {@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock} before
+ * setting AE mode to OFF.</p>
+ * <p>When set to the OFF mode, the camera device's auto-white balance
+ * routine is disabled. The application manually controls the white
+ * balance by {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}, {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains}
+ * and {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode}.</p>
+ * <p>When set to any other mode, the camera device's auto-white
+ * balance routine is disabled. The camera device uses each
+ * particular illumination target for white balance
+ * adjustment. The application's values for
+ * {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform},
+ * {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} and
+ * {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} are ignored.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #CONTROL_AWB_MODE_OFF OFF}</li>
+ * <li>{@link #CONTROL_AWB_MODE_AUTO AUTO}</li>
+ * <li>{@link #CONTROL_AWB_MODE_INCANDESCENT INCANDESCENT}</li>
+ * <li>{@link #CONTROL_AWB_MODE_FLUORESCENT FLUORESCENT}</li>
+ * <li>{@link #CONTROL_AWB_MODE_WARM_FLUORESCENT WARM_FLUORESCENT}</li>
+ * <li>{@link #CONTROL_AWB_MODE_DAYLIGHT DAYLIGHT}</li>
+ * <li>{@link #CONTROL_AWB_MODE_CLOUDY_DAYLIGHT CLOUDY_DAYLIGHT}</li>
+ * <li>{@link #CONTROL_AWB_MODE_TWILIGHT TWILIGHT}</li>
+ * <li>{@link #CONTROL_AWB_MODE_SHADE SHADE}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#CONTROL_AWB_AVAILABLE_MODES android.control.awbAvailableModes}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#COLOR_CORRECTION_GAINS
+ * @see CaptureRequest#COLOR_CORRECTION_MODE
+ * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CameraCharacteristics#CONTROL_AWB_AVAILABLE_MODES
+ * @see CaptureRequest#CONTROL_AWB_LOCK
+ * @see CaptureRequest#CONTROL_MODE
+ * @see #CONTROL_AWB_MODE_OFF
+ * @see #CONTROL_AWB_MODE_AUTO
+ * @see #CONTROL_AWB_MODE_INCANDESCENT
+ * @see #CONTROL_AWB_MODE_FLUORESCENT
+ * @see #CONTROL_AWB_MODE_WARM_FLUORESCENT
+ * @see #CONTROL_AWB_MODE_DAYLIGHT
+ * @see #CONTROL_AWB_MODE_CLOUDY_DAYLIGHT
+ * @see #CONTROL_AWB_MODE_TWILIGHT
+ * @see #CONTROL_AWB_MODE_SHADE
+ */
+ @PublicKey
+ public static final Key<Integer> CONTROL_AWB_MODE =
+ new Key<Integer>("android.control.awbMode", int.class);
+
+ /**
+ * <p>List of metering areas to use for auto-white-balance illuminant
+ * estimation.</p>
+ * <p>Not available if {@link CameraCharacteristics#CONTROL_MAX_REGIONS_AWB android.control.maxRegionsAwb} is 0.
+ * Otherwise will always be present.</p>
+ * <p>The maximum number of regions supported by the device is determined by the value
+ * of {@link CameraCharacteristics#CONTROL_MAX_REGIONS_AWB android.control.maxRegionsAwb}.</p>
+ * <p>The coordinate system is based on the active pixel array,
+ * with (0,0) being the top-left pixel in the active pixel array, and
+ * ({@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.width - 1,
+ * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.height - 1) being the
+ * bottom-right pixel in the active pixel array.</p>
+ * <p>The weight must range from 0 to 1000, and represents a weight
+ * for every pixel in the area. This means that a large metering area
+ * with the same weight as a smaller area will have more effect in
+ * the metering result. Metering areas can partially overlap and the
+ * camera device will add the weights in the overlap region.</p>
+ * <p>The weights are relative to weights of other white balance metering regions, so if
+ * only one region is used, all non-zero weights will have the same effect. A region with
+ * 0 weight is ignored.</p>
+ * <p>If all regions have 0 weight, then no specific metering area needs to be used by the
+ * camera device.</p>
+ * <p>If the metering region is outside the used {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} returned in
+ * capture result metadata, the camera device will ignore the sections outside the crop
+ * region and output only the intersection rectangle as the metering region in the result
+ * metadata. If the region is entirely outside the crop region, it will be ignored and
+ * not reported in the result metadata.</p>
+ * <p><b>Units</b>: Pixel coordinates within {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</p>
+ * <p><b>Range of valid values:</b><br>
+ * Coordinates must be between <code>[(0,0), (width, height))</code> of
+ * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CameraCharacteristics#CONTROL_MAX_REGIONS_AWB
+ * @see CaptureRequest#SCALER_CROP_REGION
+ * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ */
+ @PublicKey
+ public static final Key<android.hardware.camera2.params.MeteringRectangle[]> CONTROL_AWB_REGIONS =
+ new Key<android.hardware.camera2.params.MeteringRectangle[]>("android.control.awbRegions", android.hardware.camera2.params.MeteringRectangle[].class);
+
+ /**
+ * <p>Information to the camera device 3A (auto-exposure,
+ * auto-focus, auto-white balance) routines about the purpose
+ * of this capture, to help the camera device to decide optimal 3A
+ * strategy.</p>
+ * <p>This control (except for MANUAL) is only effective if
+ * <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} != OFF</code> and any 3A routine is active.</p>
+ * <p>ZERO_SHUTTER_LAG will be supported if {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities}
+ * contains PRIVATE_REPROCESSING or YUV_REPROCESSING. MANUAL will be supported if
+ * {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains MANUAL_SENSOR. Other intent values are
+ * always supported.</p>
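+ * <p>A minimal sketch (assuming a <code>characteristics</code> object) of checking
+ * whether the ZERO_SHUTTER_LAG intent is supported on a device:</p>
+ * <pre><code>int[] caps = characteristics.get(
+ *         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
+ * boolean zslSupported = false;
+ * for (int cap : caps) {
+ *     if (cap == CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING
+ *             || cap == CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING) {
+ *         zslSupported = true;
+ *     }
+ * }
+ * </code></pre>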
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #CONTROL_CAPTURE_INTENT_CUSTOM CUSTOM}</li>
+ * <li>{@link #CONTROL_CAPTURE_INTENT_PREVIEW PREVIEW}</li>
+ * <li>{@link #CONTROL_CAPTURE_INTENT_STILL_CAPTURE STILL_CAPTURE}</li>
+ * <li>{@link #CONTROL_CAPTURE_INTENT_VIDEO_RECORD VIDEO_RECORD}</li>
+ * <li>{@link #CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT VIDEO_SNAPSHOT}</li>
+ * <li>{@link #CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG ZERO_SHUTTER_LAG}</li>
+ * <li>{@link #CONTROL_CAPTURE_INTENT_MANUAL MANUAL}</li>
+ * </ul></p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_MODE
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ * @see #CONTROL_CAPTURE_INTENT_CUSTOM
+ * @see #CONTROL_CAPTURE_INTENT_PREVIEW
+ * @see #CONTROL_CAPTURE_INTENT_STILL_CAPTURE
+ * @see #CONTROL_CAPTURE_INTENT_VIDEO_RECORD
+ * @see #CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT
+ * @see #CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG
+ * @see #CONTROL_CAPTURE_INTENT_MANUAL
+ */
+ @PublicKey
+ public static final Key<Integer> CONTROL_CAPTURE_INTENT =
+ new Key<Integer>("android.control.captureIntent", int.class);
+
+ /**
+ * <p>Current state of auto-white balance (AWB) algorithm.</p>
+ * <p>Switching between or enabling AWB modes ({@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}) always
+ * resets the AWB state to INACTIVE. Similarly, switching between {@link CaptureRequest#CONTROL_MODE android.control.mode},
+ * or {@link CaptureRequest#CONTROL_SCENE_MODE android.control.sceneMode} if <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} == USE_SCENE_MODE</code> resets all
+ * the algorithm states to INACTIVE.</p>
+ * <p>The camera device can do several state transitions between two results, if it is
+ * allowed by the state transition table. So INACTIVE may never actually be seen in
+ * a result.</p>
+ * <p>The state in the result is the state for this image (in sync with this image): if
+ * AWB state becomes CONVERGED, then the image data associated with this result should
+ * be good to use.</p>
+ * <p>Below are state transition tables for different AWB modes.</p>
+ * <p>When <code>{@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} != AWB_MODE_AUTO</code>:</p>
+ * <table>
+ * <thead>
+ * <tr>
+ * <th align="center">State</th>
+ * <th align="center">Transition Cause</th>
+ * <th align="center">New State</th>
+ * <th align="center">Notes</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center"></td>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Camera device auto white balance algorithm is disabled</td>
+ * </tr>
+ * </tbody>
+ * </table>
+ * <p>When {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} is AWB_MODE_AUTO:</p>
+ * <table>
+ * <thead>
+ * <tr>
+ * <th align="center">State</th>
+ * <th align="center">Transition Cause</th>
+ * <th align="center">New State</th>
+ * <th align="center">Notes</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Camera device initiates AWB scan</td>
+ * <td align="center">SEARCHING</td>
+ * <td align="center">Values changing</td>
+ * </tr>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">{@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock} is ON</td>
+ * <td align="center">LOCKED</td>
+ * <td align="center">Values locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">SEARCHING</td>
+ * <td align="center">Camera device finishes AWB scan</td>
+ * <td align="center">CONVERGED</td>
+ * <td align="center">Good values, not changing</td>
+ * </tr>
+ * <tr>
+ * <td align="center">SEARCHING</td>
+ * <td align="center">{@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock} is ON</td>
+ * <td align="center">LOCKED</td>
+ * <td align="center">Values locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">CONVERGED</td>
+ * <td align="center">Camera device initiates AWB scan</td>
+ * <td align="center">SEARCHING</td>
+ * <td align="center">Values changing</td>
+ * </tr>
+ * <tr>
+ * <td align="center">CONVERGED</td>
+ * <td align="center">{@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock} is ON</td>
+ * <td align="center">LOCKED</td>
+ * <td align="center">Values locked</td>
+ * </tr>
+ * <tr>
+ * <td align="center">LOCKED</td>
+ * <td align="center">{@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock} is OFF</td>
+ * <td align="center">SEARCHING</td>
+ * <td align="center">Values not good after unlock</td>
+ * </tr>
+ * </tbody>
+ * </table>
+ * <p>For the above table, the camera device may skip reporting any state changes that happen
+ * without application intervention (i.e. mode switch, trigger, locking). Any state that
+ * can be skipped in that manner is called a transient state.</p>
+ * <p>For example, for this AWB mode (AWB_MODE_AUTO), in addition to the state transitions
+ * listed in the above table, it is also legal for the camera device to skip one or more
+ * transient states between two results. See the table below for examples:</p>
+ * <table>
+ * <thead>
+ * <tr>
+ * <th align="center">State</th>
+ * <th align="center">Transition Cause</th>
+ * <th align="center">New State</th>
+ * <th align="center">Notes</th>
+ * </tr>
+ * </thead>
+ * <tbody>
+ * <tr>
+ * <td align="center">INACTIVE</td>
+ * <td align="center">Camera device finished AWB scan</td>
+ * <td align="center">CONVERGED</td>
+ * <td align="center">Values are already good, transient states are skipped by camera device.</td>
+ * </tr>
+ * <tr>
+ * <td align="center">LOCKED</td>
+ * <td align="center">{@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock} is OFF</td>
+ * <td align="center">CONVERGED</td>
+ * <td align="center">Values good after unlock, transient states are skipped by camera device.</td>
+ * </tr>
+ * </tbody>
+ * </table>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #CONTROL_AWB_STATE_INACTIVE INACTIVE}</li>
+ * <li>{@link #CONTROL_AWB_STATE_SEARCHING SEARCHING}</li>
+ * <li>{@link #CONTROL_AWB_STATE_CONVERGED CONVERGED}</li>
+ * <li>{@link #CONTROL_AWB_STATE_LOCKED LOCKED}</li>
+ * </ul></p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Limited capability</b> -
+ * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CaptureRequest#CONTROL_AWB_LOCK
+ * @see CaptureRequest#CONTROL_AWB_MODE
+ * @see CaptureRequest#CONTROL_MODE
+ * @see CaptureRequest#CONTROL_SCENE_MODE
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see #CONTROL_AWB_STATE_INACTIVE
+ * @see #CONTROL_AWB_STATE_SEARCHING
+ * @see #CONTROL_AWB_STATE_CONVERGED
+ * @see #CONTROL_AWB_STATE_LOCKED
+ */
+ @PublicKey
+ public static final Key<Integer> CONTROL_AWB_STATE =
+ new Key<Integer>("android.control.awbState", int.class);
+
+ /**
+ * <p>A special color effect to apply.</p>
+ * <p>When this mode is set, a color effect will be applied
+ * to images produced by the camera device. The interpretation
+ * and implementation of these color effects is left to the
+ * implementor of the camera device, and should not be
+ * depended on to be consistent (or present) across all
+ * devices.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #CONTROL_EFFECT_MODE_OFF OFF}</li>
+ * <li>{@link #CONTROL_EFFECT_MODE_MONO MONO}</li>
+ * <li>{@link #CONTROL_EFFECT_MODE_NEGATIVE NEGATIVE}</li>
+ * <li>{@link #CONTROL_EFFECT_MODE_SOLARIZE SOLARIZE}</li>
+ * <li>{@link #CONTROL_EFFECT_MODE_SEPIA SEPIA}</li>
+ * <li>{@link #CONTROL_EFFECT_MODE_POSTERIZE POSTERIZE}</li>
+ * <li>{@link #CONTROL_EFFECT_MODE_WHITEBOARD WHITEBOARD}</li>
+ * <li>{@link #CONTROL_EFFECT_MODE_BLACKBOARD BLACKBOARD}</li>
+ * <li>{@link #CONTROL_EFFECT_MODE_AQUA AQUA}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#CONTROL_AVAILABLE_EFFECTS android.control.availableEffects}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#CONTROL_AVAILABLE_EFFECTS
+ * @see #CONTROL_EFFECT_MODE_OFF
+ * @see #CONTROL_EFFECT_MODE_MONO
+ * @see #CONTROL_EFFECT_MODE_NEGATIVE
+ * @see #CONTROL_EFFECT_MODE_SOLARIZE
+ * @see #CONTROL_EFFECT_MODE_SEPIA
+ * @see #CONTROL_EFFECT_MODE_POSTERIZE
+ * @see #CONTROL_EFFECT_MODE_WHITEBOARD
+ * @see #CONTROL_EFFECT_MODE_BLACKBOARD
+ * @see #CONTROL_EFFECT_MODE_AQUA
+ */
+ @PublicKey
+ public static final Key<Integer> CONTROL_EFFECT_MODE =
+ new Key<Integer>("android.control.effectMode", int.class);
+
+ /**
+ * <p>Overall mode of 3A (auto-exposure, auto-white-balance, auto-focus) control
+ * routines.</p>
+ * <p>This is a top-level 3A control switch. When set to OFF, all 3A control
+ * by the camera device is disabled. The application must set the fields for
+ * capture parameters itself.</p>
+ * <p>When set to AUTO, the individual algorithm controls in
+ * android.control.* are in effect, such as {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}.</p>
+ * <p>When set to USE_SCENE_MODE, the individual controls in
+ * android.control.* are mostly disabled, and the camera device implements
+ * one of the scene mode settings (such as ACTION, SUNSET, or PARTY)
+ * as it wishes. The camera device scene mode 3A settings are provided by
+ * {@link android.hardware.camera2.CaptureResult capture results}.</p>
+ * <p>When set to OFF_KEEP_STATE, the behavior is similar to OFF mode; the only
+ * difference is that this frame will not be used by the camera device's background
+ * 3A statistics update, as if the frame were never captured. This mode can be used
+ * when the application doesn't want a manual-control 3A capture to affect
+ * the subsequent auto 3A capture results.</p>
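+ * <p>A minimal sketch (assuming the MANUAL_SENSOR capability) of disabling 3A and
+ * supplying exposure settings directly; the specific values are illustrative:</p>
+ * <pre><code>builder.set(CaptureRequest.CONTROL_MODE,
+ *         CameraMetadata.CONTROL_MODE_OFF);
+ * // With 3A off, the application must set the exposure parameters itself.
+ * builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 16666666L); // ~1/60 s, in ns
+ * builder.set(CaptureRequest.SENSOR_SENSITIVITY, 400);         // ISO 400
+ * </code></pre>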
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #CONTROL_MODE_OFF OFF}</li>
+ * <li>{@link #CONTROL_MODE_AUTO AUTO}</li>
+ * <li>{@link #CONTROL_MODE_USE_SCENE_MODE USE_SCENE_MODE}</li>
+ * <li>{@link #CONTROL_MODE_OFF_KEEP_STATE OFF_KEEP_STATE}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#CONTROL_AVAILABLE_MODES android.control.availableModes}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_AF_MODE
+ * @see CameraCharacteristics#CONTROL_AVAILABLE_MODES
+ * @see #CONTROL_MODE_OFF
+ * @see #CONTROL_MODE_AUTO
+ * @see #CONTROL_MODE_USE_SCENE_MODE
+ * @see #CONTROL_MODE_OFF_KEEP_STATE
+ */
+ @PublicKey
+ public static final Key<Integer> CONTROL_MODE =
+ new Key<Integer>("android.control.mode", int.class);
+
+ /**
+ * <p>Control for which scene mode is currently active.</p>
+ * <p>Scene modes are custom camera modes optimized for a certain set of conditions and
+ * capture settings.</p>
+ * <p>This is the mode that is active when
+ * <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} == USE_SCENE_MODE</code>. Aside from FACE_PRIORITY, these modes will
+ * disable {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode}, {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}, and {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}
+ * while in use.</p>
+ * <p>The interpretation and implementation of these scene modes is left
+ * to the implementor of the camera device. Their behavior will not be
+ * consistent across all devices, and any given device may only implement
+ * a subset of these modes.</p>
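+ * <p>A minimal sketch (assuming a <code>characteristics</code> object and a request
+ * <code>builder</code>) of enabling a scene mode only when the device lists it:</p>
+ * <pre><code>int[] scenes = characteristics.get(
+ *         CameraCharacteristics.CONTROL_AVAILABLE_SCENE_MODES);
+ * for (int scene : scenes) {
+ *     if (scene == CameraMetadata.CONTROL_SCENE_MODE_NIGHT) {
+ *         builder.set(CaptureRequest.CONTROL_MODE,
+ *                 CameraMetadata.CONTROL_MODE_USE_SCENE_MODE);
+ *         builder.set(CaptureRequest.CONTROL_SCENE_MODE,
+ *                 CameraMetadata.CONTROL_SCENE_MODE_NIGHT);
+ *     }
+ * }
+ * </code></pre>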
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #CONTROL_SCENE_MODE_DISABLED DISABLED}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_FACE_PRIORITY FACE_PRIORITY}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_ACTION ACTION}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_PORTRAIT PORTRAIT}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_LANDSCAPE LANDSCAPE}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_NIGHT NIGHT}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_NIGHT_PORTRAIT NIGHT_PORTRAIT}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_THEATRE THEATRE}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_BEACH BEACH}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_SNOW SNOW}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_SUNSET SUNSET}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_STEADYPHOTO STEADYPHOTO}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_FIREWORKS FIREWORKS}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_SPORTS SPORTS}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_PARTY PARTY}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_CANDLELIGHT CANDLELIGHT}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_BARCODE BARCODE}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO HIGH_SPEED_VIDEO}</li>
+ * <li>{@link #CONTROL_SCENE_MODE_HDR HDR}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#CONTROL_AVAILABLE_SCENE_MODES android.control.availableSceneModes}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CaptureRequest#CONTROL_AF_MODE
+ * @see CameraCharacteristics#CONTROL_AVAILABLE_SCENE_MODES
+ * @see CaptureRequest#CONTROL_AWB_MODE
+ * @see CaptureRequest#CONTROL_MODE
+ * @see #CONTROL_SCENE_MODE_DISABLED
+ * @see #CONTROL_SCENE_MODE_FACE_PRIORITY
+ * @see #CONTROL_SCENE_MODE_ACTION
+ * @see #CONTROL_SCENE_MODE_PORTRAIT
+ * @see #CONTROL_SCENE_MODE_LANDSCAPE
+ * @see #CONTROL_SCENE_MODE_NIGHT
+ * @see #CONTROL_SCENE_MODE_NIGHT_PORTRAIT
+ * @see #CONTROL_SCENE_MODE_THEATRE
+ * @see #CONTROL_SCENE_MODE_BEACH
+ * @see #CONTROL_SCENE_MODE_SNOW
+ * @see #CONTROL_SCENE_MODE_SUNSET
+ * @see #CONTROL_SCENE_MODE_STEADYPHOTO
+ * @see #CONTROL_SCENE_MODE_FIREWORKS
+ * @see #CONTROL_SCENE_MODE_SPORTS
+ * @see #CONTROL_SCENE_MODE_PARTY
+ * @see #CONTROL_SCENE_MODE_CANDLELIGHT
+ * @see #CONTROL_SCENE_MODE_BARCODE
+ * @see #CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO
+ * @see #CONTROL_SCENE_MODE_HDR
+ */
+ @PublicKey
+ public static final Key<Integer> CONTROL_SCENE_MODE =
+ new Key<Integer>("android.control.sceneMode", int.class);
+
+ /**
+ * <p>Whether video stabilization is
+ * active.</p>
+ * <p>Video stabilization automatically warps images from
+ * the camera in order to stabilize motion between consecutive frames.</p>
+ * <p>If enabled, video stabilization can modify the
+ * {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion} to keep the video stream stabilized.</p>
+ * <p>Switching between different video stabilization modes may take several
+ * frames to initialize; the camera device will report the current mode
+ * in capture result metadata. For example, when "ON" mode is requested,
+ * the video stabilization mode in the first several capture results may
+ * still be "OFF", and it will become "ON" when the initialization is
+ * done.</p>
+ * <p>In addition, not all recording sizes or frame rates may be supported for
+ * stabilization by a device that reports stabilization support. It is guaranteed
+ * that an output targeting a MediaRecorder or MediaCodec will be stabilized if
+ * the recording resolution is less than or equal to 1920 x 1080 (width less than
+ * or equal to 1920, height less than or equal to 1080), and the recording
+ * frame rate is less than or equal to 30fps. At other sizes, the CaptureResult
+ * {@link CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE android.control.videoStabilizationMode} field will return
+ * OFF if the recording output is not stabilized, or if there are no output
+ * Surface types that can be stabilized.</p>
+ * <p>If a camera device supports both this mode and OIS
+ * ({@link CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE android.lens.opticalStabilizationMode}), turning both modes on may
+ * produce undesirable interaction, so it is recommended not to enable
+ * both at the same time.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #CONTROL_VIDEO_STABILIZATION_MODE_OFF OFF}</li>
+ * <li>{@link #CONTROL_VIDEO_STABILIZATION_MODE_ON ON}</li>
+ * </ul></p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE
+ * @see CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE
+ * @see CaptureRequest#SCALER_CROP_REGION
+ * @see #CONTROL_VIDEO_STABILIZATION_MODE_OFF
+ * @see #CONTROL_VIDEO_STABILIZATION_MODE_ON
+ */
+ @PublicKey
+ public static final Key<Integer> CONTROL_VIDEO_STABILIZATION_MODE =
+ new Key<Integer>("android.control.videoStabilizationMode", int.class);
+
+ /**
+ * <p>The amount of additional sensitivity boost applied to output images
+ * after RAW sensor data is captured.</p>
+ * <p>Some camera devices support additional digital sensitivity boosting in the
+ * camera processing pipeline after the sensor RAW image is captured.
+ * Such a boost will be applied to YUV/JPEG format output images but will have
+ * no effect on RAW output formats like RAW_SENSOR, RAW10, RAW12 or RAW_OPAQUE.</p>
+ * <p>This key will be <code>null</code> for devices that do not support any RAW format
+ * outputs. For devices that do support RAW format outputs, this key will always be
+ * present, and if a device does not support post RAW sensitivity boost, it will
+ * list <code>100</code> in this key.</p>
+ * <p>If the camera device cannot apply the exact boost requested, it will reduce the
+ * boost to the nearest supported value.
+ * The final boost value used will be available in the output capture result.</p>
+ * <p>For devices that support post RAW sensitivity boost, the YUV/JPEG output images
+ * of such device will have the total sensitivity of
+ * <code>{@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity} * {@link CaptureRequest#CONTROL_POST_RAW_SENSITIVITY_BOOST android.control.postRawSensitivityBoost} / 100</code>.
+ * The sensitivity of RAW format images will always be <code>{@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}</code>.</p>
+ * <p>This control is only effective if {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} is set to
+ * OFF; otherwise the auto-exposure algorithm will override this value.</p>
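+ * <p>A minimal sketch (assuming a <code>characteristics</code> object) of requesting
+ * a 2x boost clamped to the advertised range, when the key is present:</p>
+ * <pre><code>android.util.Range&lt;Integer&gt; range = characteristics.get(
+ *         CameraCharacteristics.CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE);
+ * if (range != null) {
+ *     builder.set(CaptureRequest.CONTROL_POST_RAW_SENSITIVITY_BOOST,
+ *             range.clamp(200)); // 200 == 2x in ISO arithmetic units
+ * }
+ * </code></pre>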
+ * <p><b>Units</b>: ISO arithmetic units, the same as {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}</p>
+ * <p><b>Range of valid values:</b><br>
+ * {@link CameraCharacteristics#CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE android.control.postRawSensitivityBoostRange}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CaptureRequest#CONTROL_MODE
+ * @see CaptureRequest#CONTROL_POST_RAW_SENSITIVITY_BOOST
+ * @see CameraCharacteristics#CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE
+ * @see CaptureRequest#SENSOR_SENSITIVITY
+ */
+ @PublicKey
+ public static final Key<Integer> CONTROL_POST_RAW_SENSITIVITY_BOOST =
+ new Key<Integer>("android.control.postRawSensitivityBoost", int.class);
+
+ /**
+ * <p>Allow camera device to enable zero-shutter-lag mode for requests with
+ * {@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent} == STILL_CAPTURE.</p>
+ * <p>If enableZsl is <code>true</code>, the camera device may enable zero-shutter-lag mode for requests with
+ * STILL_CAPTURE capture intent. The camera device may use images captured in the past to
+ * produce output images for a zero-shutter-lag request. The result metadata, including
+ * {@link CaptureResult#SENSOR_TIMESTAMP android.sensor.timestamp}, reflects the source frames used to produce output images.
+ * Therefore, the contents of the output images and the result metadata may be out of order
+ * compared to previous regular requests. enableZsl does not affect requests with other
+ * capture intents.</p>
+ * <p>For example, when requests are submitted in the following order:<br>
+ * Request A: enableZsl is ON, {@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent} is PREVIEW<br>
+ * Request B: enableZsl is ON, {@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent} is STILL_CAPTURE</p>
+ * <p>The output images for request B may have contents captured before the output images for
+ * request A, and the result metadata for request B may be older than the result metadata for
+ * request A.</p>
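+ * <p>A minimal sketch of opting in for still captures (error handling omitted):</p>
+ * <pre><code>CaptureRequest.Builder still = cameraDevice.createCaptureRequest(
+ *         CameraDevice.TEMPLATE_STILL_CAPTURE);
+ * still.set(CaptureRequest.CONTROL_ENABLE_ZSL, true);
+ * session.capture(still.build(), captureCallback, handler);
+ * </code></pre>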
+ * <p>Note that when enableZsl is <code>true</code>, it is not guaranteed to get output images captured in
+ * the past for requests with STILL_CAPTURE capture intent.</p>
+ * <p>For applications targeting SDK versions O and newer, the value of enableZsl in
+ * the TEMPLATE_STILL_CAPTURE template may be <code>true</code>. The value in other templates is always
+ * <code>false</code> if present.</p>
+ * <p>For applications targeting SDK versions older than O, the value of enableZsl in all
+ * capture templates is always <code>false</code> if present.</p>
+ * <p>For application-operated ZSL, use the CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_CAPTURE_INTENT
+ * @see CaptureResult#SENSOR_TIMESTAMP
+ */
+ @PublicKey
+ public static final Key<Boolean> CONTROL_ENABLE_ZSL =
+ new Key<Boolean>("android.control.enableZsl", boolean.class);
+
+ /**
+ * <p>Operation mode for edge
+ * enhancement.</p>
+ * <p>Edge enhancement improves sharpness and details in the captured image. OFF means
+ * no enhancement will be applied by the camera device.</p>
+ * <p>FAST/HIGH_QUALITY both mean camera device determined enhancement
+ * will be applied. HIGH_QUALITY mode indicates that the
+ * camera device will use the highest-quality enhancement algorithms,
+ * even if it slows down capture rate. FAST means the camera device will
+ * not slow down capture rate when applying edge enhancement. FAST may be the same as OFF if
+ * edge enhancement will slow down capture rate. Every output stream will have a similar
+ * amount of enhancement applied.</p>
+ * <p>ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular
+ * buffer of high-resolution images during preview and reprocess image(s) from that buffer
+ * into a final capture when triggered by the user. In this mode, the camera device applies
+ * edge enhancement to low-resolution streams (below maximum recording resolution) to
+ * maximize preview quality, but does not apply edge enhancement to high-resolution streams,
+ * since those will be reprocessed later if necessary.</p>
+ * <p>For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera
+ * device will apply FAST/HIGH_QUALITY YUV-domain edge enhancement, respectively.
+ * The camera device may adjust its internal edge enhancement parameters for best
+ * image quality based on the {@link CaptureRequest#REPROCESS_EFFECTIVE_EXPOSURE_FACTOR android.reprocess.effectiveExposureFactor}, if it is set.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #EDGE_MODE_OFF OFF}</li>
+ * <li>{@link #EDGE_MODE_FAST FAST}</li>
+ * <li>{@link #EDGE_MODE_HIGH_QUALITY HIGH_QUALITY}</li>
+ * <li>{@link #EDGE_MODE_ZERO_SHUTTER_LAG ZERO_SHUTTER_LAG}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#EDGE_AVAILABLE_EDGE_MODES android.edge.availableEdgeModes}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#EDGE_AVAILABLE_EDGE_MODES
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CaptureRequest#REPROCESS_EFFECTIVE_EXPOSURE_FACTOR
+ * @see #EDGE_MODE_OFF
+ * @see #EDGE_MODE_FAST
+ * @see #EDGE_MODE_HIGH_QUALITY
+ * @see #EDGE_MODE_ZERO_SHUTTER_LAG
+ */
+ @PublicKey
+ public static final Key<Integer> EDGE_MODE =
+ new Key<Integer>("android.edge.mode", int.class);
+
+ /**
+ * <p>The desired mode for the camera device's flash control.</p>
+ * <p>This control is only effective when flash unit is available
+ * (<code>{@link CameraCharacteristics#FLASH_INFO_AVAILABLE android.flash.info.available} == true</code>).</p>
+ * <p>When this control is used, the {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} must be set to ON or OFF.
+ * Otherwise, the camera device auto-exposure related flash control (ON_AUTO_FLASH,
+ * ON_ALWAYS_FLASH, or ON_AUTO_FLASH_REDEYE) will override this control.</p>
+ * <p>When set to OFF, the camera device will not fire flash for this capture.</p>
+ * <p>When set to SINGLE, the camera device will fire flash regardless of the camera
+ * device's auto-exposure routine's result. When used in still capture case, this
+ * control should be used along with auto-exposure (AE) precapture metering sequence
+ * ({@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger}), otherwise, the image may be incorrectly exposed.</p>
+ * <p>When set to TORCH, the flash will be on continuously. This mode can be used
+ * for preview, auto-focus assist, still capture, or video recording.</p>
+ * <p>The flash status will be reported by {@link CaptureResult#FLASH_STATE android.flash.state} in the capture result metadata.</p>
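+ * <p>A minimal sketch of turning the torch on for a repeating request, assuming the
+ * device reports a flash unit:</p>
+ * <pre><code>builder.set(CaptureRequest.CONTROL_AE_MODE,
+ *         CameraMetadata.CONTROL_AE_MODE_ON); // must be ON or OFF
+ * builder.set(CaptureRequest.FLASH_MODE,
+ *         CameraMetadata.FLASH_MODE_TORCH);
+ * session.setRepeatingRequest(builder.build(), callback, handler);
+ * </code></pre>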
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #FLASH_MODE_OFF OFF}</li>
+ * <li>{@link #FLASH_MODE_SINGLE SINGLE}</li>
+ * <li>{@link #FLASH_MODE_TORCH TORCH}</li>
+ * </ul></p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER
+ * @see CameraCharacteristics#FLASH_INFO_AVAILABLE
+ * @see CaptureResult#FLASH_STATE
+ * @see #FLASH_MODE_OFF
+ * @see #FLASH_MODE_SINGLE
+ * @see #FLASH_MODE_TORCH
+ */
+ @PublicKey
+ public static final Key<Integer> FLASH_MODE =
+ new Key<Integer>("android.flash.mode", int.class);
+
+ /**
+ * <p>Current state of the flash
+ * unit.</p>
+ * <p>When the camera device doesn't have a flash unit
+ * (i.e. <code>{@link CameraCharacteristics#FLASH_INFO_AVAILABLE android.flash.info.available} == false</code>), this state will always be UNAVAILABLE.
+ * Other states indicate the current flash status.</p>
+ * <p>In certain conditions, this will be available on LEGACY devices:</p>
+ * <ul>
+ * <li>Flash-less cameras always return UNAVAILABLE.</li>
+ * <li>Using {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} <code>==</code> ON_ALWAYS_FLASH
+ * will always return FIRED.</li>
+ * <li>Using {@link CaptureRequest#FLASH_MODE android.flash.mode} <code>==</code> TORCH
+ * will always return FIRED.</li>
+ * </ul>
+ * <p>In all other conditions the state will not be available on
+ * LEGACY devices (i.e. it will be <code>null</code>).</p>
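+ * <p>A minimal sketch (inside a capture callback) of reading this state defensively
+ * from a capture result:</p>
+ * <pre><code>Integer flashState = result.get(CaptureResult.FLASH_STATE);
+ * boolean flashFired = (flashState != null)
+ *         &amp;&amp; (flashState == CaptureResult.FLASH_STATE_FIRED);
+ * </code></pre>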
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #FLASH_STATE_UNAVAILABLE UNAVAILABLE}</li>
+ * <li>{@link #FLASH_STATE_CHARGING CHARGING}</li>
+ * <li>{@link #FLASH_STATE_READY READY}</li>
+ * <li>{@link #FLASH_STATE_FIRED FIRED}</li>
+ * <li>{@link #FLASH_STATE_PARTIAL PARTIAL}</li>
+ * </ul></p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Limited capability</b> -
+ * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CameraCharacteristics#FLASH_INFO_AVAILABLE
+ * @see CaptureRequest#FLASH_MODE
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see #FLASH_STATE_UNAVAILABLE
+ * @see #FLASH_STATE_CHARGING
+ * @see #FLASH_STATE_READY
+ * @see #FLASH_STATE_FIRED
+ * @see #FLASH_STATE_PARTIAL
+ */
+ @PublicKey
+ public static final Key<Integer> FLASH_STATE =
+ new Key<Integer>("android.flash.state", int.class);
+
+ /**
+ * <p>Operational mode for hot pixel correction.</p>
+ * <p>Hot pixel correction interpolates out, or otherwise removes, pixels
+ * that do not accurately measure the incoming light (i.e. pixels that
+ * are stuck at an arbitrary value or are oversensitive).</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #HOT_PIXEL_MODE_OFF OFF}</li>
+ * <li>{@link #HOT_PIXEL_MODE_FAST FAST}</li>
+ * <li>{@link #HOT_PIXEL_MODE_HIGH_QUALITY HIGH_QUALITY}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES android.hotPixel.availableHotPixelModes}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CameraCharacteristics#HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES
+ * @see #HOT_PIXEL_MODE_OFF
+ * @see #HOT_PIXEL_MODE_FAST
+ * @see #HOT_PIXEL_MODE_HIGH_QUALITY
+ */
+ @PublicKey
+ public static final Key<Integer> HOT_PIXEL_MODE =
+ new Key<Integer>("android.hotPixel.mode", int.class);
+
+ /**
+ * <p>A location object to use when generating image GPS metadata.</p>
+ * <p>Setting a location object in a request will include the GPS coordinates of the location
+ * into any JPEG images captured based on the request. These coordinates can then be
+ * viewed by anyone who receives the JPEG image.</p>
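+ * <p>A minimal sketch (where <code>getLastKnownLocation()</code> stands in for any
+ * location source the application already has) of attaching coordinates to a
+ * still-capture request:</p>
+ * <pre><code>android.location.Location location = getLastKnownLocation();
+ * if (location != null) {
+ *     builder.set(CaptureRequest.JPEG_GPS_LOCATION, location);
+ * }
+ * </code></pre>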
+ * <p>This key is available on all devices.</p>
+ */
+ @PublicKey
+ @SyntheticKey
+ public static final Key<android.location.Location> JPEG_GPS_LOCATION =
+ new Key<android.location.Location>("android.jpeg.gpsLocation", android.location.Location.class);
+
+ /**
+ * <p>GPS coordinates to include in output JPEG
+ * EXIF.</p>
+ * <p><b>Range of valid values:</b><br>
+ * (-180, 180], [-90, 90], [-inf, inf]</p>
+ * <p>This key is available on all devices.</p>
+ * @hide
+ */
+ public static final Key<double[]> JPEG_GPS_COORDINATES =
+ new Key<double[]>("android.jpeg.gpsCoordinates", double[].class);
+
+ /**
+ * <p>32 characters describing the GPS algorithm to
+ * include in EXIF.</p>
+ * <p><b>Units</b>: UTF-8 null-terminated string</p>
+ * <p>This key is available on all devices.</p>
+ * @hide
+ */
+ public static final Key<String> JPEG_GPS_PROCESSING_METHOD =
+ new Key<String>("android.jpeg.gpsProcessingMethod", String.class);
+
+ /**
+ * <p>Time GPS fix was made to include in
+ * EXIF.</p>
+ * <p><b>Units</b>: UTC in seconds since January 1, 1970</p>
+ * <p>This key is available on all devices.</p>
+ * @hide
+ */
+ public static final Key<Long> JPEG_GPS_TIMESTAMP =
+ new Key<Long>("android.jpeg.gpsTimestamp", long.class);
+
+ /**
+ * <p>The orientation for a JPEG image.</p>
+ * <p>The clockwise rotation angle in degrees, relative to the orientation
+ * of the camera, that the JPEG picture needs to be rotated by, to be viewed
+ * upright.</p>
+ * <p>Camera devices may either encode this value into the JPEG EXIF header, or
+ * rotate the image data to match this orientation. When the image data is rotated,
+ * the thumbnail data will also be rotated.</p>
+ * <p>Note that this orientation is relative to the orientation of the camera sensor, given
+ * by {@link CameraCharacteristics#SENSOR_ORIENTATION android.sensor.orientation}.</p>
+ * <p>To translate from the device orientation given by the Android sensor APIs, the following
+ * sample code may be used:</p>
+ * <pre><code>private int getJpegOrientation(CameraCharacteristics c, int deviceOrientation) {
+ * if (deviceOrientation == android.view.OrientationEventListener.ORIENTATION_UNKNOWN) return 0;
+ * int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION);
+ *
+ * // Round device orientation to a multiple of 90
+ * deviceOrientation = (deviceOrientation + 45) / 90 * 90;
+ *
+ * // Reverse device orientation for front-facing cameras
+ * boolean facingFront = c.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT;
+ * if (facingFront) deviceOrientation = -deviceOrientation;
+ *
+ * // Calculate desired JPEG orientation relative to camera orientation to make
+ * // the image upright relative to the device orientation
+ * int jpegOrientation = (sensorOrientation + deviceOrientation + 360) % 360;
+ *
+ * return jpegOrientation;
+ * }
+ * </code></pre>
+ * <p><b>Units</b>: Degrees in multiples of 90</p>
+ * <p><b>Range of valid values:</b><br>
+ * 0, 90, 180, 270</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#SENSOR_ORIENTATION
+ */
+ @PublicKey
+ public static final Key<Integer> JPEG_ORIENTATION =
+ new Key<Integer>("android.jpeg.orientation", int.class);
+
+ /**
+ * <p>Compression quality of the final JPEG
+ * image.</p>
+ * <p>85-95 is typical usage range.</p>
+ * <p><b>Range of valid values:</b><br>
+ * 1-100; larger is higher quality</p>
+ * <p>This key is available on all devices.</p>
+ */
+ @PublicKey
+ public static final Key<Byte> JPEG_QUALITY =
+ new Key<Byte>("android.jpeg.quality", byte.class);
+
+ /**
+ * <p>Compression quality of JPEG
+ * thumbnail.</p>
+ * <p><b>Range of valid values:</b><br>
+ * 1-100; larger is higher quality</p>
+ * <p>This key is available on all devices.</p>
+ */
+ @PublicKey
+ public static final Key<Byte> JPEG_THUMBNAIL_QUALITY =
+ new Key<Byte>("android.jpeg.thumbnailQuality", byte.class);
+
+ /**
+ * <p>Resolution of embedded JPEG thumbnail.</p>
+ * <p>When set to (0, 0), the JPEG EXIF will not contain a thumbnail,
+ * but the captured JPEG will still be a valid image.</p>
+ * <p>For best results, when issuing a request for a JPEG image, the thumbnail size selected
+ * should have the same aspect ratio as the main JPEG output.</p>
+ * <p>If the thumbnail image aspect ratio differs from the JPEG primary image aspect
+ * ratio, the camera device creates the thumbnail by cropping it from the primary image.
+ * For example, if the primary image has a 4:3 aspect ratio and the thumbnail image
+ * has a 16:9 aspect ratio, the primary image will be cropped vertically (letterbox) to
+ * generate the thumbnail image. The thumbnail image will always have a smaller Field
+ * Of View (FOV) than the primary image when aspect ratios differ.</p>
+ * <p>When an {@link CaptureRequest#JPEG_ORIENTATION android.jpeg.orientation} of non-zero degree is requested,
+ * the camera device will handle thumbnail rotation in one of the following ways:</p>
+ * <ul>
+ * <li>Set the {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}
+ * and keep jpeg and thumbnail image data unrotated.</li>
+ * <li>Rotate the jpeg and thumbnail image data and not set
+ * {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}. In this
+ * case, LIMITED or FULL hardware level devices will report the rotated thumbnail size
+ * in the capture result, so the width and height will be interchanged if 90 or 270
+ * degree orientation is requested. LEGACY devices will always report the unrotated
+ * thumbnail size.</li>
+ * </ul>
+ * <p><b>Range of valid values:</b><br>
+ * {@link CameraCharacteristics#JPEG_AVAILABLE_THUMBNAIL_SIZES android.jpeg.availableThumbnailSizes}</p>
+ * <p>This key is available on all devices.</p>
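+ * <p>A sketch of picking a thumbnail size whose aspect ratio matches the main
+ * JPEG output, per the guidance above; {@code chars}, {@code jpegSize}, and
+ * {@code builder} are assumed from the application's session setup:</p>
+ * <pre><code>android.util.Size[] sizes = chars.get(
+ *     CameraCharacteristics.JPEG_AVAILABLE_THUMBNAIL_SIZES);
+ * android.util.Size thumb = new android.util.Size(0, 0); // (0, 0) disables the thumbnail
+ * for (android.util.Size s : sizes) {
+ *     if (s.getWidth() * jpegSize.getHeight()
+ *             == s.getHeight() * jpegSize.getWidth()) {
+ *         thumb = s; // same aspect ratio as the main JPEG
+ *     }
+ * }
+ * builder.set(CaptureRequest.JPEG_THUMBNAIL_SIZE, thumb);
+ * </code></pre>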
+ *
+ * @see CameraCharacteristics#JPEG_AVAILABLE_THUMBNAIL_SIZES
+ * @see CaptureRequest#JPEG_ORIENTATION
+ */
+ @PublicKey
+ public static final Key<android.util.Size> JPEG_THUMBNAIL_SIZE =
+ new Key<android.util.Size>("android.jpeg.thumbnailSize", android.util.Size.class);
+
+ /**
+ * <p>The desired lens aperture size, as a ratio of lens focal length to the
+ * effective aperture diameter.</p>
+ * <p>Setting this value is only supported on the camera devices that have a variable
+ * aperture lens.</p>
+ * <p>When this is supported and {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is OFF,
+ * this can be set along with {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime},
+ * {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}, and {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}
+ * to achieve manual exposure control.</p>
+ * <p>The requested aperture value may take several frames to reach the
+ * requested value; the camera device will report the current (intermediate)
+ * aperture size in capture result metadata while the aperture is changing.
+ * While the aperture is still changing, {@link CaptureResult#LENS_STATE android.lens.state} will be set to MOVING.</p>
+ * <p>When this is supported and {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is one of
+ * the ON modes, this will be overridden by the camera device
+ * auto-exposure algorithm; the overridden values are then provided
+ * back to the user in the corresponding result.</p>
+ * <p><b>Units</b>: The f-number (f/N)</p>
+ * <p><b>Range of valid values:</b><br>
+ * {@link CameraCharacteristics#LENS_INFO_AVAILABLE_APERTURES android.lens.info.availableApertures}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
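+ * <p>A minimal sketch of the manual exposure control described above, on a
+ * device with a variable aperture; the values are illustrative only:</p>
+ * <pre><code>builder.set(CaptureRequest.CONTROL_AE_MODE,
+ *     CameraMetadata.CONTROL_AE_MODE_OFF);
+ * builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 10000000L); // 10 ms
+ * builder.set(CaptureRequest.SENSOR_SENSITIVITY, 400);         // ISO 400
+ * builder.set(CaptureRequest.LENS_APERTURE, 2.0f);             // f/2.0
+ * </code></pre>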
+ *
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#LENS_INFO_AVAILABLE_APERTURES
+ * @see CaptureResult#LENS_STATE
+ * @see CaptureRequest#SENSOR_EXPOSURE_TIME
+ * @see CaptureRequest#SENSOR_FRAME_DURATION
+ * @see CaptureRequest#SENSOR_SENSITIVITY
+ */
+ @PublicKey
+ public static final Key<Float> LENS_APERTURE =
+ new Key<Float>("android.lens.aperture", float.class);
+
+ /**
+ * <p>The desired setting for the lens neutral density filter(s).</p>
+ * <p>This control will not be supported on most camera devices.</p>
+ * <p>Lens filters are typically used to lower the amount of light the
+ * sensor is exposed to (measured in steps of EV). As used here, an EV
+ * step is the standard logarithmic representation, which is
+ * non-negative and inversely proportional to the amount of light
+ * hitting the sensor. For example, setting this to 0 would result
+ * in no reduction of the incoming light, and setting this to 2 would
+ * mean that the filter is set to reduce incoming light by two stops
+ * (allowing 1/4 of the prior amount of light to the sensor).</p>
+ * <p>It may take several frames before the lens filter density changes
+ * to the requested value. While the filter density is still changing,
+ * {@link CaptureResult#LENS_STATE android.lens.state} will be set to MOVING.</p>
+ * <p><b>Units</b>: Exposure Value (EV)</p>
+ * <p><b>Range of valid values:</b><br>
+ * {@link CameraCharacteristics#LENS_INFO_AVAILABLE_FILTER_DENSITIES android.lens.info.availableFilterDensities}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
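+ * <p>Since each EV step halves the incoming light, the transmitted fraction
+ * for a given density follows directly (a sketch of the arithmetic, not an
+ * API call):</p>
+ * <pre><code>float density = 2.0f; // EV steps of filtering
+ * double transmitted = Math.pow(2.0, -density); // 0.25, i.e. 1/4 of the light
+ * </code></pre>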
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#LENS_INFO_AVAILABLE_FILTER_DENSITIES
+ * @see CaptureResult#LENS_STATE
+ */
+ @PublicKey
+ public static final Key<Float> LENS_FILTER_DENSITY =
+ new Key<Float>("android.lens.filterDensity", float.class);
+
+ /**
+ * <p>The desired lens focal length; used for optical zoom.</p>
+ * <p>This setting controls the physical focal length of the camera
+ * device's lens. Changing the focal length changes the field of
+ * view of the camera device, and is usually used for optical zoom.</p>
+ * <p>Like {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance} and {@link CaptureRequest#LENS_APERTURE android.lens.aperture}, this
+ * setting won't be applied instantaneously, and it may take several
+ * frames before the lens can change to the requested focal length.
+ * While the focal length is still changing, {@link CaptureResult#LENS_STATE android.lens.state} will
+ * be set to MOVING.</p>
+ * <p>Optical zoom will not be supported on most devices.</p>
+ * <p><b>Units</b>: Millimeters</p>
+ * <p><b>Range of valid values:</b><br>
+ * {@link CameraCharacteristics#LENS_INFO_AVAILABLE_FOCAL_LENGTHS android.lens.info.availableFocalLengths}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#LENS_APERTURE
+ * @see CaptureRequest#LENS_FOCUS_DISTANCE
+ * @see CameraCharacteristics#LENS_INFO_AVAILABLE_FOCAL_LENGTHS
+ * @see CaptureResult#LENS_STATE
+ */
+ @PublicKey
+ public static final Key<Float> LENS_FOCAL_LENGTH =
+ new Key<Float>("android.lens.focalLength", float.class);
+
+ /**
+ * <p>Desired distance to plane of sharpest focus,
+ * measured from frontmost surface of the lens.</p>
+ * <p>Should be zero for fixed-focus cameras.</p>
+ * <p><b>Units</b>: See {@link CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION android.lens.info.focusDistanceCalibration} for details</p>
+ * <p><b>Range of valid values:</b><br>
+ * &gt;= 0</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
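+ * <p>A minimal sketch of manual focus at roughly half a meter, assuming the
+ * device reports calibrated or approximate focus distances (diopters are
+ * 1 / distance-in-meters):</p>
+ * <pre><code>builder.set(CaptureRequest.CONTROL_AF_MODE,
+ *     CameraMetadata.CONTROL_AF_MODE_OFF);
+ * builder.set(CaptureRequest.LENS_FOCUS_DISTANCE, 1.0f / 0.5f); // 2 diopters
+ * </code></pre>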
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION
+ */
+ @PublicKey
+ public static final Key<Float> LENS_FOCUS_DISTANCE =
+ new Key<Float>("android.lens.focusDistance", float.class);
+
+ /**
+ * <p>The range of scene distances that are in
+ * sharp focus (depth of field).</p>
+ * <p>If variable focus is not supported, the camera device can still report a
+ * fixed depth of field range.</p>
+ * <p><b>Units</b>: A pair of focus distances in diopters: (near,
+ * far); see {@link CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION android.lens.info.focusDistanceCalibration} for details.</p>
+ * <p><b>Range of valid values:</b><br>
+ * &gt;=0</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Limited capability</b> -
+ * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
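+ * <p>A sketch of reading the in-focus range from a completed capture;
+ * {@code result} is a received CaptureResult:</p>
+ * <pre><code>android.util.Pair&lt;Float, Float&gt; range =
+ *     result.get(CaptureResult.LENS_FOCUS_RANGE);
+ * if (range != null) {
+ *     float near = range.first;  // diopters
+ *     float far = range.second;  // diopters
+ * }
+ * </code></pre>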
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION
+ */
+ @PublicKey
+ public static final Key<android.util.Pair<Float,Float>> LENS_FOCUS_RANGE =
+ new Key<android.util.Pair<Float,Float>>("android.lens.focusRange", new TypeReference<android.util.Pair<Float,Float>>() {{ }});
+
+ /**
+ * <p>Sets whether the camera device uses optical image stabilization (OIS)
+ * when capturing images.</p>
+ * <p>OIS is used to compensate for motion blur due to small
+ * movements of the camera during capture. Unlike digital image
+ * stabilization ({@link CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE android.control.videoStabilizationMode}), OIS
+ * makes use of mechanical elements to stabilize the camera
+ * sensor, and thus allows for longer exposure times before
+ * camera shake becomes apparent.</p>
+ * <p>Switching between different optical stabilization modes may take several
+ * frames to initialize; the camera device will report the current mode in
+ * capture result metadata. For example, when "ON" mode is requested, the
+ * optical stabilization mode in the first several capture results may still
+ * be "OFF", and it will become "ON" when the initialization is done.</p>
+ * <p>If a camera device supports both OIS and digital image stabilization
+ * ({@link CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE android.control.videoStabilizationMode}), turning both modes on may produce undesirable
+ * interaction, so it is recommended not to enable both at the same time.</p>
+ * <p>Not all devices will support OIS; see
+ * {@link CameraCharacteristics#LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION android.lens.info.availableOpticalStabilization} for
+ * available controls.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #LENS_OPTICAL_STABILIZATION_MODE_OFF OFF}</li>
+ * <li>{@link #LENS_OPTICAL_STABILIZATION_MODE_ON ON}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION android.lens.info.availableOpticalStabilization}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Limited capability</b> -
+ * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
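+ * <p>Because the mode reported in results may lag the request, a sketch of
+ * confirming that OIS has actually engaged; {@code result} is a received
+ * CaptureResult:</p>
+ * <pre><code>Integer ois = result.get(CaptureResult.LENS_OPTICAL_STABILIZATION_MODE);
+ * boolean engaged = (ois != null)
+ *         &amp;&amp; (ois == CameraMetadata.LENS_OPTICAL_STABILIZATION_MODE_ON);
+ * </code></pre>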
+ *
+ * @see CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION
+ * @see #LENS_OPTICAL_STABILIZATION_MODE_OFF
+ * @see #LENS_OPTICAL_STABILIZATION_MODE_ON
+ */
+ @PublicKey
+ public static final Key<Integer> LENS_OPTICAL_STABILIZATION_MODE =
+ new Key<Integer>("android.lens.opticalStabilizationMode", int.class);
+
+ /**
+ * <p>Current lens status.</p>
+ * <p>For lens parameters {@link CaptureRequest#LENS_FOCAL_LENGTH android.lens.focalLength}, {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance},
+ * {@link CaptureRequest#LENS_FILTER_DENSITY android.lens.filterDensity} and {@link CaptureRequest#LENS_APERTURE android.lens.aperture}, when changes are requested,
+ * they may take several frames to reach the requested values. This state indicates
+ * the current status of the lens parameters.</p>
+ * <p>When the state is STATIONARY, the lens parameters are not changing. This could be
+ * either because the parameters are all fixed, or because the lens has had enough
+ * time to reach the most recently-requested values.
+ * If none of these lens parameters is changeable for a camera device, as listed below:</p>
+ * <ul>
+ * <li>Fixed focus (<code>{@link CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE android.lens.info.minimumFocusDistance} == 0</code>), which means
+ * {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance} parameter will always be 0.</li>
+ * <li>Fixed focal length ({@link CameraCharacteristics#LENS_INFO_AVAILABLE_FOCAL_LENGTHS android.lens.info.availableFocalLengths} contains single value),
+ * which means the optical zoom is not supported.</li>
+ * <li>No ND filter ({@link CameraCharacteristics#LENS_INFO_AVAILABLE_FILTER_DENSITIES android.lens.info.availableFilterDensities} contains only 0).</li>
+ * <li>Fixed aperture ({@link CameraCharacteristics#LENS_INFO_AVAILABLE_APERTURES android.lens.info.availableApertures} contains single value).</li>
+ * </ul>
+ * <p>Then this state will always be STATIONARY.</p>
+ * <p>When the state is MOVING, it indicates that at least one of the lens parameters
+ * is changing.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #LENS_STATE_STATIONARY STATIONARY}</li>
+ * <li>{@link #LENS_STATE_MOVING MOVING}</li>
+ * </ul></p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Limited capability</b> -
+ * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
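+ * <p>A sketch of waiting for the lens to settle after changing focus, zoom,
+ * filter density, or aperture; {@code result} is a received CaptureResult:</p>
+ * <pre><code>Integer state = result.get(CaptureResult.LENS_STATE);
+ * if (state != null &amp;&amp; state == CameraMetadata.LENS_STATE_STATIONARY) {
+ *     // all lens parameters have reached their most recently requested values
+ * }
+ * </code></pre>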
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CaptureRequest#LENS_APERTURE
+ * @see CaptureRequest#LENS_FILTER_DENSITY
+ * @see CaptureRequest#LENS_FOCAL_LENGTH
+ * @see CaptureRequest#LENS_FOCUS_DISTANCE
+ * @see CameraCharacteristics#LENS_INFO_AVAILABLE_APERTURES
+ * @see CameraCharacteristics#LENS_INFO_AVAILABLE_FILTER_DENSITIES
+ * @see CameraCharacteristics#LENS_INFO_AVAILABLE_FOCAL_LENGTHS
+ * @see CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE
+ * @see #LENS_STATE_STATIONARY
+ * @see #LENS_STATE_MOVING
+ */
+ @PublicKey
+ public static final Key<Integer> LENS_STATE =
+ new Key<Integer>("android.lens.state", int.class);
+
+ /**
+ * <p>The orientation of the camera relative to the sensor
+ * coordinate system.</p>
+ * <p>The four coefficients that describe the quaternion
+ * rotation from the Android sensor coordinate system to a
+ * camera-aligned coordinate system where the X-axis is
+ * aligned with the long side of the image sensor, the Y-axis
+ * is aligned with the short side of the image sensor, and
+ * the Z-axis is aligned with the optical axis of the sensor.</p>
+ * <p>To convert from the quaternion coefficients <code>(x,y,z,w)</code>
+ * to the axis of rotation <code>(a_x, a_y, a_z)</code> and rotation
+ * amount <code>theta</code>, the following formulas can be used:</p>
+ * <pre><code> theta = 2 * acos(w)
+ * a_x = x / sin(theta/2)
+ * a_y = y / sin(theta/2)
+ * a_z = z / sin(theta/2)
+ * </code></pre>
+ * <p>To create a 3x3 rotation matrix that applies the rotation
+ * defined by this quaternion, the following matrix can be
+ * used:</p>
+ * <pre><code>R = [ 1 - 2y^2 - 2z^2, 2xy - 2zw, 2xz + 2yw,
+ * 2xy + 2zw, 1 - 2x^2 - 2z^2, 2yz - 2xw,
+ * 2xz - 2yw, 2yz + 2xw, 1 - 2x^2 - 2y^2 ]
+ * </code></pre>
+ * <p>This matrix can then be used to apply the rotation to a
+ * column vector point with</p>
+ * <p><code>p' = Rp</code></p>
+ * <p>where <code>p</code> is in the device sensor coordinate system, and
+ * <code>p'</code> is in the camera-oriented coordinate system.</p>
+ * <p><b>Units</b>:
+ * Quaternion coefficients</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
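+ * <p>A sketch of the axis/angle recovery above, guarding the identity
+ * rotation where <code>w == 1</code>; {@code result} is a received
+ * CaptureResult:</p>
+ * <pre><code>float[] q = result.get(CaptureResult.LENS_POSE_ROTATION); // {x, y, z, w}
+ * double theta = 2.0 * Math.acos(q[3]);
+ * double s = Math.sin(theta / 2.0);
+ * double ax = (s != 0.0) ? q[0] / s : 0.0;
+ * double ay = (s != 0.0) ? q[1] / s : 0.0;
+ * double az = (s != 0.0) ? q[2] / s : 0.0;
+ * </code></pre>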
+ */
+ @PublicKey
+ public static final Key<float[]> LENS_POSE_ROTATION =
+ new Key<float[]>("android.lens.poseRotation", float[].class);
+
+ /**
+ * <p>Position of the camera optical center.</p>
+ * <p>The position of the camera device's lens optical center,
+ * as a three-dimensional vector <code>(x,y,z)</code>, relative to the
+ * optical center of the largest camera device facing in the
+ * same direction as this camera, in the {@link android.hardware.SensorEvent Android sensor coordinate
+ * axes}. Note that only the axis definitions are shared with
+ * the sensor coordinate system, but not the origin.</p>
+ * <p>If this device is the largest or only camera device with a
+ * given facing, then this position will be <code>(0, 0, 0)</code>; a
+ * camera device with a lens optical center located 3 cm from
+ * the main sensor along the +X axis (to the right from the
+ * user's perspective) will report <code>(0.03, 0, 0)</code>.</p>
+ * <p>To transform pixel coordinates between two cameras
+ * facing the same direction, first the source camera
+ * {@link CameraCharacteristics#LENS_RADIAL_DISTORTION android.lens.radialDistortion} must be corrected for. Then
+ * the source camera {@link CameraCharacteristics#LENS_INTRINSIC_CALIBRATION android.lens.intrinsicCalibration} needs
+ * to be applied, followed by the {@link CameraCharacteristics#LENS_POSE_ROTATION android.lens.poseRotation}
+ * of the source camera, the translation of the source camera
+ * relative to the destination camera, the
+ * {@link CameraCharacteristics#LENS_POSE_ROTATION android.lens.poseRotation} of the destination camera, and
+ * finally the inverse of {@link CameraCharacteristics#LENS_INTRINSIC_CALIBRATION android.lens.intrinsicCalibration}
+ * of the destination camera. This obtains a
+ * radial-distortion-free coordinate in the destination
+ * camera pixel coordinates.</p>
+ * <p>To compare this against a real image from the destination
+ * camera, the destination camera image then needs to be
+ * corrected for radial distortion before comparison or
+ * sampling.</p>
+ * <p><b>Units</b>: Meters</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CameraCharacteristics#LENS_INTRINSIC_CALIBRATION
+ * @see CameraCharacteristics#LENS_POSE_ROTATION
+ * @see CameraCharacteristics#LENS_RADIAL_DISTORTION
+ */
+ @PublicKey
+ public static final Key<float[]> LENS_POSE_TRANSLATION =
+ new Key<float[]>("android.lens.poseTranslation", float[].class);
+
+ /**
+ * <p>The parameters for this camera device's intrinsic
+ * calibration.</p>
+ * <p>The five calibration parameters that describe the
+ * transform from camera-centric 3D coordinates to sensor
+ * pixel coordinates:</p>
+ * <pre><code>[f_x, f_y, c_x, c_y, s]
+ * </code></pre>
+ * <p>Where <code>f_x</code> and <code>f_y</code> are the horizontal and vertical
+ * focal lengths, <code>[c_x, c_y]</code> is the position of the optical
+ * axis, and <code>s</code> is a skew parameter for the sensor plane not
+ * being aligned with the lens plane.</p>
+ * <p>These are typically used within a transformation matrix K:</p>
+ * <pre><code>K = [ f_x, s, c_x,
+ * 0, f_y, c_y,
+ * 0, 0, 1 ]
+ * </code></pre>
+ * <p>which can then be combined with the camera pose rotation
+ * <code>R</code> and translation <code>t</code> ({@link CameraCharacteristics#LENS_POSE_ROTATION android.lens.poseRotation} and
+ * {@link CameraCharacteristics#LENS_POSE_TRANSLATION android.lens.poseTranslation}, respectively) to calculate the
+ * complete transform from world coordinates to pixel
+ * coordinates:</p>
+ * <pre><code>P = [ K 0 * [ R t
+ * 0 1 ] 0 1 ]
+ * </code></pre>
+ * <p>and with <code>p_w</code> being a point in the world coordinate system
+ * and <code>p_s</code> being a point in the camera active pixel array
+ * coordinate system, and with the mapping including the
+ * homogeneous division by z:</p>
+ * <pre><code> p_h = (x_h, y_h, z_h) = P p_w
+ * p_s = p_h / z_h
+ * </code></pre>
+ * <p>so <code>[x_s, y_s]</code> are the pixel coordinates of the world
+ * point, <code>z_s = 1</code>, and <code>z_h</code> is a measurement of disparity
+ * (depth) in pixel coordinates.</p>
+ * <p>Note that the coordinate system for this transform is the
+ * {@link CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE android.sensor.info.preCorrectionActiveArraySize} system,
+ * where <code>(0,0)</code> is the top-left of the
+ * preCorrectionActiveArraySize rectangle. Once the pose and
+ * intrinsic calibration transforms have been applied to a
+ * world point, then the {@link CameraCharacteristics#LENS_RADIAL_DISTORTION android.lens.radialDistortion}
+ * transform needs to be applied, and the result adjusted to
+ * be in the {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize} coordinate
+ * system (where <code>(0, 0)</code> is the top-left of the
+ * activeArraySize rectangle), to determine the final pixel
+ * coordinate of the world point for processed (non-RAW)
+ * output buffers.</p>
+ * <p><b>Units</b>:
+ * Pixels in the
+ * {@link CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE android.sensor.info.preCorrectionActiveArraySize}
+ * coordinate system.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
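+ * <p>A sketch of applying K and the homogeneous division to a camera-space
+ * point, using the five parameters directly; the point below is illustrative
+ * and {@code result} is a received CaptureResult:</p>
+ * <pre><code>float[] k = result.get(CaptureResult.LENS_INTRINSIC_CALIBRATION);
+ * // k = {f_x, f_y, c_x, c_y, s}
+ * double x = 0.1, y = -0.2, z = 1.0; // illustrative camera-space point
+ * double xS = (k[0] * x + k[4] * y) / z + k[2];
+ * double yS = (k[1] * y) / z + k[3];
+ * </code></pre>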
+ *
+ * @see CameraCharacteristics#LENS_POSE_ROTATION
+ * @see CameraCharacteristics#LENS_POSE_TRANSLATION
+ * @see CameraCharacteristics#LENS_RADIAL_DISTORTION
+ * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ * @see CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
+ */
+ @PublicKey
+ public static final Key<float[]> LENS_INTRINSIC_CALIBRATION =
+ new Key<float[]>("android.lens.intrinsicCalibration", float[].class);
+
+ /**
+ * <p>The correction coefficients to correct for this camera device's
+ * radial and tangential lens distortion.</p>
+ * <p>Four radial distortion coefficients <code>[kappa_0, kappa_1, kappa_2,
+ * kappa_3]</code> and two tangential distortion coefficients
+ * <code>[kappa_4, kappa_5]</code> that can be used to correct the
+ * lens's geometric distortion with the mapping equations:</p>
+ * <pre><code> x_c = x_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
+ * kappa_4 * (2 * x_i * y_i) + kappa_5 * ( r^2 + 2 * x_i^2 )
+ * y_c = y_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) +
+ * kappa_5 * (2 * x_i * y_i) + kappa_4 * ( r^2 + 2 * y_i^2 )
+ * </code></pre>
+ * <p>Here, <code>[x_c, y_c]</code> are the coordinates to sample in the
+ * input image that correspond to the pixel values in the
+ * corrected image at the coordinate <code>[x_i, y_i]</code>:</p>
+ * <pre><code> correctedImage(x_i, y_i) = sample_at(x_c, y_c, inputImage)
+ * </code></pre>
+ * <p>The pixel coordinates are defined in a normalized
+ * coordinate system related to the
+ * {@link CameraCharacteristics#LENS_INTRINSIC_CALIBRATION android.lens.intrinsicCalibration} calibration fields.
+ * Both <code>[x_i, y_i]</code> and <code>[x_c, y_c]</code> have <code>(0,0)</code> at the
+ * lens optical center <code>[c_x, c_y]</code>. The maximum magnitudes
+ * of both x and y coordinates are normalized to be 1 at the
+ * edge further from the optical center, so the range
+ * for both dimensions is <code>-1 &lt;= x &lt;= 1</code>.</p>
+ * <p>Finally, <code>r</code> represents the radial distance from the
+ * optical center, <code>r^2 = x_i^2 + y_i^2</code>, and its magnitude
+ * is therefore no larger than <code>|r| &lt;= sqrt(2)</code>.</p>
+ * <p>The distortion model used is the Brown-Conrady model.</p>
+ * <p><b>Units</b>:
+ * Unitless coefficients.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
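+ * <p>A sketch of the correction mapping above for one normalized coordinate,
+ * with the optical center at <code>(0, 0)</code>; the input point is
+ * illustrative and {@code result} is a received CaptureResult:</p>
+ * <pre><code>float[] d = result.get(CaptureResult.LENS_RADIAL_DISTORTION);
+ * // d = {kappa_0, kappa_1, kappa_2, kappa_3, kappa_4, kappa_5}
+ * double xI = 0.25, yI = -0.5; // illustrative normalized coordinates
+ * double r2 = xI * xI + yI * yI;
+ * double radial = d[0] + d[1] * r2 + d[2] * r2 * r2 + d[3] * r2 * r2 * r2;
+ * double xC = xI * radial + d[4] * (2 * xI * yI) + d[5] * (r2 + 2 * xI * xI);
+ * double yC = yI * radial + d[5] * (2 * xI * yI) + d[4] * (r2 + 2 * yI * yI);
+ * </code></pre>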
+ *
+ * @see CameraCharacteristics#LENS_INTRINSIC_CALIBRATION
+ */
+ @PublicKey
+ public static final Key<float[]> LENS_RADIAL_DISTORTION =
+ new Key<float[]>("android.lens.radialDistortion", float[].class);
+
+ /**
+ * <p>Mode of operation for the noise reduction algorithm.</p>
+ * <p>The noise reduction algorithm attempts to improve image quality by removing
+ * excessive noise added by the capture process, especially in dark conditions.</p>
+ * <p>OFF means no noise reduction will be applied by the camera device, for both raw and
+ * YUV domain.</p>
+ * <p>MINIMAL means that only sensor raw domain basic noise reduction is enabled, to remove
+ * demosaicing or other processing artifacts. For YUV_REPROCESSING, MINIMAL is the same as OFF.
+ * This mode is optional and may not be supported by all devices. The application should check
+ * {@link CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES android.noiseReduction.availableNoiseReductionModes} before using it.</p>
+ * <p>FAST/HIGH_QUALITY both mean camera device determined noise filtering
+ * will be applied. HIGH_QUALITY mode indicates that the camera device
+ * will use the highest-quality noise filtering algorithms,
+ * even if it slows down capture rate. FAST means the camera device will not
+ * slow down capture rate when applying noise filtering. FAST may be the same as MINIMAL if
+ * MINIMAL is listed, or the same as OFF if any noise filtering will slow down capture rate.
+ * Every output stream will have a similar amount of enhancement applied.</p>
+ * <p>ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular
+ * buffer of high-resolution images during preview and reprocess image(s) from that buffer
+ * into a final capture when triggered by the user. In this mode, the camera device applies
+ * noise reduction to low-resolution streams (below maximum recording resolution) to maximize
+ * preview quality, but does not apply noise reduction to high-resolution streams, since
+ * those will be reprocessed later if necessary.</p>
+ * <p>For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera device
+ * will apply FAST/HIGH_QUALITY YUV domain noise reduction, respectively. The camera device
+ * may adjust the noise reduction parameters for best image quality based on the
+ * {@link CaptureRequest#REPROCESS_EFFECTIVE_EXPOSURE_FACTOR android.reprocess.effectiveExposureFactor} if it is set.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #NOISE_REDUCTION_MODE_OFF OFF}</li>
+ * <li>{@link #NOISE_REDUCTION_MODE_FAST FAST}</li>
+ * <li>{@link #NOISE_REDUCTION_MODE_HIGH_QUALITY HIGH_QUALITY}</li>
+ * <li>{@link #NOISE_REDUCTION_MODE_MINIMAL MINIMAL}</li>
+ * <li>{@link #NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG ZERO_SHUTTER_LAG}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES android.noiseReduction.availableNoiseReductionModes}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES
+ * @see CaptureRequest#REPROCESS_EFFECTIVE_EXPOSURE_FACTOR
+ * @see #NOISE_REDUCTION_MODE_OFF
+ * @see #NOISE_REDUCTION_MODE_FAST
+ * @see #NOISE_REDUCTION_MODE_HIGH_QUALITY
+ * @see #NOISE_REDUCTION_MODE_MINIMAL
+ * @see #NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG
+ */
+ @PublicKey
+ public static final Key<Integer> NOISE_REDUCTION_MODE =
+ new Key<Integer>("android.noiseReduction.mode", int.class);
+
+ /**
+ * <p>Whether a result given to the framework is the
+ * final one for the capture, or only a partial result that contains a
+ * subset of the full set of dynamic metadata
+ * values.</p>
+ * <p>The entries in the result metadata buffers for a
+ * single capture may not overlap, except for this entry. The
+ * FINAL buffers must retain FIFO ordering relative to the
+ * requests that generate them, so the FINAL buffer for frame 3 must
+ * always be sent to the framework after the FINAL buffer for frame 2, and
+ * before the FINAL buffer for frame 4. PARTIAL buffers may be returned
+ * in any order relative to other frames, but all PARTIAL buffers for a given
+ * capture must arrive before the FINAL buffer for that capture. This entry may
+ * only be used by the camera device if quirks.usePartialResult is set to 1.</p>
+ * <p><b>Range of valid values:</b><br>
+ * Optional. Default value is FINAL.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * @deprecated
+ * @hide
+ */
+ @Deprecated
+ public static final Key<Boolean> QUIRKS_PARTIAL_RESULT =
+ new Key<Boolean>("android.quirks.partialResult", boolean.class);
+
+ /**
+ * <p>A frame counter set by the framework. This value monotonically
+ * increases with every new result (that is, each new result has a unique
+ * frameCount value).</p>
+ * <p>Reset on release()</p>
+ * <p><b>Units</b>: count of frames</p>
+ * <p><b>Range of valid values:</b><br>
+ * &gt; 0</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * @deprecated
+ * @hide
+ */
+ @Deprecated
+ public static final Key<Integer> REQUEST_FRAME_COUNT =
+ new Key<Integer>("android.request.frameCount", int.class);
+
+ /**
+ * <p>An application-specified ID for the current
+ * request. Must be maintained unchanged in the output
+ * frame.</p>
+ * <p><b>Units</b>: arbitrary integer assigned by application</p>
+ * <p><b>Range of valid values:</b><br>
+ * Any int</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * @hide
+ */
+ public static final Key<Integer> REQUEST_ID =
+ new Key<Integer>("android.request.id", int.class);
+
+ /**
+ * <p>Specifies the number of pipeline stages the frame went
+ * through from when it was exposed to when the final completed result
+ * was available to the framework.</p>
+ * <p>Depending on what settings are used in the request, and
+ * what streams are configured, the data may undergo less processing,
+ * and some pipeline stages skipped.</p>
+ * <p>See {@link CameraCharacteristics#REQUEST_PIPELINE_MAX_DEPTH android.request.pipelineMaxDepth} for more details.</p>
+ * <p><b>Range of valid values:</b><br>
+ * &lt;= {@link CameraCharacteristics#REQUEST_PIPELINE_MAX_DEPTH android.request.pipelineMaxDepth}</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#REQUEST_PIPELINE_MAX_DEPTH
+ */
+ @PublicKey
+ public static final Key<Byte> REQUEST_PIPELINE_DEPTH =
+ new Key<Byte>("android.request.pipelineDepth", byte.class);
+
+ /**
+ * <p>The desired region of the sensor to read out for this capture.</p>
+ * <p>This control can be used to implement digital zoom.</p>
+ * <p>The crop region coordinate system is based on
+ * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}, with <code>(0, 0)</code> being the
+ * top-left corner of the sensor active array.</p>
+ * <p>Output streams use this rectangle to produce their output,
+ * cropping to a smaller region if necessary to maintain the
+ * stream's aspect ratio, then scaling the sensor input to
+ * match the output's configured resolution.</p>
+ * <p>The crop region is applied after the RAW to other color
+ * space (e.g. YUV) conversion. Since raw streams
+ * (e.g. RAW16) don't have the conversion stage, they are not
+ * croppable. The crop region will be ignored by raw streams.</p>
+ * <p>For non-raw streams, any additional per-stream cropping will
+ * be done to maximize the final pixel area of the stream.</p>
+ * <p>For example, if the crop region is set to a 4:3 aspect
+ * ratio, then 4:3 streams will use the exact crop
+ * region. 16:9 streams will further crop vertically
+ * (letterbox).</p>
+ * <p>Conversely, if the crop region is set to a 16:9, then 4:3
+ * outputs will crop horizontally (pillarbox), and 16:9
+ * streams will match exactly. These additional crops will
+ * be centered within the crop region.</p>
+ * <p>The width and height of the crop region cannot
+ * be set to be smaller than
+ * <code>floor( activeArraySize.width / {@link CameraCharacteristics#SCALER_AVAILABLE_MAX_DIGITAL_ZOOM android.scaler.availableMaxDigitalZoom} )</code> and
+ * <code>floor( activeArraySize.height / {@link CameraCharacteristics#SCALER_AVAILABLE_MAX_DIGITAL_ZOOM android.scaler.availableMaxDigitalZoom} )</code>, respectively.</p>
+ * <p>The camera device may adjust the crop region to account
+ * for rounding and other hardware requirements; the final
+ * crop region used will be included in the output capture
+ * result.</p>
+ * <p><b>Units</b>: Pixel coordinates relative to
+ * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}</p>
+ * <p>This key is available on all devices.</p>
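+ * <p>A minimal sketch of a centered 2x digital zoom; {@code chars} and
+ * {@code builder} are assumed, and the zoom factor must not exceed
+ * {@link CameraCharacteristics#SCALER_AVAILABLE_MAX_DIGITAL_ZOOM android.scaler.availableMaxDigitalZoom}:</p>
+ * <pre><code>android.graphics.Rect active = chars.get(
+ *     CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+ * float zoom = 2.0f;
+ * int cropW = (int) (active.width() / zoom);
+ * int cropH = (int) (active.height() / zoom);
+ * int left = (active.width() - cropW) / 2;
+ * int top = (active.height() - cropH) / 2;
+ * builder.set(CaptureRequest.SCALER_CROP_REGION,
+ *     new android.graphics.Rect(left, top, left + cropW, top + cropH));
+ * </code></pre>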
+ *
+ * @see CameraCharacteristics#SCALER_AVAILABLE_MAX_DIGITAL_ZOOM
+ * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ */
+ @PublicKey
+ public static final Key<android.graphics.Rect> SCALER_CROP_REGION =
+ new Key<android.graphics.Rect>("android.scaler.cropRegion", android.graphics.Rect.class);
+
+ /**
+ * <p>Duration each pixel is exposed to
+ * light.</p>
+ * <p>If the sensor can't expose this exact duration, it will shorten the
+ * duration exposed to the nearest possible value (rather than expose longer).
+ * The final exposure time used will be available in the output capture result.</p>
+ * <p>This control is only effective if {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} is set to
+ * OFF; otherwise the auto-exposure algorithm will override this value.</p>
+ * <p><b>Units</b>: Nanoseconds</p>
+ * <p><b>Range of valid values:</b><br>
+ * {@link CameraCharacteristics#SENSOR_INFO_EXPOSURE_TIME_RANGE android.sensor.info.exposureTimeRange}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CaptureRequest#CONTROL_MODE
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#SENSOR_INFO_EXPOSURE_TIME_RANGE
+ */
+ @PublicKey
+ public static final Key<Long> SENSOR_EXPOSURE_TIME =
+ new Key<Long>("android.sensor.exposureTime", long.class);
+
+ /**
+ * <p>Duration from start of frame exposure to
+ * start of next frame exposure.</p>
+ * <p>The maximum frame rate that can be supported by a camera subsystem is
+ * a function of many factors:</p>
+ * <ul>
+ * <li>Requested resolutions of output image streams</li>
+ * <li>Availability of binning / skipping modes on the imager</li>
+ * <li>The bandwidth of the imager interface</li>
+ * <li>The bandwidth of the various ISP processing blocks</li>
+ * </ul>
+ * <p>Since these factors can vary greatly between different ISPs and
+ * sensors, the camera abstraction tries to represent the bandwidth
+ * restrictions with as simple a model as possible.</p>
+ * <p>The model presented has the following characteristics:</p>
+ * <ul>
+ * <li>The image sensor is always configured to output the smallest
+ * resolution possible given the application's requested output stream
+ * sizes. The smallest resolution is defined as being at least as large
+ * as the largest requested output stream size; the camera pipeline must
+ * never digitally upsample sensor data when the crop region covers the
+ * whole sensor. In general, this means that if only small output stream
+ * resolutions are configured, the sensor can provide a higher frame
+ * rate.</li>
+ * <li>Since any request may use any or all the currently configured
+ * output streams, the sensor and ISP must be configured to support
+ * scaling a single capture to all the streams at the same time. This
+ * means the camera pipeline must be ready to produce the largest
+ * requested output size without any delay. Therefore, the overall
+ * frame rate of a given configured stream set is governed only by the
+ * largest requested stream resolution.</li>
+ * <li>Using more than one output stream in a request does not affect the
+ * frame duration.</li>
+ * <li>Certain format-streams may need to do additional background processing
+ * before data is consumed/produced by that stream. These processors
+ * can run concurrently to the rest of the camera pipeline, but
+ * cannot process more than 1 capture at a time.</li>
+ * </ul>
+ * <p>The necessary information for the application, given the model above,
+ * is provided via the {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} field using
+ * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration }.
+ * These are used to determine the maximum frame rate / minimum frame
+ * duration that is possible for a given stream configuration.</p>
+ * <p>Specifically, the application can use the following rules to
+ * determine the minimum frame duration it can request from the camera
+ * device:</p>
+ * <ol>
+ * <li>Let the set of currently configured input/output streams
+ * be called <code>S</code>.</li>
+ * <li>Find the minimum frame durations for each stream in <code>S</code>, by looking
+ * it up in {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} using {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration }
+ * (with its respective size/format). Let this set of frame durations be
+ * called <code>F</code>.</li>
+ * <li>For any given request <code>R</code>, the minimum frame duration allowed
+ * for <code>R</code> is the maximum out of all values in <code>F</code>. Let the streams
+ * used in <code>R</code> be called <code>S_r</code>.</li>
+ * </ol>
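+ * <p>A sketch of applying these rules for two configured streams; the sizes
+ * are illustrative and {@code map} is the device's
+ * {@link android.hardware.camera2.params.StreamConfigurationMap }:</p>
+ * <pre><code>android.util.Size previewSize = new android.util.Size(1280, 720);
+ * android.util.Size jpegSize = new android.util.Size(4032, 3024);
+ * long previewMin = map.getOutputMinFrameDuration(
+ *     android.graphics.SurfaceTexture.class, previewSize);
+ * long jpegMin = map.getOutputMinFrameDuration(
+ *     android.graphics.ImageFormat.JPEG, jpegSize);
+ * long minFrameDuration = Math.max(previewMin, jpegMin); // maximum over F
+ * builder.set(CaptureRequest.SENSOR_FRAME_DURATION, minFrameDuration);
+ * </code></pre>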
+ * <p>If none of the streams in <code>S_r</code> have a stall time (listed in {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration }
+ * using its respective size/format), then the frame duration in <code>F</code>
+ * determines the steady state frame rate that the application will get
+ * if it uses <code>R</code> as a repeating request. Let this special kind of
+ * request be called <code>Rsimple</code>.</p>
+ * <p>A repeating request <code>Rsimple</code> can be <em>occasionally</em> interleaved
+ * by a single capture of a new request <code>Rstall</code> (which has at least
+ * one in-use stream with a non-0 stall time) and if <code>Rstall</code> has the
+ * same minimum frame duration this will not cause a frame rate loss
+ * if all buffers from the previous <code>Rstall</code> have already been
+ * delivered.</p>
+ * <p>For more details about stalling, see
+ * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration }.</p>
+ * <p>This control is only effective if {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} is set to
+ * OFF; otherwise the auto-exposure algorithm will override this value.</p>
+ * <p><b>Units</b>: Nanoseconds</p>
+ * <p><b>Range of valid values:</b><br>
+ * See {@link CameraCharacteristics#SENSOR_INFO_MAX_FRAME_DURATION android.sensor.info.maxFrameDuration},
+ * {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap}. The duration
+ * is capped to <code>max(duration, exposureTime + overhead)</code>.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CaptureRequest#CONTROL_MODE
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
+ * @see CameraCharacteristics#SENSOR_INFO_MAX_FRAME_DURATION
+ */
+ @PublicKey
+ public static final Key<Long> SENSOR_FRAME_DURATION =
+ new Key<Long>("android.sensor.frameDuration", long.class);
+
+ /**
+ * <p>The amount of gain applied to sensor data
+ * before processing.</p>
+ * <p>The sensitivity is the standard ISO sensitivity value,
+ * as defined in ISO 12232:2006.</p>
+ * <p>The sensitivity must be within {@link CameraCharacteristics#SENSOR_INFO_SENSITIVITY_RANGE android.sensor.info.sensitivityRange}, and
+ * if it is less than {@link CameraCharacteristics#SENSOR_MAX_ANALOG_SENSITIVITY android.sensor.maxAnalogSensitivity}, the camera device
+ * is guaranteed to use only analog amplification for applying the gain.</p>
+ * <p>If the camera device cannot apply the exact sensitivity
+ * requested, it will reduce the gain to the nearest supported
+ * value. The final sensitivity used will be available in the
+ * output capture result.</p>
+ * <p>This control is only effective if {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} or {@link CaptureRequest#CONTROL_MODE android.control.mode} is set to
+ * OFF; otherwise the auto-exposure algorithm will override this value.</p>
+ * <p><b>Units</b>: ISO arithmetic units</p>
+ * <p><b>Range of valid values:</b><br>
+ * {@link CameraCharacteristics#SENSOR_INFO_SENSITIVITY_RANGE android.sensor.info.sensitivityRange}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
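+ * <p>A sketch of choosing a sensitivity that stays within the analog range
+ * when possible; {@code chars} and {@code builder} are assumed:</p>
+ * <pre><code>int iso = 800; // illustrative target
+ * Integer maxAnalog = chars.get(
+ *     CameraCharacteristics.SENSOR_MAX_ANALOG_SENSITIVITY);
+ * if (maxAnalog != null &amp;&amp; iso &lt;= maxAnalog) {
+ *     // gain will be applied purely in the analog domain
+ * }
+ * builder.set(CaptureRequest.SENSOR_SENSITIVITY, iso);
+ * </code></pre>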
+ *
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CaptureRequest#CONTROL_MODE
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#SENSOR_INFO_SENSITIVITY_RANGE
+ * @see CameraCharacteristics#SENSOR_MAX_ANALOG_SENSITIVITY
+ */
+ @PublicKey
+ public static final Key<Integer> SENSOR_SENSITIVITY =
+ new Key<Integer>("android.sensor.sensitivity", int.class);
+
+ /**
+ * <p>Time at start of exposure of first
+ * row of the image sensor active array, in nanoseconds.</p>
+ * <p>The timestamps are also included in all image
+ * buffers produced for the same capture, and will be identical
+ * on all the outputs.</p>
+ * <p>When {@link CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE android.sensor.info.timestampSource} <code>==</code> UNKNOWN,
+ * the timestamps measure time since an unspecified starting point,
+ * and are monotonically increasing. They can be compared with the
+ * timestamps for other captures from the same camera device, but are
+ * not guaranteed to be comparable to any other time source.</p>
+ * <p>When {@link CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE android.sensor.info.timestampSource} <code>==</code> REALTIME, the
+ * timestamps measure time in the same timebase as {@link android.os.SystemClock#elapsedRealtimeNanos }, and they can
+ * be compared to other timestamps from other subsystems that
+ * are using that base.</p>
+ * <p>For reprocessing, the timestamp will match the start of exposure of
+ * the input image, i.e. {@link CaptureResult#SENSOR_TIMESTAMP the
+ * timestamp} in the TotalCaptureResult that was used to create the
+ * reprocess capture request.</p>
+ * <p><b>Units</b>: Nanoseconds</p>
+ * <p><b>Range of valid values:</b><br>
+ * &gt; 0</p>
+ * <p>This key is available on all devices.</p>
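+ * <p>A sketch of measuring capture-to-delivery latency, valid only when the
+ * timestamp source is REALTIME; {@code chars} and {@code result} are
+ * assumed:</p>
+ * <pre><code>Integer src = chars.get(
+ *     CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE);
+ * if (src != null
+ *         &amp;&amp; src == CameraMetadata.SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME) {
+ *     long latencyNs = android.os.SystemClock.elapsedRealtimeNanos()
+ *             - result.get(CaptureResult.SENSOR_TIMESTAMP);
+ * }
+ * </code></pre>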
+ *
+ * @see CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE
+ */
+ @PublicKey
+ public static final Key<Long> SENSOR_TIMESTAMP =
+ new Key<Long>("android.sensor.timestamp", long.class);
+
+ /**
+ * <p>The estimated camera neutral color in the native sensor colorspace at
+ * the time of capture.</p>
+ * <p>This value gives the neutral color point encoded as an RGB value in the
+ * native sensor color space. The neutral color point indicates the
+ * currently estimated white point of the scene illumination. It can be
+ * used to interpolate between the provided color transforms when
+ * processing raw sensor data.</p>
+ * <p>The order of the values is R, G, B, with R in the lowest index.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ */
+ @PublicKey
+ public static final Key<Rational[]> SENSOR_NEUTRAL_COLOR_POINT =
+ new Key<Rational[]>("android.sensor.neutralColorPoint", Rational[].class);
+
+ /**
+ * <p>Noise model coefficients for each CFA mosaic channel.</p>
+ * <p>This key contains two noise model coefficients for each CFA channel
+ * corresponding to the sensor amplification (S) and sensor readout
+ * noise (O). These are given as pairs of coefficients for each channel
+ * in the same order as channels listed for the CFA layout key
+ * (see {@link CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT android.sensor.info.colorFilterArrangement}). This is
+ * represented as an array of Pair&lt;Double, Double&gt;, where
+ * the first member of the Pair at index n is the S coefficient and the
+ * second member is the O coefficient for the nth color channel in the CFA.</p>
+ * <p>These coefficients are used in a two parameter noise model to describe
+ * the amount of noise present in the image for each CFA channel. The
+ * noise model used here is:</p>
+ * <p>N(x) = sqrt(Sx + O)</p>
+ * <p>Where x represents the recorded signal of a CFA channel normalized to
+ * the range [0, 1], and S and O are the noise model coefficients for
+ * that channel.</p>
+ * <p>A more detailed description of the noise model can be found in the
+ * Adobe DNG specification for the NoiseProfile tag.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
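+ * <p>A sketch of evaluating the model for the first CFA channel at an
+ * illustrative normalized signal level; {@code result} is a received
+ * CaptureResult:</p>
+ * <pre><code>android.util.Pair&lt;Double, Double&gt;[] profile =
+ *     result.get(CaptureResult.SENSOR_NOISE_PROFILE);
+ * double x = 0.5; // normalized signal in [0, 1]
+ * double noise = Math.sqrt(profile[0].first * x + profile[0].second); // N(x)
+ * </code></pre>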
+ *
+ * @see CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
+ */
+ @PublicKey
+ public static final Key<android.util.Pair<Double,Double>[]> SENSOR_NOISE_PROFILE =
+ new Key<android.util.Pair<Double,Double>[]>("android.sensor.noiseProfile", new TypeReference<android.util.Pair<Double,Double>[]>() {{ }});
+
+ /**
+ * <p>The worst-case divergence between Bayer green channels.</p>
+ * <p>This value is an estimate of the worst case split between the
+ * Bayer green channels in the red and blue rows in the sensor color
+ * filter array.</p>
+ * <p>The green split is calculated as follows:</p>
+ * <ol>
+ * <li>A 5x5 pixel (or larger) window W within the active sensor array is
+ * chosen. The term 'pixel' here is taken to mean a group of 4 Bayer
+ * mosaic channels (R, Gr, Gb, B). The location and size of the window
+ * chosen is implementation defined, and should be chosen to provide a
+ * green split estimate that is both representative of the entire image
+ * for this camera sensor, and can be calculated quickly.</li>
+ * <li>The arithmetic mean of the green channels from the red
+ * rows (mean_Gr) within W is computed.</li>
+ * <li>The arithmetic mean of the green channels from the blue
+ * rows (mean_Gb) within W is computed.</li>
+ * <li>The maximum ratio R of the two means is computed as follows:
+ * <code>R = max((mean_Gr + 1)/(mean_Gb + 1), (mean_Gb + 1)/(mean_Gr + 1))</code></li>
+ * </ol>
+ * <p>The ratio R is the green split divergence reported for this property,
+ * which represents how much the green channels differ in the mosaic
+ * pattern. This value is typically used to determine the treatment of
+ * the green mosaic channels when demosaicing.</p>
+ * <p>The green split value can be roughly interpreted as follows:</p>
+ * <ul>
+ * <li>R &lt; 1.03 is a negligible split (&lt;3% divergence).</li>
+ * <li>1.03 &lt;= R &lt;= 1.20 will require some software
+ * correction to avoid demosaic errors (3-20% divergence).</li>
+ * <li>R &gt; 1.20 will require strong software correction to produce
+ * a usable image (&gt;20% divergence).</li>
+ * </ul>
+ * <p><b>Range of valid values:</b><br></p>
+ * <p>&gt;= 0</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ */
+ @PublicKey
+ public static final Key<Float> SENSOR_GREEN_SPLIT =
+ new Key<Float>("android.sensor.greenSplit", float.class);
+
+ /**
+ * <p>A pixel <code>[R, G_even, G_odd, B]</code> that supplies the test pattern
+ * when {@link CaptureRequest#SENSOR_TEST_PATTERN_MODE android.sensor.testPatternMode} is SOLID_COLOR.</p>
+ * <p>Each color channel is treated as an unsigned 32-bit integer.
+ * The camera device then uses the most significant X bits
+ * that correspond to how many bits are in its Bayer raw sensor
+ * output.</p>
+ * <p>For example, a sensor with RAW10 Bayer output would use the
+ * 10 most significant bits from each color channel.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
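+ * <p>A minimal sketch of requesting a solid full-scale red test frame;
+ * {@code builder} is assumed, and each channel is treated as unsigned:</p>
+ * <pre><code>builder.set(CaptureRequest.SENSOR_TEST_PATTERN_MODE,
+ *     CameraMetadata.SENSOR_TEST_PATTERN_MODE_SOLID_COLOR);
+ * builder.set(CaptureRequest.SENSOR_TEST_PATTERN_DATA,
+ *     new int[] {0xFFFFFFFF, 0, 0, 0}); // {R, G_even, G_odd, B}
+ * </code></pre>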
+ *
+ * @see CaptureRequest#SENSOR_TEST_PATTERN_MODE
+ */
+ @PublicKey
+ public static final Key<int[]> SENSOR_TEST_PATTERN_DATA =
+ new Key<int[]>("android.sensor.testPatternData", int[].class);
+
+ /**
+ * <p>When enabled, the sensor sends a test pattern instead of
+ * doing a real exposure from the camera.</p>
+ * <p>When a test pattern is enabled, all manual sensor controls specified
+ * by android.sensor.* will be ignored. All other controls should
+ * work as normal.</p>
+ * <p>For example, if manual flash is enabled, flash firing should still
+ * occur (and the test pattern will remain unmodified, since the flash
+ * would not actually affect it).</p>
+ * <p>Defaults to OFF.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #SENSOR_TEST_PATTERN_MODE_OFF OFF}</li>
+ * <li>{@link #SENSOR_TEST_PATTERN_MODE_SOLID_COLOR SOLID_COLOR}</li>
+ * <li>{@link #SENSOR_TEST_PATTERN_MODE_COLOR_BARS COLOR_BARS}</li>
+ * <li>{@link #SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY COLOR_BARS_FADE_TO_GRAY}</li>
+ * <li>{@link #SENSOR_TEST_PATTERN_MODE_PN9 PN9}</li>
+ * <li>{@link #SENSOR_TEST_PATTERN_MODE_CUSTOM1 CUSTOM1}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#SENSOR_AVAILABLE_TEST_PATTERN_MODES android.sensor.availableTestPatternModes}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CameraCharacteristics#SENSOR_AVAILABLE_TEST_PATTERN_MODES
+ * @see #SENSOR_TEST_PATTERN_MODE_OFF
+ * @see #SENSOR_TEST_PATTERN_MODE_SOLID_COLOR
+ * @see #SENSOR_TEST_PATTERN_MODE_COLOR_BARS
+ * @see #SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY
+ * @see #SENSOR_TEST_PATTERN_MODE_PN9
+ * @see #SENSOR_TEST_PATTERN_MODE_CUSTOM1
+ */
+ @PublicKey
+ public static final Key<Integer> SENSOR_TEST_PATTERN_MODE =
+ new Key<Integer>("android.sensor.testPatternMode", int.class);
+
+ /**
+ * <p>Duration between the start of first row exposure
+ * and the start of last row exposure.</p>
+ * <p>This is the exposure time skew between the first and last
+ * row exposure start times. The first row and the last row are
+ * the first and last rows inside of the
+ * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.</p>
+ * <p>For typical camera sensors that use rolling shutters, this is also equivalent
+ * to the frame readout time.</p>
+ * <p><b>Units</b>: Nanoseconds</p>
+ * <p><b>Range of valid values:</b><br>
+ * &gt;= 0 and &lt;
+ * {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration }.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Limited capability</b> -
+ * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ */
+ @PublicKey
+ public static final Key<Long> SENSOR_ROLLING_SHUTTER_SKEW =
+ new Key<Long>("android.sensor.rollingShutterSkew", long.class);
+
+ /**
+ * <p>A per-frame dynamic black level offset for each of the color filter
+ * arrangement (CFA) mosaic channels.</p>
+ * <p>Camera sensor black levels may vary dramatically for different
+ * capture settings (e.g. {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}). The fixed black
+ * level reported by {@link CameraCharacteristics#SENSOR_BLACK_LEVEL_PATTERN android.sensor.blackLevelPattern} may be too
+ * inaccurate to represent the actual value on a per-frame basis. The
+ * camera device internal pipeline relies on reliable black level values
+ * to process the raw images appropriately. To get the best image
+ * quality, the camera device may choose to estimate the per frame black
+ * level values either based on optically shielded black regions
+ * ({@link CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS android.sensor.opticalBlackRegions}) or its internal model.</p>
+ * <p>This key reports the camera device estimated per-frame zero light
+ * value for each of the CFA mosaic channels in the camera sensor. The
+ * {@link CameraCharacteristics#SENSOR_BLACK_LEVEL_PATTERN android.sensor.blackLevelPattern} may only represent a coarse
+ * approximation of the actual black level values. This value is the
+ * black level used in the camera device's internal image processing pipeline,
+ * and is generally more accurate than the fixed black level values.
+ * However, since these values are estimated by the camera device, they
+ * may not be as accurate as the black level values calculated from the
+ * optical black pixels reported by {@link CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS android.sensor.opticalBlackRegions}.</p>
+ * <p>The values are given in the same order as channels listed for the CFA
+ * layout key (see {@link CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT android.sensor.info.colorFilterArrangement}), i.e. the
+ * nth value given corresponds to the black level offset for the nth
+ * color channel listed in the CFA.</p>
+ * <p>This key will be available if {@link CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS android.sensor.opticalBlackRegions} is
+ * available or the camera device advertises this key via
+ * {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys }.</p>
+ * <p><b>Range of valid values:</b><br>
+ * &gt;= 0 for each.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
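+ * <p>As an illustrative sketch (assuming a <code>CaptureResult</code> named
+ * <code>result</code>; the variable name is hypothetical), the per-channel
+ * offsets can be read in CFA order:</p>
+ * <pre><code>float[] dynamicBlackLevels = result.get(
+ *         CaptureResult.SENSOR_DYNAMIC_BLACK_LEVEL);
+ * if (dynamicBlackLevels != null) {
+ *     // Four entries, ordered by the CFA layout reported in
+ *     // android.sensor.info.colorFilterArrangement
+ *     // (e.g. R, G_even, G_odd, B for an RGGB arrangement).
+ *     float firstChannelOffset = dynamicBlackLevels[0];
+ * }
+ * </code></pre>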
+ *
+ * @see CameraCharacteristics#SENSOR_BLACK_LEVEL_PATTERN
+ * @see CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
+ * @see CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS
+ * @see CaptureRequest#SENSOR_SENSITIVITY
+ */
+ @PublicKey
+ public static final Key<float[]> SENSOR_DYNAMIC_BLACK_LEVEL =
+ new Key<float[]>("android.sensor.dynamicBlackLevel", float[].class);
+
+ /**
+ * <p>Maximum raw value output by sensor for this frame.</p>
+ * <p>Since the {@link CameraCharacteristics#SENSOR_BLACK_LEVEL_PATTERN android.sensor.blackLevelPattern} may change for different
+ * capture settings (e.g., {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}), the white
+ * level will change accordingly. This key is similar to
+ * {@link CameraCharacteristics#SENSOR_INFO_WHITE_LEVEL android.sensor.info.whiteLevel}, but specifies the camera device
+ * estimated white level for each frame.</p>
+ * <p>This key will be available if {@link CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS android.sensor.opticalBlackRegions} is
+ * available or the camera device advertises this key via
+ * {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys }.</p>
+ * <p><b>Range of valid values:</b><br>
+ * &gt;= 0</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CameraCharacteristics#SENSOR_BLACK_LEVEL_PATTERN
+ * @see CameraCharacteristics#SENSOR_INFO_WHITE_LEVEL
+ * @see CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS
+ * @see CaptureRequest#SENSOR_SENSITIVITY
+ */
+ @PublicKey
+ public static final Key<Integer> SENSOR_DYNAMIC_WHITE_LEVEL =
+ new Key<Integer>("android.sensor.dynamicWhiteLevel", int.class);
+
+ /**
+ * <p>Quality of lens shading correction applied
+ * to the image data.</p>
+ * <p>When set to OFF mode, no lens shading correction will be applied by the
+ * camera device, and an identity lens shading map data will be provided
+ * if <code>{@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE android.statistics.lensShadingMapMode} == ON</code>. For example, for a lens
+ * shading map of size <code>[ 4, 3 ]</code>,
+ * the output {@link CaptureResult#STATISTICS_LENS_SHADING_CORRECTION_MAP android.statistics.lensShadingCorrectionMap} for this case will be an identity
+ * map shown below:</p>
+ * <pre><code>[ 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
+ * 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
+ * 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
+ * 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
+ * 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,
+ * 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ]
+ * </code></pre>
+ * <p>When set to other modes, lens shading correction will be applied by the camera
+ * device. Applications can request lens shading map data by setting
+ * {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE android.statistics.lensShadingMapMode} to ON, and then the camera device will provide lens
+ * shading map data in {@link CaptureResult#STATISTICS_LENS_SHADING_CORRECTION_MAP android.statistics.lensShadingCorrectionMap}; the returned shading map
+ * data will be the one applied by the camera device for this capture request.</p>
+ * <p>The shading map data may depend on the auto-exposure (AE) and AWB statistics, therefore
+ * the reliability of the map data may be affected by the AE and AWB algorithms. When AE and
+ * AWB are in AUTO modes ({@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} <code>!=</code> OFF and {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} <code>!=</code>
+ * OFF), to get the best results, applications should wait for the AE and AWB
+ * to converge before using the returned shading map data.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #SHADING_MODE_OFF OFF}</li>
+ * <li>{@link #SHADING_MODE_FAST FAST}</li>
+ * <li>{@link #SHADING_MODE_HIGH_QUALITY HIGH_QUALITY}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#SHADING_AVAILABLE_MODES android.shading.availableModes}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
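+ * <p>As an illustrative sketch (assuming a <code>CaptureRequest.Builder</code>
+ * named <code>builder</code>; the variable name is hypothetical), high-quality
+ * shading correction can be requested together with shading map output:</p>
+ * <pre><code>builder.set(CaptureRequest.SHADING_MODE,
+ *         CameraMetadata.SHADING_MODE_HIGH_QUALITY);
+ * builder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
+ *         CameraMetadata.STATISTICS_LENS_SHADING_MAP_MODE_ON);
+ * </code></pre>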
+ *
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CaptureRequest#CONTROL_AWB_MODE
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#SHADING_AVAILABLE_MODES
+ * @see CaptureResult#STATISTICS_LENS_SHADING_CORRECTION_MAP
+ * @see CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE
+ * @see #SHADING_MODE_OFF
+ * @see #SHADING_MODE_FAST
+ * @see #SHADING_MODE_HIGH_QUALITY
+ */
+ @PublicKey
+ public static final Key<Integer> SHADING_MODE =
+ new Key<Integer>("android.shading.mode", int.class);
+
+ /**
+ * <p>Operating mode for the face detector
+ * unit.</p>
+ * <p>Whether face detection is enabled, and whether it
+ * should output just the basic fields or the full set of
+ * fields.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #STATISTICS_FACE_DETECT_MODE_OFF OFF}</li>
+ * <li>{@link #STATISTICS_FACE_DETECT_MODE_SIMPLE SIMPLE}</li>
+ * <li>{@link #STATISTICS_FACE_DETECT_MODE_FULL FULL}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES android.statistics.info.availableFaceDetectModes}</p>
+ * <p>This key is available on all devices.</p>
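+ * <p>As an illustrative sketch (assuming a <code>CameraCharacteristics</code>
+ * instance named <code>characteristics</code> and a
+ * <code>CaptureRequest.Builder</code> named <code>builder</code>; the variable
+ * names are hypothetical), the most detailed supported mode can be selected:</p>
+ * <pre><code>// Modes are ordered OFF (0), SIMPLE (1), FULL (2).
+ * int[] modes = characteristics.get(
+ *         CameraCharacteristics.STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES);
+ * int bestMode = CameraMetadata.STATISTICS_FACE_DETECT_MODE_OFF;
+ * for (int mode : modes) {
+ *     bestMode = Math.max(bestMode, mode);
+ * }
+ * builder.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, bestMode);
+ * </code></pre>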
+ *
+ * @see CameraCharacteristics#STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES
+ * @see #STATISTICS_FACE_DETECT_MODE_OFF
+ * @see #STATISTICS_FACE_DETECT_MODE_SIMPLE
+ * @see #STATISTICS_FACE_DETECT_MODE_FULL
+ */
+ @PublicKey
+ public static final Key<Integer> STATISTICS_FACE_DETECT_MODE =
+ new Key<Integer>("android.statistics.faceDetectMode", int.class);
+
+ /**
+ * <p>List of unique IDs for detected faces.</p>
+ * <p>Each detected face is given a unique ID that is valid for as long as the face is visible
+ * to the camera device. A face that leaves the field of view and later returns may be
+ * assigned a new ID.</p>
+ * <p>Only available if {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode} <code>==</code> FULL.</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE
+ * @hide
+ */
+ public static final Key<int[]> STATISTICS_FACE_IDS =
+ new Key<int[]>("android.statistics.faceIds", int[].class);
+
+ /**
+ * <p>List of landmarks for detected
+ * faces.</p>
+ * <p>The coordinate system is that of {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}, with
+ * <code>(0, 0)</code> being the top-left pixel of the active array.</p>
+ * <p>Only available if {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode} <code>==</code> FULL.</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE
+ * @hide
+ */
+ public static final Key<int[]> STATISTICS_FACE_LANDMARKS =
+ new Key<int[]>("android.statistics.faceLandmarks", int[].class);
+
+ /**
+ * <p>List of the bounding rectangles for detected
+ * faces.</p>
+ * <p>The coordinate system is that of {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}, with
+ * <code>(0, 0)</code> being the top-left pixel of the active array.</p>
+ * <p>Only available if {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode} <code>!=</code> OFF.</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE
+ * @hide
+ */
+ public static final Key<android.graphics.Rect[]> STATISTICS_FACE_RECTANGLES =
+ new Key<android.graphics.Rect[]>("android.statistics.faceRectangles", android.graphics.Rect[].class);
+
+ /**
+ * <p>List of the face confidence scores for
+ * detected faces.</p>
+ * <p>Only available if {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode} != OFF.</p>
+ * <p><b>Range of valid values:</b><br>
+ * 1-100</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE
+ * @hide
+ */
+ public static final Key<byte[]> STATISTICS_FACE_SCORES =
+ new Key<byte[]>("android.statistics.faceScores", byte[].class);
+
+ /**
+ * <p>List of the faces detected through camera face detection
+ * in this capture.</p>
+ * <p>Only available if {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode} <code>!=</code> OFF.</p>
+ * <p>This key is available on all devices.</p>
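+ * <p>As an illustrative sketch (assuming a <code>CaptureResult</code> named
+ * <code>result</code>; the variable name is hypothetical), detected faces can be
+ * inspected in a capture callback:</p>
+ * <pre><code>Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
+ * if (faces != null) {
+ *     for (Face face : faces) {
+ *         // Bounds are in android.sensor.info.activeArraySize coordinates.
+ *         Rect bounds = face.getBounds();
+ *         int score = face.getScore(); // 1-100
+ *     }
+ * }
+ * </code></pre>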
+ *
+ * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE
+ */
+ @PublicKey
+ @SyntheticKey
+ public static final Key<android.hardware.camera2.params.Face[]> STATISTICS_FACES =
+ new Key<android.hardware.camera2.params.Face[]>("android.statistics.faces", android.hardware.camera2.params.Face[].class);
+
+ /**
+ * <p>The shading map is a low-resolution floating-point map
+ * that lists the coefficients used to correct for vignetting, for each
+ * Bayer color channel.</p>
+ * <p>The map provided here is the same map that is used by the camera device to
+ * correct both color shading and vignetting for output non-RAW images.</p>
+ * <p>When there is no lens shading correction applied to RAW
+ * output images ({@link CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED android.sensor.info.lensShadingApplied} <code>==</code>
+ * false), this map is the complete lens shading correction
+ * map; when there is some lens shading correction applied to
+ * the RAW output image ({@link CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED android.sensor.info.lensShadingApplied}<code>==</code> true), this map reports the remaining lens shading
+ * correction map that needs to be applied to get shading
+ * corrected images that match the camera device's output for
+ * non-RAW formats.</p>
+ * <p>For a complete shading correction map, the least shaded
+ * section of the image will have a gain factor of 1; all
+ * other sections will have gains above 1.</p>
+ * <p>When {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} = TRANSFORM_MATRIX, the map
+ * will take into account the colorCorrection settings.</p>
+ * <p>The shading map is for the entire active pixel array, and is not
+ * affected by the crop region specified in the request. Each shading map
+ * entry is the value of the shading compensation map over a specific
+ * pixel on the sensor. Specifically, with a (N x M) resolution shading
+ * map, and an active pixel array size (W x H), shading map entry
+ * (x,y) ∈ (0 ... N-1, 0 ... M-1) is the value of the shading map at
+ * pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
+ * The map is assumed to be bilinearly interpolated between the sample points.</p>
+ * <p>The channel order is [R, Geven, Godd, B], where Geven is the green
+ * channel for the even rows of a Bayer pattern, and Godd is the odd rows.
+ * The shading map is stored in a fully interleaved format.</p>
+ * <p>The shading map will generally have on the order of 30-40 rows and columns,
+ * and will be smaller than 64x64.</p>
+ * <p>As an example, given a very small map defined as:</p>
+ * <pre><code>width,height = [ 4, 3 ]
+ * values =
+ * [ 1.3, 1.2, 1.15, 1.2, 1.2, 1.2, 1.15, 1.2,
+ * 1.1, 1.2, 1.2, 1.2, 1.3, 1.2, 1.3, 1.3,
+ * 1.2, 1.2, 1.25, 1.1, 1.1, 1.1, 1.1, 1.0,
+ * 1.0, 1.0, 1.0, 1.0, 1.2, 1.3, 1.25, 1.2,
+ * 1.3, 1.2, 1.2, 1.3, 1.2, 1.15, 1.1, 1.2,
+ * 1.2, 1.1, 1.0, 1.2, 1.3, 1.15, 1.2, 1.3 ]
+ * </code></pre>
+ * <p>The low-resolution scaling map images for each channel are
+ * (displayed using nearest-neighbor interpolation):</p>
+ * <p><img alt="Red lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/red_shading.png" />
+ * <img alt="Green (even rows) lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/green_e_shading.png" />
+ * <img alt="Green (odd rows) lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/green_o_shading.png" />
+ * <img alt="Blue lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/blue_shading.png" /></p>
+ * <p>As a visualization only, inverting the full-color map to recover an
+ * image of a gray wall (using bicubic interpolation for visual quality) as captured by the sensor gives:</p>
+ * <p><img alt="Image of a uniform white wall (inverse shading map)" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png" /></p>
+ * <p><b>Range of valid values:</b><br>
+ * Each gain factor is &gt;= 1</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
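+ * <p>As an illustrative sketch (assuming a <code>CaptureResult</code> named
+ * <code>result</code>; the variable name is hypothetical), individual gain
+ * factors can be sampled from the returned map:</p>
+ * <pre><code>LensShadingMap map =
+ *         result.get(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
+ * if (map != null) {
+ *     // Gain for the red channel at sample point (column 0, row 0),
+ *     // i.e. the top-left corner of the map.
+ *     float gain = map.getGainFactor(RggbChannelVector.RED, 0, 0);
+ * }
+ * </code></pre>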
+ *
+ * @see CaptureRequest#COLOR_CORRECTION_MODE
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED
+ */
+ @PublicKey
+ public static final Key<android.hardware.camera2.params.LensShadingMap> STATISTICS_LENS_SHADING_CORRECTION_MAP =
+ new Key<android.hardware.camera2.params.LensShadingMap>("android.statistics.lensShadingCorrectionMap", android.hardware.camera2.params.LensShadingMap.class);
+
+ /**
+ * <p>The shading map is a low-resolution floating-point map
+ * that lists the coefficients used to correct for vignetting and color shading,
+ * for each Bayer color channel of RAW image data.</p>
+ * <p>The map provided here is the same map that is used by the camera device to
+ * correct both color shading and vignetting for output non-RAW images.</p>
+ * <p>When there is no lens shading correction applied to RAW
+ * output images ({@link CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED android.sensor.info.lensShadingApplied} <code>==</code>
+ * false), this map is the complete lens shading correction
+ * map; when there is some lens shading correction applied to
+ * the RAW output image ({@link CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED android.sensor.info.lensShadingApplied}<code>==</code> true), this map reports the remaining lens shading
+ * correction map that needs to be applied to get shading
+ * corrected images that match the camera device's output for
+ * non-RAW formats.</p>
+ * <p>For a complete shading correction map, the least shaded
+ * section of the image will have a gain factor of 1; all
+ * other sections will have gains above 1.</p>
+ * <p>When {@link CaptureRequest#COLOR_CORRECTION_MODE android.colorCorrection.mode} = TRANSFORM_MATRIX, the map
+ * will take into account the colorCorrection settings.</p>
+ * <p>The shading map is for the entire active pixel array, and is not
+ * affected by the crop region specified in the request. Each shading map
+ * entry is the value of the shading compensation map over a specific
+ * pixel on the sensor. Specifically, with a (N x M) resolution shading
+ * map, and an active pixel array size (W x H), shading map entry
+ * (x,y) ∈ (0 ... N-1, 0 ... M-1) is the value of the shading map at
+ * pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels.
+ * The map is assumed to be bilinearly interpolated between the sample points.</p>
+ * <p>The channel order is [R, Geven, Godd, B], where Geven is the green
+ * channel for the even rows of a Bayer pattern, and Godd is the odd rows.
+ * The shading map is stored in a fully interleaved format, and its size
+ * is provided in the camera static metadata by android.lens.info.shadingMapSize.</p>
+ * <p>The shading map will generally have on the order of 30-40 rows and columns,
+ * and will be smaller than 64x64.</p>
+ * <p>As an example, given a very small map defined as:</p>
+ * <pre><code>android.lens.info.shadingMapSize = [ 4, 3 ]
+ * android.statistics.lensShadingMap =
+ * [ 1.3, 1.2, 1.15, 1.2, 1.2, 1.2, 1.15, 1.2,
+ * 1.1, 1.2, 1.2, 1.2, 1.3, 1.2, 1.3, 1.3,
+ * 1.2, 1.2, 1.25, 1.1, 1.1, 1.1, 1.1, 1.0,
+ * 1.0, 1.0, 1.0, 1.0, 1.2, 1.3, 1.25, 1.2,
+ * 1.3, 1.2, 1.2, 1.3, 1.2, 1.15, 1.1, 1.2,
+ * 1.2, 1.1, 1.0, 1.2, 1.3, 1.15, 1.2, 1.3 ]
+ * </code></pre>
+ * <p>The low-resolution scaling map images for each channel are
+ * (displayed using nearest-neighbor interpolation):</p>
+ * <p><img alt="Red lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/red_shading.png" />
+ * <img alt="Green (even rows) lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/green_e_shading.png" />
+ * <img alt="Green (odd rows) lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/green_o_shading.png" />
+ * <img alt="Blue lens shading map" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/blue_shading.png" /></p>
+ * <p>As a visualization only, inverting the full-color map to recover an
+ * image of a gray wall (using bicubic interpolation for visual quality)
+ * as captured by the sensor gives:</p>
+ * <p><img alt="Image of a uniform white wall (inverse shading map)" src="../../../../images/camera2/metadata/android.statistics.lensShadingMap/inv_shading.png" /></p>
+ * <p>Note that the RAW image data might be subject to lens shading
+ * correction not reported on this map. Query
+ * {@link CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED android.sensor.info.lensShadingApplied} to see if RAW image data has been subject
+ * to lens shading correction. If {@link CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED android.sensor.info.lensShadingApplied}
+ * is TRUE, the RAW image data is subject to partial or full lens shading
+ * correction. In the case that full lens shading correction is applied to RAW
+ * images, the gain factor map reported in this key will contain all 1.0 gains.
+ * In other words, the map reported in this key is the remaining lens shading
+ * that needs to be applied on the RAW image to get images without lens shading
+ * artifacts. See {@link CameraCharacteristics#REQUEST_MAX_NUM_OUTPUT_RAW android.request.maxNumOutputRaw} for a list of RAW image
+ * formats.</p>
+ * <p><b>Range of valid values:</b><br>
+ * Each gain factor is &gt;= 1</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CaptureRequest#COLOR_CORRECTION_MODE
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#REQUEST_MAX_NUM_OUTPUT_RAW
+ * @see CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED
+ * @hide
+ */
+ public static final Key<float[]> STATISTICS_LENS_SHADING_MAP =
+ new Key<float[]>("android.statistics.lensShadingMap", float[].class);
+
+ /**
+ * <p>The best-fit color channel gains calculated
+ * by the camera device's statistics units for the current output frame.</p>
+ * <p>This may be different than the gains used for this frame,
+ * since statistics processing on data from a new frame
+ * typically completes after the transform has already been
+ * applied to that frame.</p>
+ * <p>The 4 channel gains are defined in Bayer domain,
+ * see {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} for details.</p>
+ * <p>This value should always be calculated by the auto-white balance (AWB) block,
+ * regardless of the android.control.* current values.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CaptureRequest#COLOR_CORRECTION_GAINS
+ * @deprecated
+ * @hide
+ */
+ @Deprecated
+ public static final Key<float[]> STATISTICS_PREDICTED_COLOR_GAINS =
+ new Key<float[]>("android.statistics.predictedColorGains", float[].class);
+
+ /**
+ * <p>The best-fit color transform matrix estimate
+ * calculated by the camera device's statistics units for the current
+ * output frame.</p>
+ * <p>The camera device will provide the estimate from its
+ * statistics unit on the white balance transforms to use
+ * for the next frame. These are the values the camera device believes
+ * are the best fit for the current output frame. This may
+ * be different than the transform used for this frame, since
+ * statistics processing on data from a new frame typically
+ * completes after the transform has already been applied to
+ * that frame.</p>
+ * <p>These estimates must be provided for all frames, even if
+ * capture settings and color transforms are set by the application.</p>
+ * <p>This value should always be calculated by the auto-white balance (AWB) block,
+ * regardless of the android.control.* current values.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * @deprecated
+ * @hide
+ */
+ @Deprecated
+ public static final Key<Rational[]> STATISTICS_PREDICTED_COLOR_TRANSFORM =
+ new Key<Rational[]>("android.statistics.predictedColorTransform", Rational[].class);
+
+ /**
+ * <p>The camera device estimated scene illumination lighting
+ * frequency.</p>
+ * <p>Many light sources, such as most fluorescent lights, flicker at a rate
+ * that depends on the local utility power standards. This flicker must be
+ * accounted for by auto-exposure routines to avoid artifacts in captured images.
+ * The camera device uses this entry to tell the application what the scene
+ * illuminant frequency is.</p>
+ * <p>When manual exposure control is enabled
+ * (<code>{@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} == OFF</code> or <code>{@link CaptureRequest#CONTROL_MODE android.control.mode} ==
+ * OFF</code>), the {@link CaptureRequest#CONTROL_AE_ANTIBANDING_MODE android.control.aeAntibandingMode} doesn't perform
+ * antibanding, and the application can ensure it selects
+ * exposure times that do not cause banding issues by looking
+ * into this metadata field. See
+ * {@link CaptureRequest#CONTROL_AE_ANTIBANDING_MODE android.control.aeAntibandingMode} for more details.</p>
+ * <p>Reports NONE if there doesn't appear to be flickering illumination.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #STATISTICS_SCENE_FLICKER_NONE NONE}</li>
+ * <li>{@link #STATISTICS_SCENE_FLICKER_50HZ 50HZ}</li>
+ * <li>{@link #STATISTICS_SCENE_FLICKER_60HZ 60HZ}</li>
+ * </ul></p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
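+ * <p>As an illustrative sketch (assuming a <code>CaptureResult</code> named
+ * <code>result</code>; the variable name is hypothetical), a manual-exposure
+ * application can pick a banding-free exposure time:</p>
+ * <pre><code>Integer flicker = result.get(CaptureResult.STATISTICS_SCENE_FLICKER);
+ * if (flicker != null
+ *         &amp;&amp; flicker == CameraMetadata.STATISTICS_SCENE_FLICKER_50HZ) {
+ *     // 50Hz mains gives a 100Hz illumination cycle, so exposure times
+ *     // that are multiples of 10ms avoid banding.
+ *     long exposureNs = 10000000L; // 10ms
+ * }
+ * </code></pre>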
+ *
+ * @see CaptureRequest#CONTROL_AE_ANTIBANDING_MODE
+ * @see CaptureRequest#CONTROL_AE_MODE
+ * @see CaptureRequest#CONTROL_MODE
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see #STATISTICS_SCENE_FLICKER_NONE
+ * @see #STATISTICS_SCENE_FLICKER_50HZ
+ * @see #STATISTICS_SCENE_FLICKER_60HZ
+ */
+ @PublicKey
+ public static final Key<Integer> STATISTICS_SCENE_FLICKER =
+ new Key<Integer>("android.statistics.sceneFlicker", int.class);
+
+ /**
+ * <p>Operating mode for hot pixel map generation.</p>
+ * <p>If set to <code>true</code>, a hot pixel map is returned in {@link CaptureResult#STATISTICS_HOT_PIXEL_MAP android.statistics.hotPixelMap}.
+ * If set to <code>false</code>, no hot pixel map will be returned.</p>
+ * <p><b>Range of valid values:</b><br>
+ * {@link CameraCharacteristics#STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES android.statistics.info.availableHotPixelMapModes}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
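+ * <p>As an illustrative sketch (assuming a <code>CaptureRequest.Builder</code>
+ * named <code>builder</code> and a <code>CaptureResult</code> named
+ * <code>result</code>; the variable names are hypothetical):</p>
+ * <pre><code>builder.set(CaptureRequest.STATISTICS_HOT_PIXEL_MAP_MODE, true);
+ * // Later, in the capture callback for this request:
+ * Point[] hotPixels = result.get(CaptureResult.STATISTICS_HOT_PIXEL_MAP);
+ * </code></pre>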
+ *
+ * @see CaptureResult#STATISTICS_HOT_PIXEL_MAP
+ * @see CameraCharacteristics#STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES
+ */
+ @PublicKey
+ public static final Key<Boolean> STATISTICS_HOT_PIXEL_MAP_MODE =
+ new Key<Boolean>("android.statistics.hotPixelMapMode", boolean.class);
+
+ /**
+ * <p>List of <code>(x, y)</code> coordinates of hot/defective pixels on the sensor.</p>
+ * <p>A coordinate <code>(x, y)</code> must lie between <code>(0, 0)</code>, and
+ * <code>(width - 1, height - 1)</code> (inclusive), which are the top-left and
+ * bottom-right of the pixel array, respectively. The width and
+ * height dimensions are given in {@link CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE android.sensor.info.pixelArraySize}.
+ * This may include hot pixels that lie outside of the active array
+ * bounds given by {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE android.sensor.info.activeArraySize}.</p>
+ * <p><b>Range of valid values:</b><br></p>
+ * <p>n &lt;= number of pixels on the sensor.
+ * The <code>(x, y)</code> coordinates must be bounded by
+ * {@link CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE android.sensor.info.pixelArraySize}.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ *
+ * @see CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE
+ * @see CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE
+ */
+ @PublicKey
+ public static final Key<android.graphics.Point[]> STATISTICS_HOT_PIXEL_MAP =
+ new Key<android.graphics.Point[]>("android.statistics.hotPixelMap", android.graphics.Point[].class);
+
+ /**
+ * <p>Whether the camera device will output the lens
+ * shading map in output result metadata.</p>
+ * <p>When set to ON,
+ * android.statistics.lensShadingMap will be provided in
+ * the output result metadata.</p>
+ * <p>ON is always supported on devices with the RAW capability.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #STATISTICS_LENS_SHADING_MAP_MODE_OFF OFF}</li>
+ * <li>{@link #STATISTICS_LENS_SHADING_MAP_MODE_ON ON}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES android.statistics.info.availableLensShadingMapModes}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES
+ * @see #STATISTICS_LENS_SHADING_MAP_MODE_OFF
+ * @see #STATISTICS_LENS_SHADING_MAP_MODE_ON
+ */
+ @PublicKey
+ public static final Key<Integer> STATISTICS_LENS_SHADING_MAP_MODE =
+ new Key<Integer>("android.statistics.lensShadingMapMode", int.class);
+
+ /**
+ * <p>Tonemapping / contrast / gamma curve for the blue
+ * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
+ * CONTRAST_CURVE.</p>
+ * <p>See android.tonemap.curveRed for more details.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CaptureRequest#TONEMAP_MODE
+ * @hide
+ */
+ public static final Key<float[]> TONEMAP_CURVE_BLUE =
+ new Key<float[]>("android.tonemap.curveBlue", float[].class);
+
+ /**
+ * <p>Tonemapping / contrast / gamma curve for the green
+ * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
+ * CONTRAST_CURVE.</p>
+ * <p>See android.tonemap.curveRed for more details.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CaptureRequest#TONEMAP_MODE
+ * @hide
+ */
+ public static final Key<float[]> TONEMAP_CURVE_GREEN =
+ new Key<float[]>("android.tonemap.curveGreen", float[].class);
+
+ /**
+ * <p>Tonemapping / contrast / gamma curve for the red
+ * channel, to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
+ * CONTRAST_CURVE.</p>
+ * <p>Each channel's curve is defined by an array of control points:</p>
+ * <pre><code>android.tonemap.curveRed =
+ * [ P0in, P0out, P1in, P1out, P2in, P2out, P3in, P3out, ..., PNin, PNout ]
+ * 2 &lt;= N &lt;= {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}</code></pre>
+ * <p>These are sorted in order of increasing <code>Pin</code>; it is
+ * required that input values 0.0 and 1.0 are included in the list to
+ * define a complete mapping. For input values between control points,
+ * the camera device must linearly interpolate between the control
+ * points.</p>
+ * <p>Each curve can have an independent number of points, and the number
+ * of points can be less than the maximum (that is, the request doesn't
+ * always have to provide a curve with a number of points equal to
+ * {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}).</p>
+ * <p>A few examples, and their corresponding graphical mappings; these
+ * only specify the red channel and the precision is limited to 4
+ * digits, for conciseness.</p>
+ * <p>Linear mapping:</p>
+ * <pre><code>android.tonemap.curveRed = [ 0, 0, 1.0, 1.0 ]
+ * </code></pre>
+ * <p><img alt="Linear mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png" /></p>
+ * <p>Invert mapping:</p>
+ * <pre><code>android.tonemap.curveRed = [ 0, 1.0, 1.0, 0 ]
+ * </code></pre>
+ * <p><img alt="Inverting mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png" /></p>
+ * <p>Gamma 1/2.2 mapping, with 16 control points:</p>
+ * <pre><code>android.tonemap.curveRed = [
+ * 0.0000, 0.0000, 0.0667, 0.2920, 0.1333, 0.4002, 0.2000, 0.4812,
+ * 0.2667, 0.5484, 0.3333, 0.6069, 0.4000, 0.6594, 0.4667, 0.7072,
+ * 0.5333, 0.7515, 0.6000, 0.7928, 0.6667, 0.8317, 0.7333, 0.8685,
+ * 0.8000, 0.9035, 0.8667, 0.9370, 0.9333, 0.9691, 1.0000, 1.0000 ]
+ * </code></pre>
+ * <p><img alt="Gamma = 1/2.2 tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png" /></p>
+ * <p>Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:</p>
+ * <pre><code>android.tonemap.curveRed = [
+ * 0.0000, 0.0000, 0.0667, 0.2864, 0.1333, 0.4007, 0.2000, 0.4845,
+ * 0.2667, 0.5532, 0.3333, 0.6125, 0.4000, 0.6652, 0.4667, 0.7130,
+ * 0.5333, 0.7569, 0.6000, 0.7977, 0.6667, 0.8360, 0.7333, 0.8721,
+ * 0.8000, 0.9063, 0.8667, 0.9389, 0.9333, 0.9701, 1.0000, 1.0000 ]
+ * </code></pre>
+ * <p><img alt="sRGB tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
+ * <p><b>Range of valid values:</b><br>
+ * 0-1 on both input and output coordinates, normalized
+ * as a floating-point value such that 0 == black and 1 == white.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS
+ * @see CaptureRequest#TONEMAP_MODE
+ * @hide
+ */
+ public static final Key<float[]> TONEMAP_CURVE_RED =
+ new Key<float[]>("android.tonemap.curveRed", float[].class);
+
+ /**
+ * <p>Tonemapping / contrast / gamma curve to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode}
+ * is CONTRAST_CURVE.</p>
+ * <p>The tonemap curve consists of three curves, one each for the red, green,
+ * and blue channels. The examples below use the red channel;
+ * the same logic applies to the green and blue channels.
+ * Each channel's curve is defined by an array of control points:</p>
+ * <pre><code>curveRed =
+ * [ P0(in, out), P1(in, out), P2(in, out), P3(in, out), ..., PN(in, out) ]
+ * 2 &lt;= N &lt;= {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}</code></pre>
+ * <p>These are sorted in order of increasing <code>Pin</code>; it is always
+ * guaranteed that input values 0.0 and 1.0 are included in the list to
+ * define a complete mapping. For input values between control points,
+ * the camera device must linearly interpolate between the control
+ * points.</p>
+ * <p>Each curve can have an independent number of points, and the number
+ * of points can be less than the maximum (that is, the request doesn't
+ * always have to provide a curve with a number of points equal to
+ * {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}).</p>
+ * <p>A few examples, and their corresponding graphical mappings; these
+ * only specify the red channel and the precision is limited to 4
+ * digits, for conciseness.</p>
+ * <p>Linear mapping:</p>
+ * <pre><code>curveRed = [ (0, 0), (1.0, 1.0) ]
+ * </code></pre>
+ * <p><img alt="Linear mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/linear_tonemap.png" /></p>
+ * <p>Invert mapping:</p>
+ * <pre><code>curveRed = [ (0, 1.0), (1.0, 0) ]
+ * </code></pre>
+ * <p><img alt="Inverting mapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/inverse_tonemap.png" /></p>
+ * <p>Gamma 1/2.2 mapping, with 16 control points:</p>
+ * <pre><code>curveRed = [
+ * (0.0000, 0.0000), (0.0667, 0.2920), (0.1333, 0.4002), (0.2000, 0.4812),
+ * (0.2667, 0.5484), (0.3333, 0.6069), (0.4000, 0.6594), (0.4667, 0.7072),
+ * (0.5333, 0.7515), (0.6000, 0.7928), (0.6667, 0.8317), (0.7333, 0.8685),
+ * (0.8000, 0.9035), (0.8667, 0.9370), (0.9333, 0.9691), (1.0000, 1.0000) ]
+ * </code></pre>
+ * <p><img alt="Gamma = 1/2.2 tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/gamma_tonemap.png" /></p>
+ * <p>Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points:</p>
+ * <pre><code>curveRed = [
+ * (0.0000, 0.0000), (0.0667, 0.2864), (0.1333, 0.4007), (0.2000, 0.4845),
+ * (0.2667, 0.5532), (0.3333, 0.6125), (0.4000, 0.6652), (0.4667, 0.7130),
+ * (0.5333, 0.7569), (0.6000, 0.7977), (0.6667, 0.8360), (0.7333, 0.8721),
+ * (0.8000, 0.9063), (0.8667, 0.9389), (0.9333, 0.9701), (1.0000, 1.0000) ]
+ * </code></pre>
+ * <p><img alt="sRGB tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
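+ * <p>As an illustrative sketch (assuming a <code>CaptureRequest.Builder</code>
+ * named <code>builder</code>; the variable name is hypothetical), an identity
+ * (linear) curve can be applied to all three channels:</p>
+ * <pre><code>float[] linear = { 0.0f, 0.0f, 1.0f, 1.0f }; // (in, out) pairs
+ * TonemapCurve curve = new TonemapCurve(linear, linear, linear);
+ * builder.set(CaptureRequest.TONEMAP_MODE,
+ *         CameraMetadata.TONEMAP_MODE_CONTRAST_CURVE);
+ * builder.set(CaptureRequest.TONEMAP_CURVE, curve);
+ * </code></pre>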
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS
+ * @see CaptureRequest#TONEMAP_MODE
+ */
+ @PublicKey
+ @SyntheticKey
+ public static final Key<android.hardware.camera2.params.TonemapCurve> TONEMAP_CURVE =
+ new Key<android.hardware.camera2.params.TonemapCurve>("android.tonemap.curve", android.hardware.camera2.params.TonemapCurve.class);
+
+ /**
+ * <p>High-level global contrast/gamma/tonemapping control.</p>
+ * <p>When switching to an application-defined contrast curve by setting
+ * {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} to CONTRAST_CURVE, the curve is defined
+ * per-channel with a set of <code>(in, out)</code> points that specify the
+ * mapping from input high-bit-depth pixel value to the output
+ * low-bit-depth value. Since the actual pixel ranges of both input
+ * and output may change depending on the camera pipeline, the values
+ * are specified by normalized floating-point numbers.</p>
+ * <p>More-complex color mapping operations such as 3D color look-up
+ * tables, selective chroma enhancement, or other non-linear color
+ * transforms will be disabled when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
+ * CONTRAST_CURVE.</p>
+ * <p>When using either FAST or HIGH_QUALITY, the camera device will
+ * emit its own tonemap curve in {@link CaptureRequest#TONEMAP_CURVE android.tonemap.curve}.
+ * These values are always available, and as close as possible to the
+ * actually used nonlinear/nonglobal transforms.</p>
+ * <p>If a request is sent with CONTRAST_CURVE with the camera device's
+ * provided curve in FAST or HIGH_QUALITY, the image's tonemap will be
+ * roughly the same.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #TONEMAP_MODE_CONTRAST_CURVE CONTRAST_CURVE}</li>
+ * <li>{@link #TONEMAP_MODE_FAST FAST}</li>
+ * <li>{@link #TONEMAP_MODE_HIGH_QUALITY HIGH_QUALITY}</li>
+ * <li>{@link #TONEMAP_MODE_GAMMA_VALUE GAMMA_VALUE}</li>
+ * <li>{@link #TONEMAP_MODE_PRESET_CURVE PRESET_CURVE}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * {@link CameraCharacteristics#TONEMAP_AVAILABLE_TONE_MAP_MODES android.tonemap.availableToneMapModes}</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CameraCharacteristics#TONEMAP_AVAILABLE_TONE_MAP_MODES
+ * @see CaptureRequest#TONEMAP_CURVE
+ * @see CaptureRequest#TONEMAP_MODE
+ * @see #TONEMAP_MODE_CONTRAST_CURVE
+ * @see #TONEMAP_MODE_FAST
+ * @see #TONEMAP_MODE_HIGH_QUALITY
+ * @see #TONEMAP_MODE_GAMMA_VALUE
+ * @see #TONEMAP_MODE_PRESET_CURVE
+ */
+ @PublicKey
+ public static final Key<Integer> TONEMAP_MODE =
+ new Key<Integer>("android.tonemap.mode", int.class);
+
+ /**
+ * <p>Tonemapping curve to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
+ * GAMMA_VALUE</p>
+ * <p>The tonemap curve will be defined by the following formula:</p>
+ * <pre><code>OUT = pow(IN, 1.0 / gamma)
+ * </code></pre>
+ * <p>where IN and OUT are the input and output pixel values scaled to the
+ * range [0.0, 1.0], pow is the power function, and gamma is the gamma value
+ * specified by this key.</p>
+ * <p>The same curve will be applied to all color channels. The camera device
+ * may clip the input gamma value to its supported range. The actual applied
+ * value will be returned in the capture result.</p>
+ * <p>The valid range of gamma value varies on different devices, but values
+ * within [1.0, 5.0] are guaranteed not to be clipped.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
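+ * <p>As an illustrative sketch (assuming a <code>CaptureRequest.Builder</code>
+ * named <code>builder</code>; the variable name is hypothetical), a standard
+ * gamma of 2.2 can be requested:</p>
+ * <pre><code>builder.set(CaptureRequest.TONEMAP_MODE,
+ *         CameraMetadata.TONEMAP_MODE_GAMMA_VALUE);
+ * builder.set(CaptureRequest.TONEMAP_GAMMA, 2.2f);
+ * </code></pre>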
+ *
+ * @see CaptureRequest#TONEMAP_MODE
+ */
+ @PublicKey
+ public static final Key<Float> TONEMAP_GAMMA =
+ new Key<Float>("android.tonemap.gamma", float.class);
+
+ /**
+ * <p>Tonemapping curve to use when {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode} is
+ * PRESET_CURVE</p>
+ * <p>The tonemap curve will be defined by the specified standard.</p>
+ * <p>sRGB (approximated by 16 control points):</p>
+ * <p><img alt="sRGB tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/srgb_tonemap.png" /></p>
+ * <p>Rec. 709 (approximated by 16 control points):</p>
+ * <p><img alt="Rec. 709 tonemapping curve" src="../../../../images/camera2/metadata/android.tonemap.curveRed/rec709_tonemap.png" /></p>
+ * <p>Note that the above figures show a 16-control-point approximation of the
+ * preset curves. Camera devices may apply a different approximation to the curve.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #TONEMAP_PRESET_CURVE_SRGB SRGB}</li>
+ * <li>{@link #TONEMAP_PRESET_CURVE_REC709 REC709}</li>
+ * </ul></p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
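+ * <p>As an illustrative sketch (assuming a <code>CaptureRequest.Builder</code>
+ * named <code>builder</code>; the variable name is hypothetical):</p>
+ * <pre><code>builder.set(CaptureRequest.TONEMAP_MODE,
+ *         CameraMetadata.TONEMAP_MODE_PRESET_CURVE);
+ * builder.set(CaptureRequest.TONEMAP_PRESET_CURVE,
+ *         CameraMetadata.TONEMAP_PRESET_CURVE_SRGB);
+ * </code></pre>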
+ *
+ * @see CaptureRequest#TONEMAP_MODE
+ * @see #TONEMAP_PRESET_CURVE_SRGB
+ * @see #TONEMAP_PRESET_CURVE_REC709
+ */
+ @PublicKey
+ public static final Key<Integer> TONEMAP_PRESET_CURVE =
+ new Key<Integer>("android.tonemap.presetCurve", int.class);
+
+ /**
+ * <p>This LED is nominally used to indicate to the user
+ * that the camera is powered on and may be streaming images back to the
+ * Application Processor. In certain rare circumstances, the OS may
+ * disable this when video is processed locally and not transmitted to
+ * any untrusted applications.</p>
+ * <p>In particular, the LED <em>must</em> always be on when the data could be
+ * transmitted off the device. The LED <em>should</em> always be on whenever
+ * data is stored locally on the device.</p>
+ * <p>The LED <em>may</em> be off if a trusted application is using the data in a
+ * way that doesn't violate the above rules.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * @hide
+ */
+ public static final Key<Boolean> LED_TRANSMIT =
+ new Key<Boolean>("android.led.transmit", boolean.class);
+
+ /**
+ * <p>Whether black-level compensation is locked
+ * to its current values, or is free to vary.</p>
+ * <p>Whether the black level offset was locked for this frame. Should be
+ * ON if {@link CaptureRequest#BLACK_LEVEL_LOCK android.blackLevel.lock} was ON in the capture request, unless
+ * a change in other capture settings forced the camera device to
+ * perform a black level reset.</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Full capability</b> -
+ * Present on all camera devices that report being {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_FULL HARDWARE_LEVEL_FULL} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
+ *
+ * @see CaptureRequest#BLACK_LEVEL_LOCK
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ */
+ @PublicKey
+ public static final Key<Boolean> BLACK_LEVEL_LOCK =
+ new Key<Boolean>("android.blackLevel.lock", boolean.class);
+
+ /**
+ * <p>The frame number corresponding to the last request
+ * with which the output result (metadata + buffers) has been fully
+ * synchronized.</p>
+ * <p>When a request is submitted to the camera device, there is usually a
+ * delay of several frames before the controls get applied. A camera
+ * device may either choose to account for this delay by implementing a
+ * pipeline and carefully submitting well-timed atomic control updates, or
+ * it may start streaming control changes that span several frame
+ * boundaries.</p>
+ * <p>In the latter case, whenever a request's settings change relative to
+ * the previous submitted request, the full set of changes may take
+ * multiple frame durations to fully take effect. Some settings may
+ * take effect sooner (in less frame durations) than others.</p>
+ * <p>While a set of control changes are being propagated, this value
+ * will be CONVERGING.</p>
+ * <p>Once it is fully known that a set of control changes have been
+ * finished propagating, and the resulting updated control settings
+ * have been read back by the camera device, this value will be set
+ * to a non-negative frame number (corresponding to the request with
+ * which the results have synchronized).</p>
+ * <p>Older camera device implementations may not have a way to detect
+ * when all camera controls have been applied, and will always set this
+ * value to UNKNOWN.</p>
+ * <p>FULL capability devices will always have this value set to the
+ * frame number of the request corresponding to this result.</p>
+ * <p><em>Further details</em>:</p>
+ * <ul>
+ * <li>Whenever a request differs from the last request, any future
+ * results not yet returned may have this value set to CONVERGING (this
+ * could include any in-progress captures not yet returned by the camera
+ * device, for more details see pipeline considerations below).</li>
+ * <li>Submitting a series of multiple requests that differ from the
+ * previous request (e.g. r1, r2, r3 s.t. r1 != r2 != r3)
+ * moves the new synchronization frame to the last non-repeating
+ * request (using the smallest frame number from the contiguous list of
+ * repeating requests).</li>
+ * <li>Submitting the same request repeatedly will not change this value
+ * to CONVERGING, if it was already a non-negative value.</li>
+ * <li>When this value changes to non-negative, that means that all of the
+ * metadata controls from the request have been applied, all of the
+ * metadata controls from the camera device have been read to the
+ * updated values (into the result), and all of the graphics buffers
+ * corresponding to this result are also synchronized to the request.</li>
+ * </ul>
+ * <p><em>Pipeline considerations</em>:</p>
+ * <p>Submitting a request with updated controls relative to the previously
+ * submitted requests may also invalidate the synchronization state
+ * of all the results corresponding to currently in-flight requests.</p>
+ * <p>In other words, results for this current request and up to
+ * {@link CameraCharacteristics#REQUEST_PIPELINE_MAX_DEPTH android.request.pipelineMaxDepth} prior requests may have their
+ * android.sync.frameNumber change to CONVERGING.</p>
+ * <p><b>Possible values:</b>
+ * <ul>
+ * <li>{@link #SYNC_FRAME_NUMBER_CONVERGING CONVERGING}</li>
+ * <li>{@link #SYNC_FRAME_NUMBER_UNKNOWN UNKNOWN}</li>
+ * </ul></p>
+ * <p><b>Available values for this device:</b><br>
+ * Either a non-negative value corresponding to a
+ * <code>frame_number</code>, or one of the two enums (CONVERGING / UNKNOWN).</p>
+ * <p>This key is available on all devices.</p>
+ *
+ * @see CameraCharacteristics#REQUEST_PIPELINE_MAX_DEPTH
+ * @see #SYNC_FRAME_NUMBER_CONVERGING
+ * @see #SYNC_FRAME_NUMBER_UNKNOWN
+ * @hide
+ */
+ public static final Key<Long> SYNC_FRAME_NUMBER =
+ new Key<Long>("android.sync.frameNumber", long.class);
+
+ /**
+ * <p>The exposure time increase factor applied to the original output
+ * frame by the application's processing before it is sent for reprocessing.</p>
+ * <p>This is optional, and will be supported if the camera device supports YUV_REPROCESSING
+ * capability ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains YUV_REPROCESSING).</p>
+ * <p>For some YUV reprocessing use cases, the application may choose to filter the original
+ * output frames to effectively reduce the noise to the same level as a frame that was
+ * captured with longer exposure time. To be more specific, assuming the original captured
+ * images were captured with a sensitivity of S and an exposure time of T, the model in
+ * the camera device is that the amount of noise in the image would be approximately what
+ * would be expected if the original capture parameters had been a sensitivity of
+ * S/effectiveExposureFactor and an exposure time of T*effectiveExposureFactor, rather
+ * than S and T respectively. If the captured images were processed by the application
+ * before being sent for reprocessing, then the application may have used image processing
+ * algorithms and/or multi-frame image fusion to reduce the noise in the
+ * application-processed images (input images). By using the effectiveExposureFactor
+ * control, the application can communicate to the camera device the actual noise level
+ * improvement in the application-processed image. With this information, the camera
+ * device can select appropriate noise reduction and edge enhancement parameters to avoid
+ * excessive noise reduction ({@link CaptureRequest#NOISE_REDUCTION_MODE android.noiseReduction.mode}) and insufficient edge
+ * enhancement ({@link CaptureRequest#EDGE_MODE android.edge.mode}) being applied to the reprocessed frames.</p>
+ * <p>For example, for the multi-frame image fusion use case, the application may fuse
+ * multiple output frames together into a final frame for reprocessing. When N images are
+ * fused into 1 image for reprocessing, the exposure time increase factor could be up to
+ * square root of N (based on a simple photon shot noise model). The camera device will
+ * adjust the reprocessing noise reduction and edge enhancement parameters accordingly to
+ * produce the best quality images.</p>
+ * <p>This is a relative factor; 1.0 indicates that the application hasn't processed the input
+ * buffer in a way that affects its effective exposure time.</p>
+ * <p>This control is only effective for YUV reprocessing capture requests. For noise
+ * reduction reprocessing, it is only effective when <code>{@link CaptureRequest#NOISE_REDUCTION_MODE android.noiseReduction.mode} != OFF</code>.
+ * Similarly, for edge enhancement reprocessing, it is only effective when
+ * <code>{@link CaptureRequest#EDGE_MODE android.edge.mode} != OFF</code>.</p>
+ * <p><b>Units</b>: Relative exposure time increase factor.</p>
+ * <p><b>Range of valid values:</b><br>
+ * &gt;= 1.0</p>
+ * <p><b>Optional</b> - This value may be {@code null} on some devices.</p>
+ * <p><b>Limited capability</b> -
+ * Present on all camera devices that report being at least {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED HARDWARE_LEVEL_LIMITED} devices in the
+ * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel} key</p>
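+ * <p>As an illustrative sketch (assuming a reprocess
+ * <code>CaptureRequest.Builder</code> named <code>reprocessBuilder</code>;
+ * the variable names are hypothetical), the photon shot noise model above
+ * suggests a square-root scaling with the number of fused frames:</p>
+ * <pre><code>int fusedFrameCount = 4; // frames fused into the input image
+ * float factor = (float) Math.sqrt(fusedFrameCount);
+ * reprocessBuilder.set(
+ *         CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, factor);
+ * </code></pre>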
+ *
+ * @see CaptureRequest#EDGE_MODE
+ * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL
+ * @see CaptureRequest#NOISE_REDUCTION_MODE
+ * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+ */
+ @PublicKey
+ public static final Key<Float> REPROCESS_EFFECTIVE_EXPOSURE_FACTOR =
+ new Key<Float>("android.reprocess.effectiveExposureFactor", float.class);
+
+ /*~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
+ * End generated code
+ *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~O@*/
+
+
+
+}
diff --git a/android/hardware/camera2/DngCreator.java b/android/hardware/camera2/DngCreator.java
new file mode 100644
index 00000000..1a51acd6
--- /dev/null
+++ b/android/hardware/camera2/DngCreator.java
@@ -0,0 +1,678 @@
+/*
+ * Copyright 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2;
+
+import android.annotation.IntRange;
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.graphics.Bitmap;
+import android.graphics.Color;
+import android.graphics.ImageFormat;
+import android.hardware.camera2.impl.CameraMetadataNative;
+import android.location.Location;
+import android.media.ExifInterface;
+import android.media.Image;
+import android.os.SystemClock;
+import android.util.Log;
+import android.util.Size;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.ByteBuffer;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.TimeZone;
+
+/**
+ * The {@link DngCreator} class provides functions to write raw pixel data as a DNG file.
+ *
+ * <p>
+ * This class is designed to be used with the {@link android.graphics.ImageFormat#RAW_SENSOR}
+ * buffers available from {@link android.hardware.camera2.CameraDevice}, or with Bayer-type raw
+ * pixel data that is otherwise generated by an application. The DNG metadata tags will be
+ * generated from a {@link android.hardware.camera2.CaptureResult} object or set directly.
+ * </p>
+ *
+ * <p>
+ * The DNG file format is a cross-platform file format that is used to store pixel data from
+ * camera sensors with minimal pre-processing applied. DNG files allow for pixel data to be
+ * defined in a user-defined colorspace, and have associated metadata that allow for this
+ * pixel data to be converted to the standard CIE XYZ colorspace during post-processing.
+ * </p>
+ *
+ * <p>
+ * For more information on the DNG file format and associated metadata, please refer to the
+ * <a href=
+ * "https://wwwimages2.adobe.com/content/dam/Adobe/en/products/photoshop/pdfs/dng_spec_1.4.0.0.pdf">
+ * Adobe DNG 1.4.0.0 specification</a>.
+ * </p>
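+ *
+ * <p>
+ * As a minimal usage sketch (assuming an {@link android.media.Image} in
+ * {@link android.graphics.ImageFormat#RAW_SENSOR} format, the matching
+ * {@link CameraCharacteristics} and {@link CaptureResult}, and an open
+ * {@link java.io.OutputStream}; the variable names are hypothetical):
+ * </p>
+ * <pre><code>try (DngCreator dngCreator = new DngCreator(characteristics, captureResult)) {
+ *     dngCreator.writeImage(outputStream, rawImage);
+ * }
+ * </code></pre>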
+ */
+public final class DngCreator implements AutoCloseable {
+
+ private static final String TAG = "DngCreator";
+ /**
+ * Create a new DNG object.
+ *
+ * <p>
+ * It is not necessary to call any set methods to write a well-formatted DNG file.
+ * </p>
+ * <p>
+ * DNG metadata tags will be generated from the corresponding parameters in the
+ * {@link android.hardware.camera2.CaptureResult} object.
+ * </p>
+ * <p>
+ * For best quality DNG files, it is strongly recommended that lens shading map output is
+ * enabled if supported. See {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE}.
+ * </p>
+ * @param characteristics an object containing the static
+ * {@link android.hardware.camera2.CameraCharacteristics}.
+ * @param metadata a metadata object to generate tags from.
+ */
+ public DngCreator(@NonNull CameraCharacteristics characteristics,
+ @NonNull CaptureResult metadata) {
+ if (characteristics == null || metadata == null) {
+ throw new IllegalArgumentException("Null argument to DngCreator constructor");
+ }
+
+ // Find current time in milliseconds since 1970
+ long currentTime = System.currentTimeMillis();
+ // Assume that sensor timestamp has that timebase to start
+ long timeOffset = 0;
+
+ int timestampSource = characteristics.get(
+ CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE);
+
+ if (timestampSource == CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME) {
+ // This means the same timebase as SystemClock.elapsedRealtime(),
+ // which is CLOCK_BOOTTIME
+ timeOffset = currentTime - SystemClock.elapsedRealtime();
+ } else if (timestampSource == CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN) {
+ // This means the same timebase as SystemClock.uptimeMillis(),
+ // which is CLOCK_MONOTONIC
+ timeOffset = currentTime - SystemClock.uptimeMillis();
+ } else {
+ // Unexpected time source - treat as CLOCK_MONOTONIC
+ Log.w(TAG, "Sensor timestamp source is unexpected: " + timestampSource);
+ timeOffset = currentTime - SystemClock.uptimeMillis();
+ }
+
+ // Find capture time (nanos since boot)
+ Long timestamp = metadata.get(CaptureResult.SENSOR_TIMESTAMP);
+ long captureTime = currentTime;
+ if (timestamp != null) {
+ captureTime = timestamp / 1000000 + timeOffset;
+ }
+
+ // Create this fresh each time since the time zone may change while a long-running application
+ // is active.
+ final DateFormat dateTimeStampFormat =
+ new SimpleDateFormat(TIFF_DATETIME_FORMAT);
+ dateTimeStampFormat.setTimeZone(TimeZone.getDefault());
+
+ // Format for metadata
+ String formattedCaptureTime = dateTimeStampFormat.format(captureTime);
+
+ nativeInit(characteristics.getNativeCopy(), metadata.getNativeCopy(),
+ formattedCaptureTime);
+ }
+
+ /**
+ * Set the orientation value to write.
+ *
+ * <p>
+ * This will be written as the TIFF "Orientation" tag {@code (0x0112)}.
+ * Calling this will override any prior settings for this tag.
+ * </p>
+ *
+ * @param orientation the orientation value to set, one of:
+ * <ul>
+ * <li>{@link android.media.ExifInterface#ORIENTATION_NORMAL}</li>
+ * <li>{@link android.media.ExifInterface#ORIENTATION_FLIP_HORIZONTAL}</li>
+ * <li>{@link android.media.ExifInterface#ORIENTATION_ROTATE_180}</li>
+ * <li>{@link android.media.ExifInterface#ORIENTATION_FLIP_VERTICAL}</li>
+ * <li>{@link android.media.ExifInterface#ORIENTATION_TRANSPOSE}</li>
+ * <li>{@link android.media.ExifInterface#ORIENTATION_ROTATE_90}</li>
+ * <li>{@link android.media.ExifInterface#ORIENTATION_TRANSVERSE}</li>
+ * <li>{@link android.media.ExifInterface#ORIENTATION_ROTATE_270}</li>
+ * </ul>
+ * @return this {@link #DngCreator} object.
+ */
+ @NonNull
+ public DngCreator setOrientation(int orientation) {
+ if (orientation < ExifInterface.ORIENTATION_UNDEFINED ||
+ orientation > ExifInterface.ORIENTATION_ROTATE_270) {
+ throw new IllegalArgumentException("Orientation " + orientation +
+ " is not a valid EXIF orientation value");
+ }
+ // ExifInterface and TIFF/EP spec differ on definition of
+ // "Unknown" orientation; other values map directly
+ if (orientation == ExifInterface.ORIENTATION_UNDEFINED) {
+ orientation = TAG_ORIENTATION_UNKNOWN;
+ }
+ nativeSetOrientation(orientation);
+ return this;
+ }
+
+ /**
+ * Set the thumbnail image.
+ *
+ * <p>
+ * Pixel data will be converted to a Baseline TIFF RGB image, with 8 bits per color channel.
+ * The alpha channel will be discarded. Thumbnail images with a dimension larger than
+ * {@link #MAX_THUMBNAIL_DIMENSION} will be rejected.
+ * </p>
+ *
+ * @param pixels a {@link android.graphics.Bitmap} of pixel data.
+ * @return this {@link #DngCreator} object.
+ * @throws java.lang.IllegalArgumentException if the given thumbnail image has a dimension
+ * larger than {@link #MAX_THUMBNAIL_DIMENSION}.
+ */
+ @NonNull
+ public DngCreator setThumbnail(@NonNull Bitmap pixels) {
+ if (pixels == null) {
+ throw new IllegalArgumentException("Null argument to setThumbnail");
+ }
+
+ int width = pixels.getWidth();
+ int height = pixels.getHeight();
+
+ if (width > MAX_THUMBNAIL_DIMENSION || height > MAX_THUMBNAIL_DIMENSION) {
+ throw new IllegalArgumentException("Thumbnail dimensions width,height (" + width +
+ "," + height + ") too large, dimensions must be smaller than " +
+ MAX_THUMBNAIL_DIMENSION);
+ }
+
+ ByteBuffer rgbBuffer = convertToRGB(pixels);
+ nativeSetThumbnail(rgbBuffer, width, height);
+
+ return this;
+ }
+
+ /**
+ * Set the thumbnail image.
+ *
+ * <p>
+ * Pixel data is interpreted as a {@link android.graphics.ImageFormat#YUV_420_888} image.
+ * Thumbnail images with a dimension larger than {@link #MAX_THUMBNAIL_DIMENSION} will be
+ * rejected.
+ * </p>
+ *
+ * @param pixels an {@link android.media.Image} object with the format
+ * {@link android.graphics.ImageFormat#YUV_420_888}.
+ * @return this {@link #DngCreator} object.
+ * @throws java.lang.IllegalArgumentException if the given thumbnail image has a dimension
+ * larger than {@link #MAX_THUMBNAIL_DIMENSION}.
+ */
+ @NonNull
+ public DngCreator setThumbnail(@NonNull Image pixels) {
+ if (pixels == null) {
+ throw new IllegalArgumentException("Null argument to setThumbnail");
+ }
+
+ int format = pixels.getFormat();
+ if (format != ImageFormat.YUV_420_888) {
+ throw new IllegalArgumentException("Unsupported Image format " + format);
+ }
+
+ int width = pixels.getWidth();
+ int height = pixels.getHeight();
+
+ if (width > MAX_THUMBNAIL_DIMENSION || height > MAX_THUMBNAIL_DIMENSION) {
+ throw new IllegalArgumentException("Thumbnail dimensions width,height (" + width +
+ "," + height + ") too large, dimensions must be smaller than " +
+ MAX_THUMBNAIL_DIMENSION);
+ }
+
+ ByteBuffer rgbBuffer = convertToRGB(pixels);
+ nativeSetThumbnail(rgbBuffer, width, height);
+
+ return this;
+ }
+
+ /**
+ * Set image location metadata.
+ *
+ * <p>
+ * The given location object must contain at least a valid time, latitude, and longitude
+ * (equivalent to the values returned by {@link android.location.Location#getTime()},
+ * {@link android.location.Location#getLatitude()}, and
+ * {@link android.location.Location#getLongitude()} methods).
+ * </p>
+ *
+ * @param location an {@link android.location.Location} object to set.
+ * @return this {@link #DngCreator} object.
+ *
+ * @throws java.lang.IllegalArgumentException if the given location object doesn't
+ * contain enough information to set location metadata.
+ */
+ @NonNull
+ public DngCreator setLocation(@NonNull Location location) {
+ if (location == null) {
+ throw new IllegalArgumentException("Null location passed to setLocation");
+ }
+ double latitude = location.getLatitude();
+ double longitude = location.getLongitude();
+ long time = location.getTime();
+
+ int[] latTag = toExifLatLong(latitude);
+ int[] longTag = toExifLatLong(longitude);
+ String latRef = latitude >= 0 ? GPS_LAT_REF_NORTH : GPS_LAT_REF_SOUTH;
+ String longRef = longitude >= 0 ? GPS_LONG_REF_EAST : GPS_LONG_REF_WEST;
+
+ String dateTag = sExifGPSDateStamp.format(time);
+ mGPSTimeStampCalendar.setTimeInMillis(time);
+ int[] timeTag = new int[] { mGPSTimeStampCalendar.get(Calendar.HOUR_OF_DAY), 1,
+ mGPSTimeStampCalendar.get(Calendar.MINUTE), 1,
+ mGPSTimeStampCalendar.get(Calendar.SECOND), 1 };
+ nativeSetGpsTags(latTag, latRef, longTag, longRef, dateTag, timeTag);
+ return this;
+ }
+
+ /**
+ * Set the user description string to write.
+ *
+ * <p>
+ * This is equivalent to setting the TIFF "ImageDescription" tag {@code (0x010E)}.
+ * </p>
+ *
+ * @param description the user description string.
+ * @return this {@link #DngCreator} object.
+ */
+ @NonNull
+ public DngCreator setDescription(@NonNull String description) {
+ if (description == null) {
+ throw new IllegalArgumentException("Null description passed to setDescription.");
+ }
+ nativeSetDescription(description);
+ return this;
+ }
+
+ /**
+ * Write the {@link android.graphics.ImageFormat#RAW_SENSOR} pixel data to a DNG file with
+ * the currently configured metadata.
+ *
+ * <p>
+ * Raw pixel data must have 16 bits per pixel, and the input must contain at least
+     * {@code offset + 2 * width * height} bytes. The width and height of
+ * the input are taken from the width and height set in the {@link DngCreator} metadata tags,
+ * and will typically be equal to the width and height of
+ * {@link CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE}. Prior to
+ * API level 23, this was always the same as
+ * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE}.
+ * The pixel layout in the input is determined from the reported color filter arrangement (CFA)
+ * set in {@link CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT}. If insufficient
+ * metadata is available to write a well-formatted DNG file, an
+ * {@link java.lang.IllegalStateException} will be thrown.
+ * </p>
+ *
+ * @param dngOutput an {@link java.io.OutputStream} to write the DNG file to.
+ * @param size the {@link Size} of the image to write, in pixels.
+ * @param pixels an {@link java.io.InputStream} of pixel data to write.
+ * @param offset the offset of the raw image in bytes. This indicates how many bytes will
+ * be skipped in the input before any pixel data is read.
+ *
+ * @throws IOException if an error was encountered in the input or output stream.
+ * @throws java.lang.IllegalStateException if not enough metadata information has been
+ * set to write a well-formatted DNG file.
+     * @throws java.lang.IllegalArgumentException if the size passed in has a zero or
+     *             negative dimension, or if the offset is negative.
+ */
+ public void writeInputStream(@NonNull OutputStream dngOutput, @NonNull Size size,
+ @NonNull InputStream pixels, @IntRange(from=0) long offset) throws IOException {
+ if (dngOutput == null) {
+ throw new IllegalArgumentException("Null dngOutput passed to writeInputStream");
+ } else if (size == null) {
+ throw new IllegalArgumentException("Null size passed to writeInputStream");
+ } else if (pixels == null) {
+ throw new IllegalArgumentException("Null pixels passed to writeInputStream");
+ } else if (offset < 0) {
+ throw new IllegalArgumentException("Negative offset passed to writeInputStream");
+ }
+
+ int width = size.getWidth();
+ int height = size.getHeight();
+ if (width <= 0 || height <= 0) {
+ throw new IllegalArgumentException("Size with invalid width, height: (" + width + "," +
+ height + ") passed to writeInputStream");
+ }
+ nativeWriteInputStream(dngOutput, pixels, width, height, offset);
+ }
+
+ /**
+ * Write the {@link android.graphics.ImageFormat#RAW_SENSOR} pixel data to a DNG file with
+ * the currently configured metadata.
+ *
+ * <p>
+ * Raw pixel data must have 16 bits per pixel, and the input must contain at least
+     * {@code offset + 2 * width * height} bytes. The width and height of
+ * the input are taken from the width and height set in the {@link DngCreator} metadata tags,
+ * and will typically be equal to the width and height of
+ * {@link CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE}. Prior to
+ * API level 23, this was always the same as
+ * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE}.
+ * The pixel layout in the input is determined from the reported color filter arrangement (CFA)
+ * set in {@link CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT}. If insufficient
+ * metadata is available to write a well-formatted DNG file, an
+ * {@link java.lang.IllegalStateException} will be thrown.
+ * </p>
+ *
+ * <p>
+ * Any mark or limit set on this {@link ByteBuffer} is ignored, and will be cleared by this
+ * method.
+ * </p>
+ *
+ * @param dngOutput an {@link java.io.OutputStream} to write the DNG file to.
+ * @param size the {@link Size} of the image to write, in pixels.
+ * @param pixels an {@link java.nio.ByteBuffer} of pixel data to write.
+ * @param offset the offset of the raw image in bytes. This indicates how many bytes will
+ * be skipped in the input before any pixel data is read.
+ *
+ * @throws IOException if an error was encountered in the input or output stream.
+ * @throws java.lang.IllegalStateException if not enough metadata information has been
+ * set to write a well-formatted DNG file.
+ */
+ public void writeByteBuffer(@NonNull OutputStream dngOutput, @NonNull Size size,
+ @NonNull ByteBuffer pixels, @IntRange(from=0) long offset)
+ throws IOException {
+ if (dngOutput == null) {
+ throw new IllegalArgumentException("Null dngOutput passed to writeByteBuffer");
+ } else if (size == null) {
+ throw new IllegalArgumentException("Null size passed to writeByteBuffer");
+ } else if (pixels == null) {
+ throw new IllegalArgumentException("Null pixels passed to writeByteBuffer");
+ } else if (offset < 0) {
+ throw new IllegalArgumentException("Negative offset passed to writeByteBuffer");
+ }
+
+ int width = size.getWidth();
+ int height = size.getHeight();
+
+ writeByteBuffer(width, height, pixels, dngOutput, DEFAULT_PIXEL_STRIDE,
+ width * DEFAULT_PIXEL_STRIDE, offset);
+ }
+
+ /**
+ * Write the pixel data to a DNG file with the currently configured metadata.
+ *
+ * <p>
+ * For this method to succeed, the {@link android.media.Image} input must contain
+ * {@link android.graphics.ImageFormat#RAW_SENSOR} pixel data, otherwise an
+ * {@link java.lang.IllegalArgumentException} will be thrown.
+ * </p>
+ *
+ * @param dngOutput an {@link java.io.OutputStream} to write the DNG file to.
+ * @param pixels an {@link android.media.Image} to write.
+ *
+ * @throws java.io.IOException if an error was encountered in the output stream.
+ * @throws java.lang.IllegalArgumentException if an image with an unsupported format was used.
+ * @throws java.lang.IllegalStateException if not enough metadata information has been
+ * set to write a well-formatted DNG file.
+ */
+ public void writeImage(@NonNull OutputStream dngOutput, @NonNull Image pixels)
+ throws IOException {
+ if (dngOutput == null) {
+ throw new IllegalArgumentException("Null dngOutput to writeImage");
+ } else if (pixels == null) {
+ throw new IllegalArgumentException("Null pixels to writeImage");
+ }
+
+ int format = pixels.getFormat();
+ if (format != ImageFormat.RAW_SENSOR) {
+ throw new IllegalArgumentException("Unsupported image format " + format);
+ }
+
+ Image.Plane[] planes = pixels.getPlanes();
+ if (planes == null || planes.length <= 0) {
+ throw new IllegalArgumentException("Image with no planes passed to writeImage");
+ }
+
+ ByteBuffer buf = planes[0].getBuffer();
+ writeByteBuffer(pixels.getWidth(), pixels.getHeight(), buf, dngOutput,
+ planes[0].getPixelStride(), planes[0].getRowStride(), 0);
+ }
+
+ @Override
+ public void close() {
+ nativeDestroy();
+ }
+
+ /**
+ * Max width or height dimension for thumbnails.
+ */
+ public static final int MAX_THUMBNAIL_DIMENSION = 256; // max pixel dimension for TIFF/EP
+
+ @Override
+ protected void finalize() throws Throwable {
+ try {
+ close();
+ } finally {
+ super.finalize();
+ }
+ }
+
+ private static final String GPS_LAT_REF_NORTH = "N";
+ private static final String GPS_LAT_REF_SOUTH = "S";
+ private static final String GPS_LONG_REF_EAST = "E";
+ private static final String GPS_LONG_REF_WEST = "W";
+
+ private static final String GPS_DATE_FORMAT_STR = "yyyy:MM:dd";
+ private static final String TIFF_DATETIME_FORMAT = "yyyy:MM:dd HH:mm:ss";
+ private static final DateFormat sExifGPSDateStamp = new SimpleDateFormat(GPS_DATE_FORMAT_STR);
+ private final Calendar mGPSTimeStampCalendar = Calendar
+ .getInstance(TimeZone.getTimeZone("UTC"));
+
+ static {
+ sExifGPSDateStamp.setTimeZone(TimeZone.getTimeZone("UTC"));
+ }
+
+ private static final int DEFAULT_PIXEL_STRIDE = 2; // bytes per sample
+    private static final int BYTES_PER_RGB_PIX = 3; // bytes per pixel
+
+ // TIFF tag values needed to map between public API and TIFF spec
+ private static final int TAG_ORIENTATION_UNKNOWN = 9;
+
+ /**
+ * Offset, rowStride, and pixelStride are given in bytes. Height and width are given in pixels.
+ */
+ private void writeByteBuffer(int width, int height, ByteBuffer pixels, OutputStream dngOutput,
+ int pixelStride, int rowStride, long offset) throws IOException {
+ if (width <= 0 || height <= 0) {
+ throw new IllegalArgumentException("Image with invalid width, height: (" + width + "," +
+ height + ") passed to write");
+ }
+ long capacity = pixels.capacity();
+ long totalSize = ((long) rowStride) * height + offset;
+ if (capacity < totalSize) {
+ throw new IllegalArgumentException("Image size " + capacity +
+ " is too small (must be larger than " + totalSize + ")");
+ }
+ int minRowStride = pixelStride * width;
+ if (minRowStride > rowStride) {
+ throw new IllegalArgumentException("Invalid image pixel stride, row byte width " +
+ minRowStride + " is too large, expecting " + rowStride);
+ }
+ pixels.clear(); // Reset mark and limit
+ nativeWriteImage(dngOutput, width, height, pixels, rowStride, pixelStride, offset,
+ pixels.isDirect());
+ pixels.clear();
+ }
+
+ /**
+ * Convert a single YUV pixel to RGB.
+ */
+ private static void yuvToRgb(byte[] yuvData, int outOffset, /*out*/byte[] rgbOut) {
+ final int COLOR_MAX = 255;
+
+ float y = yuvData[0] & 0xFF; // Y channel
+ float cb = yuvData[1] & 0xFF; // U channel
+ float cr = yuvData[2] & 0xFF; // V channel
+
+ // convert YUV -> RGB (from JFIF's "Conversion to and from RGB" section)
+ float r = y + 1.402f * (cr - 128);
+ float g = y - 0.34414f * (cb - 128) - 0.71414f * (cr - 128);
+ float b = y + 1.772f * (cb - 128);
+
+ // clamp to [0,255]
+ rgbOut[outOffset] = (byte) Math.max(0, Math.min(COLOR_MAX, r));
+ rgbOut[outOffset + 1] = (byte) Math.max(0, Math.min(COLOR_MAX, g));
+ rgbOut[outOffset + 2] = (byte) Math.max(0, Math.min(COLOR_MAX, b));
+ }
+
+ /**
+ * Convert a single {@link Color} pixel to RGB.
+ */
+ private static void colorToRgb(int color, int outOffset, /*out*/byte[] rgbOut) {
+ rgbOut[outOffset] = (byte) Color.red(color);
+ rgbOut[outOffset + 1] = (byte) Color.green(color);
+ rgbOut[outOffset + 2] = (byte) Color.blue(color);
+ // Discards Alpha
+ }
+
+ /**
+ * Generate a direct RGB {@link ByteBuffer} from a YUV420_888 {@link Image}.
+ */
+ private static ByteBuffer convertToRGB(Image yuvImage) {
+ // TODO: Optimize this with renderscript intrinsic.
+ int width = yuvImage.getWidth();
+ int height = yuvImage.getHeight();
+ ByteBuffer buf = ByteBuffer.allocateDirect(BYTES_PER_RGB_PIX * width * height);
+
+ Image.Plane yPlane = yuvImage.getPlanes()[0];
+ Image.Plane uPlane = yuvImage.getPlanes()[1];
+ Image.Plane vPlane = yuvImage.getPlanes()[2];
+
+ ByteBuffer yBuf = yPlane.getBuffer();
+ ByteBuffer uBuf = uPlane.getBuffer();
+ ByteBuffer vBuf = vPlane.getBuffer();
+
+ yBuf.rewind();
+ uBuf.rewind();
+ vBuf.rewind();
+
+ int yRowStride = yPlane.getRowStride();
+ int vRowStride = vPlane.getRowStride();
+ int uRowStride = uPlane.getRowStride();
+
+ int yPixStride = yPlane.getPixelStride();
+ int vPixStride = vPlane.getPixelStride();
+ int uPixStride = uPlane.getPixelStride();
+
+ byte[] yuvPixel = { 0, 0, 0 };
+ byte[] yFullRow = new byte[yPixStride * (width - 1) + 1];
+ byte[] uFullRow = new byte[uPixStride * (width / 2 - 1) + 1];
+ byte[] vFullRow = new byte[vPixStride * (width / 2 - 1) + 1];
+ byte[] finalRow = new byte[BYTES_PER_RGB_PIX * width];
+ for (int i = 0; i < height; i++) {
+ int halfH = i / 2;
+ yBuf.position(yRowStride * i);
+ yBuf.get(yFullRow);
+ uBuf.position(uRowStride * halfH);
+ uBuf.get(uFullRow);
+ vBuf.position(vRowStride * halfH);
+ vBuf.get(vFullRow);
+ for (int j = 0; j < width; j++) {
+ int halfW = j / 2;
+ yuvPixel[0] = yFullRow[yPixStride * j];
+ yuvPixel[1] = uFullRow[uPixStride * halfW];
+ yuvPixel[2] = vFullRow[vPixStride * halfW];
+ yuvToRgb(yuvPixel, j * BYTES_PER_RGB_PIX, /*out*/finalRow);
+ }
+ buf.put(finalRow);
+ }
+
+ yBuf.rewind();
+ uBuf.rewind();
+ vBuf.rewind();
+ buf.rewind();
+ return buf;
+ }
+
+ /**
+ * Generate a direct RGB {@link ByteBuffer} from a {@link Bitmap}.
+ */
+ private static ByteBuffer convertToRGB(Bitmap argbBitmap) {
+ // TODO: Optimize this.
+ int width = argbBitmap.getWidth();
+ int height = argbBitmap.getHeight();
+ ByteBuffer buf = ByteBuffer.allocateDirect(BYTES_PER_RGB_PIX * width * height);
+
+ int[] pixelRow = new int[width];
+ byte[] finalRow = new byte[BYTES_PER_RGB_PIX * width];
+ for (int i = 0; i < height; i++) {
+ argbBitmap.getPixels(pixelRow, /*offset*/0, /*stride*/width, /*x*/0, /*y*/i,
+ /*width*/width, /*height*/1);
+ for (int j = 0; j < width; j++) {
+ colorToRgb(pixelRow[j], j * BYTES_PER_RGB_PIX, /*out*/finalRow);
+ }
+ buf.put(finalRow);
+ }
+
+ buf.rewind();
+ return buf;
+ }
+
+ /**
+ * Convert coordinate to EXIF GPS tag format.
+ */
+ private static int[] toExifLatLong(double value) {
+ // convert to the format dd/1 mm/1 ssss/100
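+        // e.g. 37.7749 -> { 37, 1, 46, 1, 2964, 100 }, i.e. 37 deg 46' 29.64"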
+ value = Math.abs(value);
+ int degrees = (int) value;
+ value = (value - degrees) * 60;
+ int minutes = (int) value;
+ value = (value - minutes) * 6000;
+ int seconds = (int) value;
+ return new int[] { degrees, 1, minutes, 1, seconds, 100 };
+ }
+
+ /**
+ * This field is used by native code, do not access or modify.
+ */
+ private long mNativeContext;
+
+ private static native void nativeClassInit();
+
+ private synchronized native void nativeInit(CameraMetadataNative nativeCharacteristics,
+ CameraMetadataNative nativeResult,
+ String captureTime);
+
+ private synchronized native void nativeDestroy();
+
+ private synchronized native void nativeSetOrientation(int orientation);
+
+ private synchronized native void nativeSetDescription(String description);
+
+ private synchronized native void nativeSetGpsTags(int[] latTag, String latRef, int[] longTag,
+ String longRef, String dateTag,
+ int[] timeTag);
+
+ private synchronized native void nativeSetThumbnail(ByteBuffer buffer, int width, int height);
+
+ private synchronized native void nativeWriteImage(OutputStream out, int width, int height,
+ ByteBuffer rawBuffer, int rowStride,
+ int pixStride, long offset, boolean isDirect)
+ throws IOException;
+
+ private synchronized native void nativeWriteInputStream(OutputStream out, InputStream rawStream,
+ int width, int height, long offset)
+ throws IOException;
+
+ static {
+ nativeClassInit();
+ }
+}
diff --git a/android/hardware/camera2/TotalCaptureResult.java b/android/hardware/camera2/TotalCaptureResult.java
new file mode 100644
index 00000000..657745c8
--- /dev/null
+++ b/android/hardware/camera2/TotalCaptureResult.java
@@ -0,0 +1,114 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2;
+
+import android.annotation.NonNull;
+import android.hardware.camera2.impl.CameraMetadataNative;
+import android.hardware.camera2.impl.CaptureResultExtras;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * <p>The total assembled results of a single image capture from the image sensor.</p>
+ *
+ * <p>Contains the final configuration for the capture hardware (sensor, lens,
+ * flash), the processing pipeline, the control algorithms, and the output
+ * buffers.</p>
+ *
+ * <p>A {@code TotalCaptureResult} is produced by a {@link CameraDevice} after processing a
+ * {@link CaptureRequest}. All properties listed for capture requests can also
+ * be queried on the capture result, to determine the final values used for
+ * capture. The result also includes additional metadata about the state of the
+ * camera device during the capture.</p>
+ *
+ * <p>All properties returned by {@link CameraCharacteristics#getAvailableCaptureResultKeys()}
+ * are available (that is, {@link CaptureResult#get} will return non-{@code null}) if and only if
+ * the key was enabled by the request. A few keys such as
+ * {@link CaptureResult#STATISTICS_FACES} are disabled by default unless enabled with a switch (such
+ * as {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE}). Refer to each key's documentation on
+ * a case-by-case basis.</p>
+ *
+ * <p>{@link TotalCaptureResult} objects are immutable.</p>
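+ *
+ * <p>A minimal sketch of consuming a total result in a capture callback (illustrative only;
+ * the key queried here is an arbitrary example):</p>
+ * <pre><code>
+ * CameraCaptureSession.CaptureCallback callback = new CameraCaptureSession.CaptureCallback() {
+ *     &#64;Override
+ *     public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
+ *             TotalCaptureResult result) {
+ *         // The final exposure time actually used for this capture:
+ *         Long exposureNs = result.get(CaptureResult.SENSOR_EXPOSURE_TIME);
+ *     }
+ * };
+ * </code></pre>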
+ *
+ * @see CameraDevice.CaptureCallback#onCaptureCompleted
+ */
+public final class TotalCaptureResult extends CaptureResult {
+
+ private final List<CaptureResult> mPartialResults;
+ private final int mSessionId;
+
+ /**
+     * Takes ownership of the passed-in camera metadata and the partial results.
+     *
+     * @param partials a list of partial results; if {@code null}, an empty list is used instead
+ * @hide
+ */
+ public TotalCaptureResult(CameraMetadataNative results, CaptureRequest parent,
+ CaptureResultExtras extras, List<CaptureResult> partials, int sessionId) {
+ super(results, parent, extras);
+
+ if (partials == null) {
+ mPartialResults = new ArrayList<>();
+ } else {
+ mPartialResults = partials;
+ }
+
+ mSessionId = sessionId;
+ }
+
+ /**
+ * Creates a request-less result.
+ *
+ * <p><strong>For testing only.</strong></p>
+ * @hide
+ */
+ public TotalCaptureResult(CameraMetadataNative results, int sequenceId) {
+ super(results, sequenceId);
+
+ mPartialResults = new ArrayList<>();
+ mSessionId = CameraCaptureSession.SESSION_ID_NONE;
+ }
+
+ /**
+ * Get the read-only list of partial results that compose this total result.
+ *
+     * <p>The returned list is unmodifiable; attempting to modify it will result in a
+ * {@code UnsupportedOperationException} being thrown.</p>
+ *
+     * <p>The list size will be between {@code 0} and
+     * {@link CameraCharacteristics#REQUEST_PARTIAL_RESULT_COUNT} inclusive, with elements in
+     * ascending order of when {@link CameraCaptureSession.CaptureCallback#onCaptureProgressed}
+     * was invoked.</p>
+ *
+ * @return unmodifiable list of partial results
+ */
+ @NonNull
+ public List<CaptureResult> getPartialResults() {
+ return Collections.unmodifiableList(mPartialResults);
+ }
+
+ /**
+ * Get the ID of the session where the capture request of this result was submitted.
+ *
+ * @return The session ID
+ * @hide
+ */
+ public int getSessionId() {
+ return mSessionId;
+ }
+}
diff --git a/android/hardware/camera2/dispatch/ArgumentReplacingDispatcher.java b/android/hardware/camera2/dispatch/ArgumentReplacingDispatcher.java
new file mode 100644
index 00000000..866f370f
--- /dev/null
+++ b/android/hardware/camera2/dispatch/ArgumentReplacingDispatcher.java
@@ -0,0 +1,85 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.dispatch;
+
+import java.lang.reflect.Method;
+
+import static com.android.internal.util.Preconditions.*;
+
+/**
+ * A dispatcher that replaces the argument at a given index with another object
+ * before forwarding each call.
+ *
+ * <p>For example, we can override {@code void onSomething(int x)} calls to have {@code x} always
+ * equal to 1. Or, if using this with a duck typing dispatcher, we could even overwrite {@code x}
+ * to be something that's not an {@code int}.</p>
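+ *
+ * <p>A minimal sketch (assuming a hypothetical {@code Listener} interface with
+ * {@code void onSomething(int x)}):</p>
+ * <pre><code>
+ * Dispatchable&lt;Listener&gt; target = new InvokeDispatcher&lt;&gt;(realListener);
+ * // Force argument 0 of every dispatched call to the constant 1:
+ * Dispatchable&lt;Listener&gt; wrapped =
+ *         new ArgumentReplacingDispatcher&lt;Listener, Integer&gt;(target, 0, 1);
+ * </code></pre>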
+ *
+ * @param <T>
+ * source dispatch type, whose methods with {@link #dispatch} will be called
+ * @param <TArg>
+ * argument replacement type, args in {@link #dispatch} matching {@code argumentIndex}
+ *          will be overridden with objects of this type
+ */
+public class ArgumentReplacingDispatcher<T, TArg> implements Dispatchable<T> {
+
+ private final Dispatchable<T> mTarget;
+ private final int mArgumentIndex;
+ private final TArg mReplaceWith;
+
+ /**
+ * Create a new argument replacing dispatcher; dispatches are forwarded to {@code target}
+ * after the argument is replaced.
+ *
+ * <p>For example, if a method {@code onAction(T1 a, Integer b, T2 c)} is invoked, and we wanted
+ * to replace all occurrences of {@code b} with {@code 0xDEADBEEF}, we would set
+ * {@code argumentIndex = 1} and {@code replaceWith = 0xDEADBEEF}.</p>
+ *
+     * <p>If a dispatched method has no argument at {@code argumentIndex} (i.e. fewer than
+     * {@code argumentIndex + 1} arguments), it is passed through with the arguments unchanged.</p>
+ *
+ * @param target destination dispatch type, methods will be redirected to this dispatcher
+ * @param argumentIndex the numeric index of the argument {@code >= 0}
+ * @param replaceWith arguments matching {@code argumentIndex} will be replaced with this object
+ */
+ public ArgumentReplacingDispatcher(Dispatchable<T> target, int argumentIndex,
+ TArg replaceWith) {
+ mTarget = checkNotNull(target, "target must not be null");
+ mArgumentIndex = checkArgumentNonnegative(argumentIndex,
+ "argumentIndex must not be negative");
+ mReplaceWith = checkNotNull(replaceWith, "replaceWith must not be null");
+ }
+
+ @Override
+ public Object dispatch(Method method, Object[] args) throws Throwable {
+
+ if (args.length > mArgumentIndex) {
+ args = arrayCopy(args); // don't change in-place since it can affect upstream dispatches
+ args[mArgumentIndex] = mReplaceWith;
+ }
+
+ return mTarget.dispatch(method, args);
+ }
+
+ private static Object[] arrayCopy(Object[] array) {
+ int length = array.length;
+ Object[] newArray = new Object[length];
+ for (int i = 0; i < length; ++i) {
+ newArray[i] = array[i];
+ }
+ return newArray;
+ }
+}
diff --git a/android/hardware/camera2/dispatch/BroadcastDispatcher.java b/android/hardware/camera2/dispatch/BroadcastDispatcher.java
new file mode 100644
index 00000000..fe575b27
--- /dev/null
+++ b/android/hardware/camera2/dispatch/BroadcastDispatcher.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.dispatch;
+
+
+import java.lang.reflect.Method;
+import java.util.Arrays;
+import java.util.List;
+
+import static com.android.internal.util.Preconditions.*;
+
+/**
+ * Broadcast a single dispatch into multiple other dispatchables.
+ *
+ * <p>Every time {@link #dispatch} is invoked, all the broadcast targets will
+ * see the same dispatch as well. The first target's return value is returned.</p>
+ *
+ * <p>This enables a single listener to be converted into a multi-listener.</p>
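+ *
+ * <p>A minimal sketch (assuming a hypothetical {@code Listener} type):</p>
+ * <pre><code>
+ * Dispatchable&lt;Listener&gt; both = new BroadcastDispatcher&lt;Listener&gt;(
+ *         new InvokeDispatcher&lt;&gt;(listenerA),
+ *         new InvokeDispatcher&lt;&gt;(listenerB));
+ * // Every dispatch now reaches listenerA and then listenerB;
+ * // listenerA's return value is the one returned.
+ * </code></pre>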
+ */
+public class BroadcastDispatcher<T> implements Dispatchable<T> {
+
+ private final List<Dispatchable<T>> mDispatchTargets;
+
+ /**
+ * Create a broadcast dispatcher from the supplied dispatch targets.
+ *
+ * @param dispatchTargets one or more targets to dispatch to
+ */
+ @SafeVarargs
+ public BroadcastDispatcher(Dispatchable<T>... dispatchTargets) {
+ mDispatchTargets = Arrays.asList(
+ checkNotNull(dispatchTargets, "dispatchTargets must not be null"));
+ }
+
+ @Override
+ public Object dispatch(Method method, Object[] args) throws Throwable {
+ Object result = null;
+ boolean gotResult = false;
+
+ for (Dispatchable<T> dispatchTarget : mDispatchTargets) {
+ Object localResult = dispatchTarget.dispatch(method, args);
+
+ if (!gotResult) {
+ gotResult = true;
+ result = localResult;
+ }
+ }
+
+ return result;
+ }
+}
diff --git a/android/hardware/camera2/dispatch/Dispatchable.java b/android/hardware/camera2/dispatch/Dispatchable.java
new file mode 100644
index 00000000..753103f4
--- /dev/null
+++ b/android/hardware/camera2/dispatch/Dispatchable.java
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.dispatch;
+
+import java.lang.reflect.Method;
+
+/**
+ * Dynamically dispatch a method and its argument to some object.
+ *
+ * <p>This can be used to intercept method calls and do work around them, redirect work,
+ * or block calls entirely.</p>
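+ *
+ * <p>A minimal sketch of a pass-through implementation that logs each call (hypothetical):</p>
+ * <pre><code>
+ * class LoggingDispatcher&lt;T&gt; implements Dispatchable&lt;T&gt; {
+ *     private final Dispatchable&lt;T&gt; mNext;
+ *     LoggingDispatcher(Dispatchable&lt;T&gt; next) { mNext = next; }
+ *
+ *     &#64;Override
+ *     public Object dispatch(Method method, Object[] args) throws Throwable {
+ *         Log.v("LoggingDispatcher", "dispatching " + method.getName());
+ *         return mNext.dispatch(method, args);
+ *     }
+ * }
+ * </code></pre>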
+ */
+public interface Dispatchable<T> {
+ /**
+ * Dispatch the method and arguments to this object.
+ * @param method a method defined in class {@code T}
+ * @param args arguments corresponding to said {@code method}
+ * @return the object returned when invoking {@code method}
+ * @throws Throwable any exception that might have been raised while invoking the method
+ */
+ public Object dispatch(Method method, Object[] args) throws Throwable;
+}
diff --git a/android/hardware/camera2/dispatch/DuckTypingDispatcher.java b/android/hardware/camera2/dispatch/DuckTypingDispatcher.java
new file mode 100644
index 00000000..75f97e46
--- /dev/null
+++ b/android/hardware/camera2/dispatch/DuckTypingDispatcher.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.dispatch;
+
+
+import java.lang.reflect.Method;
+
+import static com.android.internal.util.Preconditions.*;
+
+/**
+ * Duck typing dispatcher; converts dispatched method calls from one class to another by
+ * looking up equivalently-named methods at runtime.
+ *
+ * <p>For example, if two types have identical method names and arguments, but
+ * are not subclasses/subinterfaces of each other, this dispatcher will allow calls to be
+ * made from one type to the other.</p>
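+ *
+ * <p>A minimal sketch (hypothetical {@code OldCallback}/{@code NewCallback} types that share
+ * method names and signatures):</p>
+ * <pre><code>
+ * Dispatchable&lt;NewCallback&gt; target = new InvokeDispatcher&lt;&gt;(newCallback);
+ * // Calls dispatched as OldCallback methods are matched to NewCallback methods by name:
+ * Dispatchable&lt;OldCallback&gt; duck =
+ *         new DuckTypingDispatcher&lt;OldCallback, NewCallback&gt;(target, NewCallback.class);
+ * </code></pre>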
+ *
+ * @param <TFrom> source dispatch type, whose methods with {@link #dispatch} will be called
+ * @param <T> destination dispatch type, methods will be converted to the class of {@code T}
+ */
+public class DuckTypingDispatcher<TFrom, T> implements Dispatchable<TFrom> {
+
+ private final MethodNameInvoker<T> mDuck;
+
+ /**
+ * Create a new duck typing dispatcher.
+ *
+ * @param target destination dispatch type, methods will be redirected to this dispatcher
+     * @param targetClass destination dispatch class; methods will be converted to this class's
+     *                    equivalently-named methods
+ */
+ public DuckTypingDispatcher(Dispatchable<T> target, Class<T> targetClass) {
+ checkNotNull(targetClass, "targetClass must not be null");
+ checkNotNull(target, "target must not be null");
+
+ mDuck = new MethodNameInvoker<T>(target, targetClass);
+ }
+
+ @Override
+ public Object dispatch(Method method, Object[] args) {
+ return mDuck.invoke(method.getName(), args);
+ }
+}
diff --git a/android/hardware/camera2/dispatch/HandlerDispatcher.java b/android/hardware/camera2/dispatch/HandlerDispatcher.java
new file mode 100644
index 00000000..f8e9d49a
--- /dev/null
+++ b/android/hardware/camera2/dispatch/HandlerDispatcher.java
@@ -0,0 +1,85 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.dispatch;
+
+import android.hardware.camera2.utils.UncheckedThrow;
+import android.os.Handler;
+import android.util.Log;
+
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+
+import static com.android.internal.util.Preconditions.*;
+
+/**
+ * Forward all interface calls into a handler by posting it as a {@code Runnable}.
+ *
+ * <p>All calls will return immediately; functions with return values will return a default
+ * value of {@code null}, {@code 0}, or {@code false} where that value is legal.</p>
+ *
+ * <p>Any exceptions thrown on the handler while trying to invoke a method
+ * will be re-thrown. Throwing checked exceptions on a handler which doesn't expect any
+ * checked exceptions to be thrown will result in "undefined" behavior
+ * (although in practice it is usually thrown as normal).</p>
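+ *
+ * <p>A minimal sketch (assuming a hypothetical {@code Listener} type and an existing
+ * {@code handler}):</p>
+ * <pre><code>
+ * Dispatchable&lt;Listener&gt; onHandler = new HandlerDispatcher&lt;&gt;(
+ *         new InvokeDispatcher&lt;&gt;(realListener), handler);
+ * // Each dispatch returns immediately; realListener runs on the handler's thread.
+ * </code></pre>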
+ */
+public class HandlerDispatcher<T> implements Dispatchable<T> {
+
+ private static final String TAG = "HandlerDispatcher";
+
+ private final Dispatchable<T> mDispatchTarget;
+ private final Handler mHandler;
+
+ /**
+     * Create a dispatcher that forwards its dispatch calls by posting
+     * them onto the {@code handler} as a {@code Runnable}.
+     *
+     * @param dispatchTarget the destination whose method calls will be redirected into the handler
+     * @param handler all calls into {@code dispatchTarget} will be posted onto this handler
+ */
+ public HandlerDispatcher(Dispatchable<T> dispatchTarget, Handler handler) {
+ mDispatchTarget = checkNotNull(dispatchTarget, "dispatchTarget must not be null");
+ mHandler = checkNotNull(handler, "handler must not be null");
+ }
+
+ @Override
+ public Object dispatch(final Method method, final Object[] args) throws Throwable {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ try {
+ mDispatchTarget.dispatch(method, args);
+ } catch (InvocationTargetException e) {
+ Throwable t = e.getTargetException();
+ // Potential UB. Hopefully 't' is a runtime exception.
+ UncheckedThrow.throwAnyException(t);
+ } catch (IllegalAccessException e) {
+ // Impossible
+ Log.wtf(TAG, "IllegalAccessException while invoking " + method, e);
+ } catch (IllegalArgumentException e) {
+ // Impossible
+ Log.wtf(TAG, "IllegalArgumentException while invoking " + method, e);
+ } catch (Throwable e) {
+ UncheckedThrow.throwAnyException(e);
+ }
+ }
+ });
+
+ // TODO handle primitive return values that would avoid NPE if unboxed
+ return null;
+ }
+}
diff --git a/android/hardware/camera2/dispatch/InvokeDispatcher.java b/android/hardware/camera2/dispatch/InvokeDispatcher.java
new file mode 100644
index 00000000..ac5f5267
--- /dev/null
+++ b/android/hardware/camera2/dispatch/InvokeDispatcher.java
@@ -0,0 +1,55 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.dispatch;
+
+import android.hardware.camera2.utils.UncheckedThrow;
+import android.util.Log;
+
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+
+import static com.android.internal.util.Preconditions.*;
+
+
+public class InvokeDispatcher<T> implements Dispatchable<T> {
+
+ private static final String TAG = "InvocationSink";
+ private final T mTarget;
+
+ public InvokeDispatcher(T target) {
+ mTarget = checkNotNull(target, "target must not be null");
+ }
+
+ @Override
+ public Object dispatch(Method method, Object[] args) {
+ try {
+ return method.invoke(mTarget, args);
+ } catch (InvocationTargetException e) {
+ Throwable t = e.getTargetException();
+ // Potential UB. Hopefully 't' is a runtime exception.
+ UncheckedThrow.throwAnyException(t);
+ } catch (IllegalAccessException e) {
+ // Impossible
+ Log.wtf(TAG, "IllegalAccessException while invoking " + method, e);
+ } catch (IllegalArgumentException e) {
+ // Impossible
+ Log.wtf(TAG, "IllegalArgumentException while invoking " + method, e);
+ }
+
+ // unreachable
+ return null;
+ }
+}
diff --git a/android/hardware/camera2/dispatch/MethodNameInvoker.java b/android/hardware/camera2/dispatch/MethodNameInvoker.java
new file mode 100644
index 00000000..8296b7a9
--- /dev/null
+++ b/android/hardware/camera2/dispatch/MethodNameInvoker.java
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.dispatch;
+
+import static com.android.internal.util.Preconditions.checkNotNull;
+
+import android.hardware.camera2.utils.UncheckedThrow;
+
+import java.lang.reflect.Method;
+import java.util.concurrent.ConcurrentHashMap;
+
+/**
+ * Invoke a method on a dispatchable by its name (without knowing the {@code Method} ahead of time).
+ *
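+ * <p>A minimal sketch (assuming a hypothetical {@code Listener} type with
+ * {@code void onEvent(int value)}):</p>
+ * <pre><code>
+ * MethodNameInvoker&lt;Listener&gt; invoker =
+ *         new MethodNameInvoker&lt;&gt;(new InvokeDispatcher&lt;&gt;(realListener), Listener.class);
+ * invoker.invoke("onEvent", 42); // matched by name and argument count, then cached
+ * </code></pre>
+ *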
+ * @param <T> destination dispatch type, methods will be looked up in the class of {@code T}
+ */
+public class MethodNameInvoker<T> {
+
+ private final Dispatchable<T> mTarget;
+ private final Class<T> mTargetClass;
+ private final Method[] mTargetClassMethods;
+ private final ConcurrentHashMap<String, Method> mMethods =
+ new ConcurrentHashMap<>();
+
+ /**
+ * Create a new method name invoker.
+ *
+ * @param target destination dispatch type, invokes will be redirected to this dispatcher
+ * @param targetClass destination dispatch class, the invoked methods will be from this class
+ */
+ public MethodNameInvoker(Dispatchable<T> target, Class<T> targetClass) {
+ mTargetClass = targetClass;
+ mTargetClassMethods = targetClass.getMethods();
+ mTarget = target;
+ }
+
+ /**
+ * Invoke a method by its name.
+ *
+     * <p>If more than one method with this name exists in {@code targetClass}, the first one with
+     * the right number of arguments will be used, and later calls will all use that method.</p>
+ *
+ * @param methodName
+ * The name of the method, which will be matched 1:1 to the destination method
+ * @param params
+ * Variadic parameter list.
+ * @return
+ * The same kind of value that would normally be returned by calling {@code methodName}
+ * statically.
+ *
+ * @throws IllegalArgumentException if {@code methodName} does not exist on the target class
+ * @throws Throwable will rethrow anything that the target method would normally throw
+ */
+ @SuppressWarnings("unchecked")
+ public <K> K invoke(String methodName, Object... params) {
+ checkNotNull(methodName, "methodName must not be null");
+
+ Method targetMethod = mMethods.get(methodName);
+ if (targetMethod == null) {
+ for (Method method : mTargetClassMethods) {
+ // TODO future: match types of params if possible
+ if (method.getName().equals(methodName) &&
+ (params.length == method.getParameterTypes().length) ) {
+ targetMethod = method;
+ mMethods.put(methodName, targetMethod);
+ break;
+ }
+ }
+
+ if (targetMethod == null) {
+ throw new IllegalArgumentException(
+ "Method " + methodName + " does not exist on class " + mTargetClass);
+ }
+ }
+
+ try {
+ return (K) mTarget.dispatch(targetMethod, params);
+ } catch (Throwable e) {
+ UncheckedThrow.throwAnyException(e);
+ // unreachable
+ return null;
+ }
+ }
+}
diff --git a/android/hardware/camera2/dispatch/NullDispatcher.java b/android/hardware/camera2/dispatch/NullDispatcher.java
new file mode 100644
index 00000000..fada075c
--- /dev/null
+++ b/android/hardware/camera2/dispatch/NullDispatcher.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.dispatch;
+
+
+import java.lang.reflect.Method;
+
+/**
+ * Do nothing when dispatching; follows the null object pattern.
+ */
+public class NullDispatcher<T> implements Dispatchable<T> {
+ /**
+ * Create a dispatcher that does nothing when dispatched to.
+ */
+ public NullDispatcher() {
+ }
+
+ /**
+ * Do nothing; all parameters are ignored.
+ */
+ @Override
+ public Object dispatch(Method method, Object[] args) {
+ return null;
+ }
+}
diff --git a/android/hardware/camera2/impl/CallbackProxies.java b/android/hardware/camera2/impl/CallbackProxies.java
new file mode 100644
index 00000000..c9eecf10
--- /dev/null
+++ b/android/hardware/camera2/impl/CallbackProxies.java
@@ -0,0 +1,202 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.impl;
+
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CaptureFailure;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.TotalCaptureResult;
+import android.hardware.camera2.dispatch.Dispatchable;
+import android.hardware.camera2.dispatch.MethodNameInvoker;
+import android.view.Surface;
+
+import static com.android.internal.util.Preconditions.*;
+
+/**
+ * Proxy invocations of the camera2 API callbacks into a {@link Dispatchable}.
+ *
+ * <p>Since abstract classes do not support Java's dynamic {@code Proxy}, we have
+ * to use our own proxy mechanism.</p>
+ */
+public class CallbackProxies {
+
+ // TODO: replace with codegen
+
+ public static class DeviceStateCallbackProxy extends CameraDeviceImpl.StateCallbackKK {
+ private final MethodNameInvoker<CameraDeviceImpl.StateCallbackKK> mProxy;
+
+ public DeviceStateCallbackProxy(
+ Dispatchable<CameraDeviceImpl.StateCallbackKK> dispatchTarget) {
+ dispatchTarget = checkNotNull(dispatchTarget, "dispatchTarget must not be null");
+ mProxy = new MethodNameInvoker<>(dispatchTarget, CameraDeviceImpl.StateCallbackKK.class);
+ }
+
+ @Override
+ public void onOpened(CameraDevice camera) {
+ mProxy.invoke("onOpened", camera);
+ }
+
+ @Override
+ public void onDisconnected(CameraDevice camera) {
+ mProxy.invoke("onDisconnected", camera);
+ }
+
+ @Override
+ public void onError(CameraDevice camera, int error) {
+ mProxy.invoke("onError", camera, error);
+ }
+
+ @Override
+ public void onUnconfigured(CameraDevice camera) {
+ mProxy.invoke("onUnconfigured", camera);
+ }
+
+ @Override
+ public void onActive(CameraDevice camera) {
+ mProxy.invoke("onActive", camera);
+ }
+
+ @Override
+ public void onBusy(CameraDevice camera) {
+ mProxy.invoke("onBusy", camera);
+ }
+
+ @Override
+ public void onClosed(CameraDevice camera) {
+ mProxy.invoke("onClosed", camera);
+ }
+
+ @Override
+ public void onIdle(CameraDevice camera) {
+ mProxy.invoke("onIdle", camera);
+ }
+ }
+
+ @SuppressWarnings("deprecation")
+ public static class DeviceCaptureCallbackProxy implements CameraDeviceImpl.CaptureCallback {
+ private final MethodNameInvoker<CameraDeviceImpl.CaptureCallback> mProxy;
+
+ public DeviceCaptureCallbackProxy(
+ Dispatchable<CameraDeviceImpl.CaptureCallback> dispatchTarget) {
+ dispatchTarget = checkNotNull(dispatchTarget, "dispatchTarget must not be null");
+ mProxy = new MethodNameInvoker<>(dispatchTarget, CameraDeviceImpl.CaptureCallback.class);
+ }
+
+ @Override
+ public void onCaptureStarted(CameraDevice camera,
+ CaptureRequest request, long timestamp, long frameNumber) {
+ mProxy.invoke("onCaptureStarted", camera, request, timestamp, frameNumber);
+ }
+
+ @Override
+ public void onCapturePartial(CameraDevice camera,
+ CaptureRequest request, CaptureResult result) {
+ mProxy.invoke("onCapturePartial", camera, request, result);
+ }
+
+ @Override
+ public void onCaptureProgressed(CameraDevice camera,
+ CaptureRequest request, CaptureResult partialResult) {
+ mProxy.invoke("onCaptureProgressed", camera, request, partialResult);
+ }
+
+ @Override
+ public void onCaptureCompleted(CameraDevice camera,
+ CaptureRequest request, TotalCaptureResult result) {
+ mProxy.invoke("onCaptureCompleted", camera, request, result);
+ }
+
+ @Override
+ public void onCaptureFailed(CameraDevice camera,
+ CaptureRequest request, CaptureFailure failure) {
+ mProxy.invoke("onCaptureFailed", camera, request, failure);
+ }
+
+ @Override
+ public void onCaptureSequenceCompleted(CameraDevice camera,
+ int sequenceId, long frameNumber) {
+ mProxy.invoke("onCaptureSequenceCompleted", camera, sequenceId, frameNumber);
+ }
+
+ @Override
+ public void onCaptureSequenceAborted(CameraDevice camera,
+ int sequenceId) {
+ mProxy.invoke("onCaptureSequenceAborted", camera, sequenceId);
+ }
+
+ @Override
+ public void onCaptureBufferLost(CameraDevice camera,
+ CaptureRequest request, Surface target, long frameNumber) {
+ mProxy.invoke("onCaptureBufferLost", camera, request, target, frameNumber);
+ }
+
+ }
+
+ public static class SessionStateCallbackProxy
+ extends CameraCaptureSession.StateCallback {
+ private final MethodNameInvoker<CameraCaptureSession.StateCallback> mProxy;
+
+ public SessionStateCallbackProxy(
+ Dispatchable<CameraCaptureSession.StateCallback> dispatchTarget) {
+ dispatchTarget = checkNotNull(dispatchTarget, "dispatchTarget must not be null");
+ mProxy = new MethodNameInvoker<>(dispatchTarget,
+ CameraCaptureSession.StateCallback.class);
+ }
+
+ @Override
+ public void onConfigured(CameraCaptureSession session) {
+ mProxy.invoke("onConfigured", session);
+ }
+
+
+ @Override
+ public void onConfigureFailed(CameraCaptureSession session) {
+ mProxy.invoke("onConfigureFailed", session);
+ }
+
+ @Override
+ public void onReady(CameraCaptureSession session) {
+ mProxy.invoke("onReady", session);
+ }
+
+ @Override
+ public void onActive(CameraCaptureSession session) {
+ mProxy.invoke("onActive", session);
+ }
+
+ @Override
+ public void onCaptureQueueEmpty(CameraCaptureSession session) {
+ mProxy.invoke("onCaptureQueueEmpty", session);
+ }
+
+ @Override
+ public void onClosed(CameraCaptureSession session) {
+ mProxy.invoke("onClosed", session);
+ }
+
+ @Override
+ public void onSurfacePrepared(CameraCaptureSession session, Surface surface) {
+ mProxy.invoke("onSurfacePrepared", session, surface);
+ }
+
+ }
+
+ private CallbackProxies() {
+ throw new AssertionError();
+ }
+}
diff --git a/android/hardware/camera2/impl/CameraCaptureSessionCore.java b/android/hardware/camera2/impl/CameraCaptureSessionCore.java
new file mode 100644
index 00000000..116f0f17
--- /dev/null
+++ b/android/hardware/camera2/impl/CameraCaptureSessionCore.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.impl;
+
+/**
+ * Internal interface for CameraDeviceImpl to CameraCaptureSessionImpl(s) communication
+ */
+public interface CameraCaptureSessionCore {
+
+ /**
+ * Replace this session with another session.
+ *
+ * <p>This is an optimization to avoid unconfiguring and then immediately having to
+ * reconfigure again.</p>
+ *
+ * <p>The semantics are identical to {@link #close}, except that unconfiguring will be skipped.
+ * </p>
+ *
+ * <p>After this call completes, the session will not call any further methods on the camera
+ * device.</p>
+ *
+ * @see CameraCaptureSession#close
+ */
+ void replaceSessionClose();
+
+    /**
+     * Create an internal state callback, to be invoked on {@code mDeviceHandler}.
+     *
+     * <p>It has a few behaviors:
+     * <ul>
+     * <li>Convert device state changes into session state changes.</li>
+     * <li>Keep track of async tasks that the session began (idle, abort).</li>
+     * </ul>
+     * </p>
+     */
+ CameraDeviceImpl.StateCallbackKK getDeviceStateCallback();
+
+ /**
+ * Whether currently in mid-abort.
+ *
+ * <p>This is used by the implementation to set the capture failure
+ * reason, in lieu of more accurate error codes from the camera service.
+ * Unsynchronized to avoid deadlocks between simultaneous session->device,
+ * device->session calls.</p>
+ *
+ */
+ boolean isAborting();
+
+}
diff --git a/android/hardware/camera2/impl/CameraCaptureSessionImpl.java b/android/hardware/camera2/impl/CameraCaptureSessionImpl.java
new file mode 100644
index 00000000..c7654c9e
--- /dev/null
+++ b/android/hardware/camera2/impl/CameraCaptureSessionImpl.java
@@ -0,0 +1,804 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.impl;
+
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.ICameraDeviceUser;
+import android.hardware.camera2.dispatch.ArgumentReplacingDispatcher;
+import android.hardware.camera2.dispatch.BroadcastDispatcher;
+import android.hardware.camera2.dispatch.DuckTypingDispatcher;
+import android.hardware.camera2.dispatch.HandlerDispatcher;
+import android.hardware.camera2.dispatch.InvokeDispatcher;
+import android.hardware.camera2.params.OutputConfiguration;
+import android.hardware.camera2.utils.TaskDrainer;
+import android.hardware.camera2.utils.TaskSingleDrainer;
+import android.os.Handler;
+import android.util.Log;
+import android.view.Surface;
+
+import java.util.Arrays;
+import java.util.List;
+
+import static android.hardware.camera2.impl.CameraDeviceImpl.checkHandler;
+import static com.android.internal.util.Preconditions.*;
+
+public class CameraCaptureSessionImpl extends CameraCaptureSession
+ implements CameraCaptureSessionCore {
+ private static final String TAG = "CameraCaptureSession";
+ private static final boolean DEBUG = false;
+
+ /** Simple integer ID for session for debugging */
+ private final int mId;
+ private final String mIdString;
+
+ /** Input surface configured by native camera framework based on user-specified configuration */
+ private final Surface mInput;
+ /**
+ * User-specified state callback, used for outgoing events; calls to this object will be
+ * automatically {@link Handler#post(Runnable) posted} to {@code mStateHandler}.
+ */
+ private final CameraCaptureSession.StateCallback mStateCallback;
+ /** User-specified state handler used for outgoing state callback events */
+ private final Handler mStateHandler;
+
+ /** Internal camera device; used to translate calls into existing deprecated API */
+ private final android.hardware.camera2.impl.CameraDeviceImpl mDeviceImpl;
+ /** Internal handler; used for all incoming events to preserve total order */
+ private final Handler mDeviceHandler;
+
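+    /*
+     * Drainer protocol, as used throughout this class: taskStarted() registers a pending
+     * task, beginDrain() declares that no new tasks will start, and once every registered
+     * task has called taskFinished(), the listener's onDrained() fires exactly once.
+     */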
+ /** Drain Sequence IDs which have been queued but not yet finished with aborted/completed */
+ private final TaskDrainer<Integer> mSequenceDrainer;
+ /** Drain state transitions from ACTIVE -> IDLE */
+ private final TaskSingleDrainer mIdleDrainer;
+ /** Drain state transitions from BUSY -> IDLE */
+ private final TaskSingleDrainer mAbortDrainer;
+
+ /** This session is closed; all further calls will throw ISE */
+ private boolean mClosed = false;
+    /** Whether this session was configured successfully */
+ private final boolean mConfigureSuccess;
+ /** Do not unconfigure if this is set; another session will overwrite configuration */
+ private boolean mSkipUnconfigure = false;
+
+ /** Is the session in the process of aborting? Pay attention to BUSY->IDLE transitions. */
+ private volatile boolean mAborting;
+
+ /**
+ * Create a new CameraCaptureSession.
+ *
+ * <p>The camera device must already be in the {@code IDLE} state when this is invoked.
+ * There must be no pending actions
+ * (e.g. no pending captures, no repeating requests, no flush).</p>
+ */
+ CameraCaptureSessionImpl(int id, Surface input,
+ CameraCaptureSession.StateCallback callback, Handler stateHandler,
+ android.hardware.camera2.impl.CameraDeviceImpl deviceImpl,
+ Handler deviceStateHandler, boolean configureSuccess) {
+ if (callback == null) {
+ throw new IllegalArgumentException("callback must not be null");
+ }
+
+ mId = id;
+ mIdString = String.format("Session %d: ", mId);
+
+ mInput = input;
+ mStateHandler = checkHandler(stateHandler);
+ mStateCallback = createUserStateCallbackProxy(mStateHandler, callback);
+
+ mDeviceHandler = checkNotNull(deviceStateHandler, "deviceStateHandler must not be null");
+ mDeviceImpl = checkNotNull(deviceImpl, "deviceImpl must not be null");
+
+ /*
+         * Use the same handler as the device's StateCallback for all internal incoming events
+ *
+ * This ensures total ordering between CameraDevice.StateCallback and
+ * CameraDeviceImpl.CaptureCallback events.
+ */
+ mSequenceDrainer = new TaskDrainer<>(mDeviceHandler, new SequenceDrainListener(),
+ /*name*/"seq");
+ mIdleDrainer = new TaskSingleDrainer(mDeviceHandler, new IdleDrainListener(),
+ /*name*/"idle");
+ mAbortDrainer = new TaskSingleDrainer(mDeviceHandler, new AbortDrainListener(),
+ /*name*/"abort");
+
+ // CameraDevice should call configureOutputs and have it finish before constructing us
+
+ if (configureSuccess) {
+ mStateCallback.onConfigured(this);
+ if (DEBUG) Log.v(TAG, mIdString + "Created session successfully");
+ mConfigureSuccess = true;
+ } else {
+ mStateCallback.onConfigureFailed(this);
+ mClosed = true; // do not fire any other callbacks, do not allow any other work
+ Log.e(TAG, mIdString + "Failed to create capture session; configuration failed");
+ mConfigureSuccess = false;
+ }
+ }
+
+ @Override
+ public CameraDevice getDevice() {
+ return mDeviceImpl;
+ }
+
+ @Override
+ public void prepare(Surface surface) throws CameraAccessException {
+ mDeviceImpl.prepare(surface);
+ }
+
+ @Override
+ public void prepare(int maxCount, Surface surface) throws CameraAccessException {
+ mDeviceImpl.prepare(maxCount, surface);
+ }
+
+ @Override
+ public void tearDown(Surface surface) throws CameraAccessException {
+ mDeviceImpl.tearDown(surface);
+ }
+
+ @Override
+ public void finalizeOutputConfigurations(
+ List<OutputConfiguration> outputConfigs) throws CameraAccessException {
+ mDeviceImpl.finalizeOutputConfigs(outputConfigs);
+ }
+
+ @Override
+ public int capture(CaptureRequest request, CaptureCallback callback,
+ Handler handler) throws CameraAccessException {
+ if (request == null) {
+ throw new IllegalArgumentException("request must not be null");
+ } else if (request.isReprocess() && !isReprocessable()) {
+ throw new IllegalArgumentException("this capture session cannot handle reprocess " +
+ "requests");
+ } else if (request.isReprocess() && request.getReprocessableSessionId() != mId) {
+ throw new IllegalArgumentException("capture request was created for another session");
+ }
+
+ synchronized (mDeviceImpl.mInterfaceLock) {
+ checkNotClosed();
+
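+            // checkHandler resolves a null handler against the current thread's looper when
+            // a callback was supplied, and throws if no looper is available.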
+ handler = checkHandler(handler, callback);
+
+ if (DEBUG) {
+ Log.v(TAG, mIdString + "capture - request " + request + ", callback " + callback +
+ " handler " + handler);
+ }
+
+ return addPendingSequence(mDeviceImpl.capture(request,
+ createCaptureCallbackProxy(handler, callback), mDeviceHandler));
+ }
+ }
+
+ @Override
+ public int captureBurst(List<CaptureRequest> requests, CaptureCallback callback,
+ Handler handler) throws CameraAccessException {
+ if (requests == null) {
+ throw new IllegalArgumentException("Requests must not be null");
+ } else if (requests.isEmpty()) {
+ throw new IllegalArgumentException("Requests must have at least one element");
+ }
+
+ for (CaptureRequest request : requests) {
+ if (request.isReprocess()) {
+ if (!isReprocessable()) {
+ throw new IllegalArgumentException("This capture session cannot handle " +
+ "reprocess requests");
+ } else if (request.getReprocessableSessionId() != mId) {
+ throw new IllegalArgumentException("Capture request was created for another " +
+ "session");
+ }
+ }
+ }
+
+ synchronized (mDeviceImpl.mInterfaceLock) {
+ checkNotClosed();
+
+ handler = checkHandler(handler, callback);
+
+ if (DEBUG) {
+ CaptureRequest[] requestArray = requests.toArray(new CaptureRequest[0]);
+ Log.v(TAG, mIdString + "captureBurst - requests " + Arrays.toString(requestArray) +
+ ", callback " + callback + " handler " + handler);
+ }
+
+ return addPendingSequence(mDeviceImpl.captureBurst(requests,
+ createCaptureCallbackProxy(handler, callback), mDeviceHandler));
+ }
+ }
+
+ @Override
+ public int setRepeatingRequest(CaptureRequest request, CaptureCallback callback,
+ Handler handler) throws CameraAccessException {
+ if (request == null) {
+ throw new IllegalArgumentException("request must not be null");
+ } else if (request.isReprocess()) {
+ throw new IllegalArgumentException("repeating reprocess requests are not supported");
+ }
+
+ synchronized (mDeviceImpl.mInterfaceLock) {
+ checkNotClosed();
+
+ handler = checkHandler(handler, callback);
+
+ if (DEBUG) {
+ Log.v(TAG, mIdString + "setRepeatingRequest - request " + request + ", callback " +
+                        callback + " handler " + handler);
+ }
+
+ return addPendingSequence(mDeviceImpl.setRepeatingRequest(request,
+ createCaptureCallbackProxy(handler, callback), mDeviceHandler));
+ }
+ }
+
+ @Override
+ public int setRepeatingBurst(List<CaptureRequest> requests,
+ CaptureCallback callback, Handler handler) throws CameraAccessException {
+ if (requests == null) {
+ throw new IllegalArgumentException("requests must not be null");
+ } else if (requests.isEmpty()) {
+ throw new IllegalArgumentException("requests must have at least one element");
+ }
+
+ for (CaptureRequest r : requests) {
+ if (r.isReprocess()) {
+ throw new IllegalArgumentException("repeating reprocess burst requests are not " +
+ "supported");
+ }
+ }
+
+ synchronized (mDeviceImpl.mInterfaceLock) {
+ checkNotClosed();
+
+ handler = checkHandler(handler, callback);
+
+ if (DEBUG) {
+ CaptureRequest[] requestArray = requests.toArray(new CaptureRequest[0]);
+ Log.v(TAG, mIdString + "setRepeatingBurst - requests " +
+ Arrays.toString(requestArray) + ", callback " + callback +
+                        " handler " + handler);
+ }
+
+ return addPendingSequence(mDeviceImpl.setRepeatingBurst(requests,
+ createCaptureCallbackProxy(handler, callback), mDeviceHandler));
+ }
+ }
+
+ @Override
+ public void stopRepeating() throws CameraAccessException {
+ synchronized (mDeviceImpl.mInterfaceLock) {
+ checkNotClosed();
+
+ if (DEBUG) {
+ Log.v(TAG, mIdString + "stopRepeating");
+ }
+
+ mDeviceImpl.stopRepeating();
+ }
+ }
+
+ @Override
+ public void abortCaptures() throws CameraAccessException {
+ synchronized (mDeviceImpl.mInterfaceLock) {
+ checkNotClosed();
+
+ if (DEBUG) {
+ Log.v(TAG, mIdString + "abortCaptures");
+ }
+
+ if (mAborting) {
+ Log.w(TAG, mIdString + "abortCaptures - Session is already aborting; doing nothing");
+ return;
+ }
+
+ mAborting = true;
+ mAbortDrainer.taskStarted();
+
+ mDeviceImpl.flush();
+ // The next BUSY -> IDLE set of transitions will mark the end of the abort.
+ }
+ }
+
+ @Override
+ public boolean isReprocessable() {
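+        // A session is reprocessable exactly when it was created with an input surface.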
+ return mInput != null;
+ }
+
+ @Override
+ public Surface getInputSurface() {
+ return mInput;
+ }
+
+ /**
+ * Replace this session with another session.
+ *
+ * <p>This is an optimization to avoid unconfiguring and then immediately having to
+ * reconfigure again.</p>
+ *
+ * <p>The semantics are identical to {@link #close}, except that unconfiguring will be skipped.
+     * </p>
+ *
+ * <p>After this call completes, the session will not call any further methods on the camera
+ * device.</p>
+ *
+ * @see CameraCaptureSession#close
+ */
+ @Override
+ public void replaceSessionClose() {
+ synchronized (mDeviceImpl.mInterfaceLock) {
+ /*
+ * In order for creating new sessions to be fast, the new session should be created
+ * before the old session is closed.
+ *
+ * Otherwise the old session will always unconfigure if there is no new session to
+ * replace it.
+ *
+ * Unconfiguring could add hundreds of milliseconds of delay. We could race and attempt
+ * to skip unconfigure if a new session is created before the captures are all drained,
+ * but this would introduce nondeterministic behavior.
+ */
+
+ if (DEBUG) Log.v(TAG, mIdString + "replaceSessionClose");
+
+ // Set up fast shutdown. Possible alternative paths:
+ // - This session is active, so close() below starts the shutdown drain
+ // - This session is mid-shutdown drain, and hasn't yet reached the idle drain listener.
+ // - This session is already closed and has executed the idle drain listener, and
+ // configureOutputsChecked(null) has already been called.
+ //
+ // Do not call configureOutputsChecked(null) going forward, since it would race with the
+ // configuration for the new session. If it was already called, then we don't care,
+ // since it won't get called again.
+ mSkipUnconfigure = true;
+ close();
+ }
+ }
+
+ @Override
+ public void close() {
+ synchronized (mDeviceImpl.mInterfaceLock) {
+ if (mClosed) {
+ if (DEBUG) Log.v(TAG, mIdString + "close - reentering");
+ return;
+ }
+
+ if (DEBUG) Log.v(TAG, mIdString + "close - first time");
+
+ mClosed = true;
+
+ /*
+             * Flush out any repeating request. Since the session is now closed, no new
+             * requests can be queued, and eventually the entire request queue will be drained.
+             *
+             * If the camera device was already closed, short-circuit and do nothing, since
+             * no more internal device callbacks will fire anyway.
+ *
+ * Otherwise, once stopRepeating is done, wait for camera to idle, then unconfigure
+ * the camera. Once that's done, fire #onClosed.
+ */
+ try {
+ mDeviceImpl.stopRepeating();
+ } catch (IllegalStateException e) {
+ // OK: Camera device may already be closed, nothing else to do
+
+ // TODO: Fire onClosed anytime we get the device onClosed or the ISE?
+ // or just suppress the ISE only and rely onClosed.
+ // Also skip any of the draining work if this is already closed.
+
+ // Short-circuit; queue callback immediately and return
+ mStateCallback.onClosed(this);
+ return;
+ } catch (CameraAccessException e) {
+ // OK: close does not throw checked exceptions.
+ Log.e(TAG, mIdString + "Exception while stopping repeating: ", e);
+
+ // TODO: call onError instead of onClosed if this happens
+ }
+
+ // If no sequences are pending, fire #onClosed immediately
+ mSequenceDrainer.beginDrain();
+ }
+ if (mInput != null) {
+ mInput.release();
+ }
+ }
+
+ /**
+ * Whether currently in mid-abort.
+ *
+ * <p>This is used by the implementation to set the capture failure
+ * reason, in lieu of more accurate error codes from the camera service.
+ * Unsynchronized to avoid deadlocks between simultaneous session->device,
+ * device->session calls.</p>
+ *
+ */
+ @Override
+ public boolean isAborting() {
+ return mAborting;
+ }
+
+ /**
+ * Post calls into a CameraCaptureSession.StateCallback to the user-specified {@code handler}.
+ */
+ private StateCallback createUserStateCallbackProxy(Handler handler, StateCallback callback) {
+ InvokeDispatcher<StateCallback> userCallbackSink = new InvokeDispatcher<>(callback);
+ HandlerDispatcher<StateCallback> handlerPassthrough =
+ new HandlerDispatcher<>(userCallbackSink, handler);
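+        // Resulting chain: proxy -> HandlerDispatcher (posts onto the user's handler)
+        //                        -> InvokeDispatcher (invokes the user's StateCallback).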
+
+ return new CallbackProxies.SessionStateCallbackProxy(handlerPassthrough);
+ }
+
+ /**
+ * Forward callbacks from
+ * CameraDeviceImpl.CaptureCallback to the CameraCaptureSession.CaptureCallback.
+ *
+ * <p>In particular, all calls are automatically split to go both to our own
+ * internal callback, and to the user-specified callback (by transparently posting
+ * to the user-specified handler).</p>
+ *
+ * <p>When a capture sequence finishes, update the pending checked sequences set.</p>
+ */
+ @SuppressWarnings("deprecation")
+ private CameraDeviceImpl.CaptureCallback createCaptureCallbackProxy(
+ Handler handler, CaptureCallback callback) {
+ CameraDeviceImpl.CaptureCallback localCallback = new CameraDeviceImpl.CaptureCallback() {
+
+ @Override
+ public void onCaptureStarted(CameraDevice camera,
+ CaptureRequest request, long timestamp, long frameNumber) {
+ // Do nothing
+ }
+
+ @Override
+ public void onCapturePartial(CameraDevice camera,
+ CaptureRequest request, android.hardware.camera2.CaptureResult result) {
+ // Do nothing
+ }
+
+ @Override
+ public void onCaptureProgressed(CameraDevice camera,
+ CaptureRequest request, android.hardware.camera2.CaptureResult partialResult) {
+ // Do nothing
+ }
+
+ @Override
+ public void onCaptureCompleted(CameraDevice camera,
+ CaptureRequest request, android.hardware.camera2.TotalCaptureResult result) {
+ // Do nothing
+ }
+
+ @Override
+ public void onCaptureFailed(CameraDevice camera,
+ CaptureRequest request, android.hardware.camera2.CaptureFailure failure) {
+ // Do nothing
+ }
+
+ @Override
+ public void onCaptureSequenceCompleted(CameraDevice camera,
+ int sequenceId, long frameNumber) {
+ finishPendingSequence(sequenceId);
+ }
+
+ @Override
+ public void onCaptureSequenceAborted(CameraDevice camera,
+ int sequenceId) {
+ finishPendingSequence(sequenceId);
+ }
+
+ @Override
+ public void onCaptureBufferLost(CameraDevice camera,
+ CaptureRequest request, Surface target, long frameNumber) {
+ // Do nothing
+ }
+
+ };
+
+ /*
+ * Split the calls from the device callback into local callback and the following chain:
+ * - replace the first CameraDevice arg with a CameraCaptureSession
+ * - duck type from device callback to session callback
+ * - then forward the call to a handler
+ * - then finally invoke the destination method on the session callback object
+ */
+ if (callback == null) {
+ // OK: API allows the user to not specify a callback, and the handler may
+ // also be null in that case. Collapse whole dispatch chain to only call the local
+ // callback
+ return localCallback;
+ }
+
+ InvokeDispatcher<CameraDeviceImpl.CaptureCallback> localSink =
+ new InvokeDispatcher<>(localCallback);
+
+ InvokeDispatcher<CaptureCallback> userCallbackSink =
+ new InvokeDispatcher<>(callback);
+ HandlerDispatcher<CaptureCallback> handlerPassthrough =
+ new HandlerDispatcher<>(userCallbackSink, handler);
+ DuckTypingDispatcher<CameraDeviceImpl.CaptureCallback, CaptureCallback> duckToSession
+ = new DuckTypingDispatcher<>(handlerPassthrough, CaptureCallback.class);
+ ArgumentReplacingDispatcher<CameraDeviceImpl.CaptureCallback, CameraCaptureSessionImpl>
+ replaceDeviceWithSession = new ArgumentReplacingDispatcher<>(duckToSession,
+ /*argumentIndex*/0, this);
+
+ BroadcastDispatcher<CameraDeviceImpl.CaptureCallback> broadcaster =
+ new BroadcastDispatcher<CameraDeviceImpl.CaptureCallback>(
+ replaceDeviceWithSession,
+ localSink);
+
+ return new CallbackProxies.DeviceCaptureCallbackProxy(broadcaster);
+ }
+
+    /**
+     * Create an internal state callback, to be invoked on the mDeviceHandler.
+     *
+     * <p>It has a few behaviors:
+     * <ul>
+     * <li>Convert device state changes into session state changes.
+     * <li>Keep track of async tasks that the session began (idle, abort).
+     * </ul>
+     * </p>
+     */
+ @Override
+ public CameraDeviceImpl.StateCallbackKK getDeviceStateCallback() {
+ final CameraCaptureSession session = this;
+ final Object interfaceLock = mDeviceImpl.mInterfaceLock;
+
+ return new CameraDeviceImpl.StateCallbackKK() {
+ private boolean mBusy = false;
+ private boolean mActive = false;
+
+ @Override
+ public void onOpened(CameraDevice camera) {
+ throw new AssertionError("Camera must already be open before creating a session");
+ }
+
+ @Override
+ public void onDisconnected(CameraDevice camera) {
+ if (DEBUG) Log.v(TAG, mIdString + "onDisconnected");
+ close();
+ }
+
+ @Override
+ public void onError(CameraDevice camera, int error) {
+ // Should not be reached, handled by device code
+ Log.wtf(TAG, mIdString + "Got device error " + error);
+ }
+
+ @Override
+ public void onActive(CameraDevice camera) {
+ mIdleDrainer.taskStarted();
+ mActive = true;
+
+ if (DEBUG) Log.v(TAG, mIdString + "onActive");
+ mStateCallback.onActive(session);
+ }
+
+ @Override
+ public void onIdle(CameraDevice camera) {
+ boolean isAborting;
+ if (DEBUG) Log.v(TAG, mIdString + "onIdle");
+
+ synchronized (interfaceLock) {
+ isAborting = mAborting;
+ }
+
+ /*
+ * Check which states we transitioned through:
+ *
+ * (ACTIVE -> IDLE)
+ * (BUSY -> IDLE)
+ *
+ * Note that this is also legal:
+ * (ACTIVE -> BUSY -> IDLE)
+ *
+ * and mark those tasks as finished
+ */
+ if (mBusy && isAborting) {
+ mAbortDrainer.taskFinished();
+
+ synchronized (interfaceLock) {
+ mAborting = false;
+ }
+ }
+
+ if (mActive) {
+ mIdleDrainer.taskFinished();
+ }
+
+ mBusy = false;
+ mActive = false;
+
+ mStateCallback.onReady(session);
+ }
+
+ @Override
+ public void onBusy(CameraDevice camera) {
+ mBusy = true;
+
+ // TODO: Queue captures during abort instead of failing them
+ // since the app won't be able to distinguish the two actives
+ // Don't signal the application since there's no clean mapping here
+ if (DEBUG) Log.v(TAG, mIdString + "onBusy");
+ }
+
+ @Override
+ public void onUnconfigured(CameraDevice camera) {
+ if (DEBUG) Log.v(TAG, mIdString + "onUnconfigured");
+ }
+
+ @Override
+ public void onRequestQueueEmpty() {
+ if (DEBUG) Log.v(TAG, mIdString + "onRequestQueueEmpty");
+ mStateCallback.onCaptureQueueEmpty(session);
+ }
+
+ @Override
+ public void onSurfacePrepared(Surface surface) {
+ if (DEBUG) Log.v(TAG, mIdString + "onSurfacePrepared");
+ mStateCallback.onSurfacePrepared(session, surface);
+ }
+ };
+    }
+
+ @Override
+ protected void finalize() throws Throwable {
+ try {
+ close();
+ } finally {
+ super.finalize();
+ }
+ }
+
+ private void checkNotClosed() {
+ if (mClosed) {
+ throw new IllegalStateException(
+ "Session has been closed; further changes are illegal.");
+ }
+ }
+
+ /**
+ * Notify the session that a pending capture sequence has just been queued.
+ *
+     * <p>During a shutdown/close, the session waits until all pending sequences are finished
+ * before taking any further steps to shut down itself.</p>
+ *
+ * @see #finishPendingSequence
+ */
+ private int addPendingSequence(int sequenceId) {
+ mSequenceDrainer.taskStarted(sequenceId);
+ return sequenceId;
+ }
+
+ /**
+ * Notify the session that a pending capture sequence is now finished.
+ *
+ * <p>During a shutdown/close, once all pending sequences finish, it is safe to
+ * close the camera further by unconfiguring and then firing {@code onClosed}.</p>
+ */
+ private void finishPendingSequence(int sequenceId) {
+ try {
+ mSequenceDrainer.taskFinished(sequenceId);
+ } catch (IllegalStateException e) {
+ // Workaround for b/27870771
+ Log.w(TAG, e.getMessage());
+ }
+ }
+
+ private class SequenceDrainListener implements TaskDrainer.DrainListener {
+ @Override
+ public void onDrained() {
+ /*
+ * No repeating request is set; and the capture queue has fully drained.
+ *
+ * If no captures were queued to begin with, and an abort was queued,
+ * it's still possible to get another BUSY before the last IDLE.
+ *
+ * If the camera is already "IDLE" and no aborts are pending,
+ * then the drain immediately finishes.
+ */
+ if (DEBUG) Log.v(TAG, mIdString + "onSequenceDrained");
+
+
+ // Fire session close as soon as all sequences are complete.
+ // We may still need to unconfigure the device, but a new session might be created
+            // past this point, and notifications to this instance would then stop.
+ mStateCallback.onClosed(CameraCaptureSessionImpl.this);
+
+ // Fast path: A new capture session has replaced this one; don't wait for abort/idle
+ // as we won't get state updates any more anyway.
+ if (mSkipUnconfigure) {
+ return;
+ }
+
+ mAbortDrainer.beginDrain();
+ }
+ }
+
+ private class AbortDrainListener implements TaskDrainer.DrainListener {
+ @Override
+ public void onDrained() {
+ if (DEBUG) Log.v(TAG, mIdString + "onAbortDrained");
+ synchronized (mDeviceImpl.mInterfaceLock) {
+ /*
+ * Any queued aborts have now completed.
+ *
+ * It's now safe to wait to receive the final "IDLE" event, as the camera device
+ * will no longer again transition to "ACTIVE" by itself.
+ *
+ * If the camera is already "IDLE", then the drain immediately finishes.
+ */
+
+ // Fast path: A new capture session has replaced this one; don't wait for idle
+ // as we won't get state updates any more anyway.
+ if (mSkipUnconfigure) {
+ return;
+ }
+ mIdleDrainer.beginDrain();
+ }
+ }
+ }
+
+ private class IdleDrainListener implements TaskDrainer.DrainListener {
+ @Override
+ public void onDrained() {
+ if (DEBUG) Log.v(TAG, mIdString + "onIdleDrained");
+
+ // Take device lock before session lock so that we can call back into device
+ // without causing a deadlock
+ synchronized (mDeviceImpl.mInterfaceLock) {
+ /*
+ * The device is now IDLE, and has settled. It will not transition to
+ * ACTIVE or BUSY again by itself.
+ *
+ * It's now safe to unconfigure the outputs.
+ *
+ * This operation is idempotent; a session will not be closed twice.
+ */
+ if (DEBUG)
+ Log.v(TAG, mIdString + "Session drain complete, skip unconfigure: " +
+ mSkipUnconfigure);
+
+ // Fast path: A new capture session has replaced this one; don't wait for idle
+ // as we won't get state updates any more anyway.
+ if (mSkipUnconfigure) {
+ return;
+ }
+
+ // Final slow path: unconfigure the camera, no session has replaced us and
+ // everything is idle.
+ try {
+ // begin transition to unconfigured
+ mDeviceImpl.configureStreamsChecked(/*inputConfig*/null, /*outputs*/null,
+ /*operatingMode*/ ICameraDeviceUser.NORMAL_MODE);
+ } catch (CameraAccessException e) {
+ // OK: do not throw checked exceptions.
+ Log.e(TAG, mIdString + "Exception while unconfiguring outputs: ", e);
+
+ // TODO: call onError instead of onClosed if this happens
+ } catch (IllegalStateException e) {
+ // Camera is already closed, so nothing left to do
+ if (DEBUG) Log.v(TAG, mIdString +
+ "Camera was already closed or busy, skipping unconfigure");
+ }
+ }
+ }
+ }
+
+}
diff --git a/android/hardware/camera2/impl/CameraConstrainedHighSpeedCaptureSessionImpl.java b/android/hardware/camera2/impl/CameraConstrainedHighSpeedCaptureSessionImpl.java
new file mode 100644
index 00000000..fec7fd97
--- /dev/null
+++ b/android/hardware/camera2/impl/CameraConstrainedHighSpeedCaptureSessionImpl.java
@@ -0,0 +1,313 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.impl;
+
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.params.OutputConfiguration;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.hardware.camera2.utils.SurfaceUtils;
+import android.os.Handler;
+import android.util.Range;
+import android.view.Surface;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+
+import static com.android.internal.util.Preconditions.*;
+
+/**
+ * Standard implementation of CameraConstrainedHighSpeedCaptureSession.
+ *
+ * <p>
+ * Mostly just forwards calls to an instance of CameraCaptureSessionImpl,
+ * but implements the few necessary behavior changes and additional methods required
+ * for the constrained high speed mode.
+ * </p>
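+ *
+ * <p>Typical client usage (a sketch; variable names are illustrative):</p>
+ * <pre><code>
+ * // inside CameraCaptureSession.StateCallback#onConfigured(CameraCaptureSession session):
+ * CameraConstrainedHighSpeedCaptureSession chs =
+ *         (CameraConstrainedHighSpeedCaptureSession) session;
+ * List&lt;CaptureRequest&gt; burst = chs.createHighSpeedRequestList(request);
+ * chs.setRepeatingBurst(burst, captureCallback, handler);
+ * </code></pre>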
+ */
+
+public class CameraConstrainedHighSpeedCaptureSessionImpl
+ extends CameraConstrainedHighSpeedCaptureSession implements CameraCaptureSessionCore {
+ private final CameraCharacteristics mCharacteristics;
+ private final CameraCaptureSessionImpl mSessionImpl;
+
+ /**
+ * Create a new CameraCaptureSession.
+ *
+ * <p>The camera device must already be in the {@code IDLE} state when this is invoked.
+ * There must be no pending actions
+ * (e.g. no pending captures, no repeating requests, no flush).</p>
+ */
+ CameraConstrainedHighSpeedCaptureSessionImpl(int id,
+ CameraCaptureSession.StateCallback callback, Handler stateHandler,
+ android.hardware.camera2.impl.CameraDeviceImpl deviceImpl,
+ Handler deviceStateHandler, boolean configureSuccess,
+ CameraCharacteristics characteristics) {
+ mCharacteristics = characteristics;
+ CameraCaptureSession.StateCallback wrapperCallback = new WrapperCallback(callback);
+ mSessionImpl = new CameraCaptureSessionImpl(id, /*input*/null, wrapperCallback,
+ stateHandler, deviceImpl, deviceStateHandler, configureSuccess);
+ }
+
+ @Override
+ public List<CaptureRequest> createHighSpeedRequestList(CaptureRequest request)
+ throws CameraAccessException {
+ if (request == null) {
+ throw new IllegalArgumentException("Input capture request must not be null");
+ }
+ Collection<Surface> outputSurfaces = request.getTargets();
+ Range<Integer> fpsRange = request.get(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE);
+
+ StreamConfigurationMap config =
+ mCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+ SurfaceUtils.checkConstrainedHighSpeedSurfaces(outputSurfaces, fpsRange, config);
+
+        // Request list size: to limit the preview to 30fps, we need to use maxFps/30; to
+        // maximize the preview frame rate, we should use the max batch size for that high
+        // speed stream configuration. We choose the former for now.
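+        // E.g., a target fps range of [30, 120] yields 120 / 30 = 4 requests per batch.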
+ int requestListSize = fpsRange.getUpper() / 30;
+ List<CaptureRequest> requestList = new ArrayList<CaptureRequest>();
+
+        // Prepare the Request builders: we need to carry over the request controls.
+ // First, create a request builder that will only include preview or recording target.
+ CameraMetadataNative requestMetadata = new CameraMetadataNative(request.getNativeCopy());
+        // Note that after this step, the requestMetadata is mutated (swapped) and cannot be
+        // used for the next request builder creation.
+ CaptureRequest.Builder singleTargetRequestBuilder = new CaptureRequest.Builder(
+ requestMetadata, /*reprocess*/false, CameraCaptureSession.SESSION_ID_NONE);
+
+ // Carry over userTag, as native metadata doesn't have this field.
+ singleTargetRequestBuilder.setTag(request.getTag());
+
+ // Overwrite the capture intent to make sure a good value is set.
+ Iterator<Surface> iterator = outputSurfaces.iterator();
+ Surface firstSurface = iterator.next();
+ Surface secondSurface = null;
+ if (outputSurfaces.size() == 1 && SurfaceUtils.isSurfaceForHwVideoEncoder(firstSurface)) {
+ singleTargetRequestBuilder.set(CaptureRequest.CONTROL_CAPTURE_INTENT,
+ CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW);
+ } else {
+ // Video only, or preview + video
+ singleTargetRequestBuilder.set(CaptureRequest.CONTROL_CAPTURE_INTENT,
+ CaptureRequest.CONTROL_CAPTURE_INTENT_VIDEO_RECORD);
+ }
+ singleTargetRequestBuilder.setPartOfCHSRequestList(/*partOfCHSList*/true);
+
+        // Second, create a request builder that will include both preview and recording targets.
+ CaptureRequest.Builder doubleTargetRequestBuilder = null;
+ if (outputSurfaces.size() == 2) {
+ // Have to create a new copy, the original one was mutated after a new
+ // CaptureRequest.Builder creation.
+ requestMetadata = new CameraMetadataNative(request.getNativeCopy());
+ doubleTargetRequestBuilder = new CaptureRequest.Builder(
+ requestMetadata, /*reprocess*/false, CameraCaptureSession.SESSION_ID_NONE);
+ doubleTargetRequestBuilder.setTag(request.getTag());
+ doubleTargetRequestBuilder.set(CaptureRequest.CONTROL_CAPTURE_INTENT,
+ CaptureRequest.CONTROL_CAPTURE_INTENT_VIDEO_RECORD);
+ doubleTargetRequestBuilder.addTarget(firstSurface);
+ secondSurface = iterator.next();
+ doubleTargetRequestBuilder.addTarget(secondSurface);
+ doubleTargetRequestBuilder.setPartOfCHSRequestList(/*partOfCHSList*/true);
+ // Make sure singleTargetRequestBuilder contains only recording surface for
+ // preview + recording case.
+ Surface recordingSurface = firstSurface;
+ if (!SurfaceUtils.isSurfaceForHwVideoEncoder(recordingSurface)) {
+ recordingSurface = secondSurface;
+ }
+ singleTargetRequestBuilder.addTarget(recordingSurface);
+ } else {
+ // Single output case: either recording or preview.
+ singleTargetRequestBuilder.addTarget(firstSurface);
+ }
+
+ // Generate the final request list.
+ for (int i = 0; i < requestListSize; i++) {
+ if (i == 0 && doubleTargetRequestBuilder != null) {
+ // First request should be recording + preview request
+ requestList.add(doubleTargetRequestBuilder.build());
+ } else {
+ requestList.add(singleTargetRequestBuilder.build());
+ }
+ }
+
+ return Collections.unmodifiableList(requestList);
+ }
+
+ private boolean isConstrainedHighSpeedRequestList(List<CaptureRequest> requestList) {
+ checkCollectionNotEmpty(requestList, "High speed request list");
+ for (CaptureRequest request : requestList) {
+ if (!request.isPartOfCRequestList()) {
+ return false;
+ }
+ }
+ return true;
+ }
+
+ @Override
+ public CameraDevice getDevice() {
+ return mSessionImpl.getDevice();
+ }
+
+ @Override
+ public void prepare(Surface surface) throws CameraAccessException {
+ mSessionImpl.prepare(surface);
+ }
+
+ @Override
+ public void prepare(int maxCount, Surface surface) throws CameraAccessException {
+ mSessionImpl.prepare(maxCount, surface);
+ }
+
+ @Override
+ public void tearDown(Surface surface) throws CameraAccessException {
+ mSessionImpl.tearDown(surface);
+ }
+
+ @Override
+ public int capture(CaptureRequest request, CaptureCallback listener, Handler handler)
+ throws CameraAccessException {
+ throw new UnsupportedOperationException("Constrained high speed session doesn't support"
+ + " this method");
+ }
+
+ @Override
+ public int captureBurst(List<CaptureRequest> requests, CaptureCallback listener,
+ Handler handler) throws CameraAccessException {
+ if (!isConstrainedHighSpeedRequestList(requests)) {
+ throw new IllegalArgumentException(
+ "Only request lists created by createHighSpeedRequestList() can be submitted to " +
+ "a constrained high speed capture session");
+ }
+ return mSessionImpl.captureBurst(requests, listener, handler);
+ }
+
+ @Override
+ public int setRepeatingRequest(CaptureRequest request, CaptureCallback listener,
+ Handler handler) throws CameraAccessException {
+ throw new UnsupportedOperationException("Constrained high speed session doesn't support"
+ + " this method");
+ }
+
+ @Override
+ public int setRepeatingBurst(List<CaptureRequest> requests, CaptureCallback listener,
+ Handler handler) throws CameraAccessException {
+ if (!isConstrainedHighSpeedRequestList(requests)) {
+ throw new IllegalArgumentException(
+ "Only request lists created by createHighSpeedRequestList() can be submitted to " +
+ "a constrained high speed capture session");
+ }
+ return mSessionImpl.setRepeatingBurst(requests, listener, handler);
+ }
+
+ @Override
+ public void stopRepeating() throws CameraAccessException {
+ mSessionImpl.stopRepeating();
+ }
+
+ @Override
+ public void abortCaptures() throws CameraAccessException {
+ mSessionImpl.abortCaptures();
+ }
+
+ @Override
+ public Surface getInputSurface() {
+ return null;
+ }
+
+ @Override
+ public void close() {
+ mSessionImpl.close();
+ }
+
+ @Override
+ public boolean isReprocessable() {
+ return false;
+ }
+
+ // Implementation of CameraCaptureSessionCore methods
+
+ @Override
+ public void replaceSessionClose() {
+ mSessionImpl.replaceSessionClose();
+ }
+
+ @Override
+ public CameraDeviceImpl.StateCallbackKK getDeviceStateCallback() {
+ return mSessionImpl.getDeviceStateCallback();
+ }
+
+ @Override
+ public boolean isAborting() {
+ return mSessionImpl.isAborting();
+ }
+
+ @Override
+ public void finalizeOutputConfigurations(List<OutputConfiguration> deferredOutputConfigs)
+ throws CameraAccessException {
+ mSessionImpl.finalizeOutputConfigurations(deferredOutputConfigs);
+ }
+
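+    /**
+     * Forwards state callbacks from the wrapped CameraCaptureSessionImpl to the client's
+     * callback, substituting this high speed session instance for the inner session so
+     * the client never observes the wrapped object.
+     */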
+ private class WrapperCallback extends StateCallback {
+ private final StateCallback mCallback;
+
+ public WrapperCallback(StateCallback callback) {
+ mCallback = callback;
+ }
+
+ @Override
+ public void onConfigured(CameraCaptureSession session) {
+ mCallback.onConfigured(CameraConstrainedHighSpeedCaptureSessionImpl.this);
+ }
+
+ @Override
+ public void onConfigureFailed(CameraCaptureSession session) {
+ mCallback.onConfigureFailed(CameraConstrainedHighSpeedCaptureSessionImpl.this);
+ }
+
+ @Override
+ public void onReady(CameraCaptureSession session) {
+ mCallback.onReady(CameraConstrainedHighSpeedCaptureSessionImpl.this);
+ }
+
+ @Override
+ public void onActive(CameraCaptureSession session) {
+ mCallback.onActive(CameraConstrainedHighSpeedCaptureSessionImpl.this);
+ }
+
+ @Override
+ public void onCaptureQueueEmpty(CameraCaptureSession session) {
+ mCallback.onCaptureQueueEmpty(CameraConstrainedHighSpeedCaptureSessionImpl.this);
+ }
+
+ @Override
+ public void onClosed(CameraCaptureSession session) {
+ mCallback.onClosed(CameraConstrainedHighSpeedCaptureSessionImpl.this);
+ }
+
+ @Override
+ public void onSurfacePrepared(CameraCaptureSession session, Surface surface) {
+ mCallback.onSurfacePrepared(CameraConstrainedHighSpeedCaptureSessionImpl.this,
+ surface);
+ }
+ }
+}
diff --git a/android/hardware/camera2/impl/CameraDeviceImpl.java b/android/hardware/camera2/impl/CameraDeviceImpl.java
new file mode 100644
index 00000000..bfeb14de
--- /dev/null
+++ b/android/hardware/camera2/impl/CameraDeviceImpl.java
@@ -0,0 +1,2227 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.impl;
+
+import static android.hardware.camera2.CameraAccessException.CAMERA_IN_USE;
+
+import android.graphics.ImageFormat;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.CaptureFailure;
+import android.hardware.camera2.ICameraDeviceCallbacks;
+import android.hardware.camera2.ICameraDeviceUser;
+import android.hardware.camera2.TotalCaptureResult;
+import android.hardware.camera2.params.InputConfiguration;
+import android.hardware.camera2.params.OutputConfiguration;
+import android.hardware.camera2.params.ReprocessFormatsMap;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.hardware.camera2.utils.SubmitInfo;
+import android.hardware.camera2.utils.SurfaceUtils;
+import android.hardware.ICameraService;
+import android.os.Build;
+import android.os.Handler;
+import android.os.IBinder;
+import android.os.Looper;
+import android.os.RemoteException;
+import android.os.ServiceSpecificException;
+import android.util.Log;
+import android.util.Range;
+import android.util.Size;
+import android.util.SparseArray;
+import android.view.Surface;
+
+import java.util.AbstractMap.SimpleEntry;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.LinkedList;
+import java.util.TreeMap;
+
+/**
+ * HAL2.1+ implementation of CameraDevice. Use CameraManager#openCamera to instantiate.
+ */
+public class CameraDeviceImpl extends CameraDevice
+ implements IBinder.DeathRecipient {
+ private final String TAG;
+ private final boolean DEBUG = false;
+
+ private static final int REQUEST_ID_NONE = -1;
+
+ // TODO: guard every function with if (!mRemoteDevice) check (if it was closed)
+ private ICameraDeviceUserWrapper mRemoteDevice;
+
+ // Lock to synchronize cross-thread access to device public interface
+ final Object mInterfaceLock = new Object(); // access from this class and Session only!
+ private final CameraDeviceCallbacks mCallbacks = new CameraDeviceCallbacks();
+
+ private final StateCallback mDeviceCallback;
+ private volatile StateCallbackKK mSessionStateCallback;
+ private final Handler mDeviceHandler;
+
+ private final AtomicBoolean mClosing = new AtomicBoolean();
+ private boolean mInError = false;
+ private boolean mIdle = true;
+
+ /** map request IDs to callback/request data */
+ private final SparseArray<CaptureCallbackHolder> mCaptureCallbackMap =
+ new SparseArray<CaptureCallbackHolder>();
+
+ private int mRepeatingRequestId = REQUEST_ID_NONE;
+ // Map stream IDs to input/output configurations
+ private SimpleEntry<Integer, InputConfiguration> mConfiguredInput =
+ new SimpleEntry<>(REQUEST_ID_NONE, null);
+ private final SparseArray<OutputConfiguration> mConfiguredOutputs =
+ new SparseArray<>();
+
+ private final String mCameraId;
+ private final CameraCharacteristics mCharacteristics;
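+    /**
+     * Expected number of partial results per capture, from
+     * {@link CameraCharacteristics#REQUEST_PARTIAL_RESULT_COUNT}; 1 means partial results
+     * are not supported.
+     */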
+ private final int mTotalPartialCount;
+
+ private static final long NANO_PER_SECOND = 1000000000; //ns
+
+ /**
+     * A list tracking each request and its expected last regular and reprocess frame
+     * numbers. Updated when calling ICameraDeviceUser methods.
+ */
+ private final List<RequestLastFrameNumbersHolder> mRequestLastFrameNumbersList =
+ new ArrayList<>();
+
+ /**
+ * An object tracking received frame numbers.
+ * Updated when receiving callbacks from ICameraDeviceCallbacks.
+ */
+ private final FrameNumberTracker mFrameNumberTracker = new FrameNumberTracker();
+
+ private CameraCaptureSessionCore mCurrentSession;
+ private int mNextSessionId = 0;
+
+ private final int mAppTargetSdkVersion;
+
+ // Runnables for all state transitions, except error, which needs the
+ // error code argument
+
+ private final Runnable mCallOnOpened = new Runnable() {
+ @Override
+ public void run() {
+ StateCallbackKK sessionCallback = null;
+ synchronized(mInterfaceLock) {
+ if (mRemoteDevice == null) return; // Camera already closed
+
+ sessionCallback = mSessionStateCallback;
+ }
+ if (sessionCallback != null) {
+ sessionCallback.onOpened(CameraDeviceImpl.this);
+ }
+ mDeviceCallback.onOpened(CameraDeviceImpl.this);
+ }
+ };
+
+ private final Runnable mCallOnUnconfigured = new Runnable() {
+ @Override
+ public void run() {
+ StateCallbackKK sessionCallback = null;
+ synchronized(mInterfaceLock) {
+ if (mRemoteDevice == null) return; // Camera already closed
+
+ sessionCallback = mSessionStateCallback;
+ }
+ if (sessionCallback != null) {
+ sessionCallback.onUnconfigured(CameraDeviceImpl.this);
+ }
+ }
+ };
+
+ private final Runnable mCallOnActive = new Runnable() {
+ @Override
+ public void run() {
+ StateCallbackKK sessionCallback = null;
+ synchronized(mInterfaceLock) {
+ if (mRemoteDevice == null) return; // Camera already closed
+
+ sessionCallback = mSessionStateCallback;
+ }
+ if (sessionCallback != null) {
+ sessionCallback.onActive(CameraDeviceImpl.this);
+ }
+ }
+ };
+
+ private final Runnable mCallOnBusy = new Runnable() {
+ @Override
+ public void run() {
+ StateCallbackKK sessionCallback = null;
+ synchronized(mInterfaceLock) {
+ if (mRemoteDevice == null) return; // Camera already closed
+
+ sessionCallback = mSessionStateCallback;
+ }
+ if (sessionCallback != null) {
+ sessionCallback.onBusy(CameraDeviceImpl.this);
+ }
+ }
+ };
+
+ private final Runnable mCallOnClosed = new Runnable() {
+ private boolean mClosedOnce = false;
+
+ @Override
+ public void run() {
+ if (mClosedOnce) {
+ throw new AssertionError("Don't post #onClosed more than once");
+ }
+ StateCallbackKK sessionCallback = null;
+ synchronized(mInterfaceLock) {
+ sessionCallback = mSessionStateCallback;
+ }
+ if (sessionCallback != null) {
+ sessionCallback.onClosed(CameraDeviceImpl.this);
+ }
+ mDeviceCallback.onClosed(CameraDeviceImpl.this);
+ mClosedOnce = true;
+ }
+ };
+
+ private final Runnable mCallOnIdle = new Runnable() {
+ @Override
+ public void run() {
+ StateCallbackKK sessionCallback = null;
+ synchronized(mInterfaceLock) {
+ if (mRemoteDevice == null) return; // Camera already closed
+
+ sessionCallback = mSessionStateCallback;
+ }
+ if (sessionCallback != null) {
+ sessionCallback.onIdle(CameraDeviceImpl.this);
+ }
+ }
+ };
+
+ private final Runnable mCallOnDisconnected = new Runnable() {
+ @Override
+ public void run() {
+ StateCallbackKK sessionCallback = null;
+ synchronized(mInterfaceLock) {
+ if (mRemoteDevice == null) return; // Camera already closed
+
+ sessionCallback = mSessionStateCallback;
+ }
+ if (sessionCallback != null) {
+ sessionCallback.onDisconnected(CameraDeviceImpl.this);
+ }
+ mDeviceCallback.onDisconnected(CameraDeviceImpl.this);
+ }
+ };
+
+ public CameraDeviceImpl(String cameraId, StateCallback callback, Handler handler,
+ CameraCharacteristics characteristics, int appTargetSdkVersion) {
+ if (cameraId == null || callback == null || handler == null || characteristics == null) {
+ throw new IllegalArgumentException("Null argument given");
+ }
+ mCameraId = cameraId;
+ mDeviceCallback = callback;
+ mDeviceHandler = handler;
+ mCharacteristics = characteristics;
+ mAppTargetSdkVersion = appTargetSdkVersion;
+
+ final int MAX_TAG_LEN = 23;
+ String tag = String.format("CameraDevice-JV-%s", mCameraId);
+ if (tag.length() > MAX_TAG_LEN) {
+ tag = tag.substring(0, MAX_TAG_LEN);
+ }
+ TAG = tag;
+
+ Integer partialCount =
+ mCharacteristics.get(CameraCharacteristics.REQUEST_PARTIAL_RESULT_COUNT);
+ if (partialCount == null) {
+ // 1 means partial result is not supported.
+ mTotalPartialCount = 1;
+ } else {
+ mTotalPartialCount = partialCount;
+ }
+ }
+
+ public CameraDeviceCallbacks getCallbacks() {
+ return mCallbacks;
+ }
+
+ /**
+ * Set remote device, which triggers initial onOpened/onUnconfigured callbacks
+ *
+ * <p>This function may post onDisconnected and throw CAMERA_DISCONNECTED if remoteDevice dies
+ * during setup.</p>
+ *
+ */
+ public void setRemoteDevice(ICameraDeviceUser remoteDevice) throws CameraAccessException {
+ synchronized(mInterfaceLock) {
+ // TODO: Move from decorator to direct binder-mediated exceptions
+ // If setRemoteFailure already called, do nothing
+ if (mInError) return;
+
+ mRemoteDevice = new ICameraDeviceUserWrapper(remoteDevice);
+
+ IBinder remoteDeviceBinder = remoteDevice.asBinder();
+ // For legacy camera device, remoteDevice is in the same process, and
+ // asBinder returns NULL.
+ if (remoteDeviceBinder != null) {
+ try {
+ remoteDeviceBinder.linkToDeath(this, /*flag*/ 0);
+ } catch (RemoteException e) {
+ CameraDeviceImpl.this.mDeviceHandler.post(mCallOnDisconnected);
+
+ throw new CameraAccessException(CameraAccessException.CAMERA_DISCONNECTED,
+ "The camera device has encountered a serious error");
+ }
+ }
+
+ mDeviceHandler.post(mCallOnOpened);
+ mDeviceHandler.post(mCallOnUnconfigured);
+ }
+ }
+
+ /**
+ * Call to indicate failed connection to a remote camera device.
+ *
+ * <p>This places the camera device in the error state and informs the callback.
+ * Use in place of setRemoteDevice() when startup fails.</p>
+ */
+ public void setRemoteFailure(final ServiceSpecificException failure) {
+ int failureCode = StateCallback.ERROR_CAMERA_DEVICE;
+ boolean failureIsError = true;
+
+ switch (failure.errorCode) {
+ case ICameraService.ERROR_CAMERA_IN_USE:
+ failureCode = StateCallback.ERROR_CAMERA_IN_USE;
+ break;
+ case ICameraService.ERROR_MAX_CAMERAS_IN_USE:
+ failureCode = StateCallback.ERROR_MAX_CAMERAS_IN_USE;
+ break;
+ case ICameraService.ERROR_DISABLED:
+ failureCode = StateCallback.ERROR_CAMERA_DISABLED;
+ break;
+ case ICameraService.ERROR_DISCONNECTED:
+ failureIsError = false;
+ break;
+ case ICameraService.ERROR_INVALID_OPERATION:
+ failureCode = StateCallback.ERROR_CAMERA_DEVICE;
+ break;
+ default:
+                Log.e(TAG, "Unexpected failure in opening camera device: " + failure.errorCode +
+                        " " + failure.getMessage());
+ break;
+ }
+ final int code = failureCode;
+ final boolean isError = failureIsError;
+ synchronized(mInterfaceLock) {
+ mInError = true;
+ mDeviceHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ if (isError) {
+ mDeviceCallback.onError(CameraDeviceImpl.this, code);
+ } else {
+ mDeviceCallback.onDisconnected(CameraDeviceImpl.this);
+ }
+ }
+ });
+ }
+ }
+
+ @Override
+ public String getId() {
+ return mCameraId;
+ }
+
+ public void configureOutputs(List<Surface> outputs) throws CameraAccessException {
+ // Leave this here for backwards compatibility with older code using this directly
+ ArrayList<OutputConfiguration> outputConfigs = new ArrayList<>(outputs.size());
+ for (Surface s : outputs) {
+ outputConfigs.add(new OutputConfiguration(s));
+ }
+ configureStreamsChecked(/*inputConfig*/null, outputConfigs,
+ /*operatingMode*/ICameraDeviceUser.NORMAL_MODE);
+
+ }
+
+ /**
+ * Attempt to configure the input and outputs; the device goes to idle and then configures the
+ * new input and outputs if possible.
+ *
+     * <p>The configuration may gracefully fail: if the input configuration is not supported,
+     * if there are too many outputs, if the formats are not supported, or if the sizes for a
+     * format are not supported. In this case this function will return {@code false} and the
+ * unconfigured callback will be fired.</p>
+ *
+ * <p>If the configuration succeeds (with 1 or more outputs with or without an input),
+ * then the idle callback is fired. Unconfiguring the device always fires the idle callback.</p>
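+     *
+     * <p>For example, the capture session teardown path unconfigures the device with a
+     * call equivalent to:</p>
+     * <pre><code>
+     * configureStreamsChecked(null, null, ICameraDeviceUser.NORMAL_MODE); // unconfigure
+     * </code></pre>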
+ *
+ * @param inputConfig input configuration or {@code null} for no input
+ * @param outputs a list of one or more surfaces, or {@code null} to unconfigure
+ * @param operatingMode If the stream configuration is for a normal session,
+ * a constrained high speed session, or something else.
+ * @return whether or not the configuration was successful
+ *
+ * @throws CameraAccessException if there were any unexpected problems during configuration
+ */
+ public boolean configureStreamsChecked(InputConfiguration inputConfig,
+ List<OutputConfiguration> outputs, int operatingMode)
+ throws CameraAccessException {
+        // Treat a null input the same as an empty list
+ if (outputs == null) {
+ outputs = new ArrayList<OutputConfiguration>();
+ }
+ if (outputs.size() == 0 && inputConfig != null) {
+ throw new IllegalArgumentException("cannot configure an input stream without " +
+ "any output streams");
+ }
+
+ checkInputConfiguration(inputConfig);
+
+ boolean success = false;
+
+ synchronized(mInterfaceLock) {
+ checkIfCameraClosedOrInError();
+ // Streams to create
+ HashSet<OutputConfiguration> addSet = new HashSet<OutputConfiguration>(outputs);
+ // Streams to delete
+ List<Integer> deleteList = new ArrayList<Integer>();
+
+ // Determine which streams need to be created, which to be deleted
+ for (int i = 0; i < mConfiguredOutputs.size(); ++i) {
+ int streamId = mConfiguredOutputs.keyAt(i);
+ OutputConfiguration outConfig = mConfiguredOutputs.valueAt(i);
+
+ if (!outputs.contains(outConfig) || outConfig.isDeferredConfiguration()) {
+                    // Always delete a deferred output configuration when the session is
+                    // created, as a deferred output configuration doesn't have unique
+                    // surface-related identifiers.
+ deleteList.add(streamId);
+ } else {
+ addSet.remove(outConfig); // Don't create a stream previously created
+ }
+ }
+
+ mDeviceHandler.post(mCallOnBusy);
+ stopRepeating();
+
+ try {
+ waitUntilIdle();
+
+ mRemoteDevice.beginConfigure();
+
+ // reconfigure the input stream if the input configuration is different.
+ InputConfiguration currentInputConfig = mConfiguredInput.getValue();
+ if (inputConfig != currentInputConfig &&
+ (inputConfig == null || !inputConfig.equals(currentInputConfig))) {
+ if (currentInputConfig != null) {
+ mRemoteDevice.deleteStream(mConfiguredInput.getKey());
+ mConfiguredInput = new SimpleEntry<Integer, InputConfiguration>(
+ REQUEST_ID_NONE, null);
+ }
+ if (inputConfig != null) {
+ int streamId = mRemoteDevice.createInputStream(inputConfig.getWidth(),
+ inputConfig.getHeight(), inputConfig.getFormat());
+ mConfiguredInput = new SimpleEntry<Integer, InputConfiguration>(
+ streamId, inputConfig);
+ }
+ }
+
+ // Delete all streams first (to free up HW resources)
+ for (Integer streamId : deleteList) {
+ mRemoteDevice.deleteStream(streamId);
+ mConfiguredOutputs.delete(streamId);
+ }
+
+ // Add all new streams
+ for (OutputConfiguration outConfig : outputs) {
+ if (addSet.contains(outConfig)) {
+ int streamId = mRemoteDevice.createStream(outConfig);
+ mConfiguredOutputs.put(streamId, outConfig);
+ }
+ }
+
+ mRemoteDevice.endConfigure(operatingMode);
+
+ success = true;
+ } catch (IllegalArgumentException e) {
+ // OK. camera service can reject stream config if it's not supported by HAL
+ // This is only the result of a programmer misusing the camera2 api.
+ Log.w(TAG, "Stream configuration failed due to: " + e.getMessage());
+ return false;
+ } catch (CameraAccessException e) {
+ if (e.getReason() == CameraAccessException.CAMERA_IN_USE) {
+ throw new IllegalStateException("The camera is currently busy." +
+ " You must wait until the previous operation completes.", e);
+ }
+ throw e;
+ } finally {
+ if (success && outputs.size() > 0) {
+ mDeviceHandler.post(mCallOnIdle);
+ } else {
+ // Always return to the 'unconfigured' state if we didn't hit a fatal error
+ mDeviceHandler.post(mCallOnUnconfigured);
+ }
+ }
+ }
+
+ return success;
+ }
+
+ @Override
+ public void createCaptureSession(List<Surface> outputs,
+ CameraCaptureSession.StateCallback callback, Handler handler)
+ throws CameraAccessException {
+ List<OutputConfiguration> outConfigurations = new ArrayList<>(outputs.size());
+ for (Surface surface : outputs) {
+ outConfigurations.add(new OutputConfiguration(surface));
+ }
+ createCaptureSessionInternal(null, outConfigurations, callback, handler,
+ /*operatingMode*/ICameraDeviceUser.NORMAL_MODE);
+ }
+
+ @Override
+ public void createCaptureSessionByOutputConfigurations(
+ List<OutputConfiguration> outputConfigurations,
+ CameraCaptureSession.StateCallback callback, Handler handler)
+ throws CameraAccessException {
+ if (DEBUG) {
+ Log.d(TAG, "createCaptureSessionByOutputConfigurations");
+ }
+
+        // Make our own copy of the list; the caller could modify theirs after this call.
+ List<OutputConfiguration> currentOutputs = new ArrayList<>(outputConfigurations);
+
+ createCaptureSessionInternal(null, currentOutputs, callback, handler,
+ /*operatingMode*/ICameraDeviceUser.NORMAL_MODE);
+ }
+
+ @Override
+ public void createReprocessableCaptureSession(InputConfiguration inputConfig,
+ List<Surface> outputs, CameraCaptureSession.StateCallback callback, Handler handler)
+ throws CameraAccessException {
+ if (DEBUG) {
+ Log.d(TAG, "createReprocessableCaptureSession");
+ }
+
+ if (inputConfig == null) {
+ throw new IllegalArgumentException("inputConfig cannot be null when creating a " +
+ "reprocessable capture session");
+ }
+ List<OutputConfiguration> outConfigurations = new ArrayList<>(outputs.size());
+ for (Surface surface : outputs) {
+ outConfigurations.add(new OutputConfiguration(surface));
+ }
+ createCaptureSessionInternal(inputConfig, outConfigurations, callback, handler,
+ /*operatingMode*/ICameraDeviceUser.NORMAL_MODE);
+ }
+
+ @Override
+ public void createReprocessableCaptureSessionByConfigurations(InputConfiguration inputConfig,
+ List<OutputConfiguration> outputs,
+ android.hardware.camera2.CameraCaptureSession.StateCallback callback, Handler handler)
+ throws CameraAccessException {
+ if (DEBUG) {
+ Log.d(TAG, "createReprocessableCaptureSessionWithConfigurations");
+ }
+
+ if (inputConfig == null) {
+ throw new IllegalArgumentException("inputConfig cannot be null when creating a " +
+ "reprocessable capture session");
+ }
+
+ if (outputs == null) {
+ throw new IllegalArgumentException("Output configurations cannot be null when " +
+ "creating a reprocessable capture session");
+ }
+
+ // OutputConfiguration objects aren't immutable, make a copy before using.
+ List<OutputConfiguration> currentOutputs = new ArrayList<OutputConfiguration>();
+ for (OutputConfiguration output : outputs) {
+ currentOutputs.add(new OutputConfiguration(output));
+ }
+ createCaptureSessionInternal(inputConfig, currentOutputs,
+ callback, handler, /*operatingMode*/ICameraDeviceUser.NORMAL_MODE);
+ }
+
+ @Override
+ public void createConstrainedHighSpeedCaptureSession(List<Surface> outputs,
+ android.hardware.camera2.CameraCaptureSession.StateCallback callback, Handler handler)
+ throws CameraAccessException {
+ if (outputs == null || outputs.size() == 0 || outputs.size() > 2) {
+ throw new IllegalArgumentException(
+ "Output surface list must not be null and the size must be no more than 2");
+ }
+ StreamConfigurationMap config =
+ getCharacteristics().get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+ SurfaceUtils.checkConstrainedHighSpeedSurfaces(outputs, /*fpsRange*/null, config);
+
+ List<OutputConfiguration> outConfigurations = new ArrayList<>(outputs.size());
+ for (Surface surface : outputs) {
+ outConfigurations.add(new OutputConfiguration(surface));
+ }
+ createCaptureSessionInternal(null, outConfigurations, callback, handler,
+ /*operatingMode*/ICameraDeviceUser.CONSTRAINED_HIGH_SPEED_MODE);
+ }
+
+ @Override
+ public void createCustomCaptureSession(InputConfiguration inputConfig,
+ List<OutputConfiguration> outputs,
+ int operatingMode,
+ android.hardware.camera2.CameraCaptureSession.StateCallback callback,
+ Handler handler) throws CameraAccessException {
+ List<OutputConfiguration> currentOutputs = new ArrayList<OutputConfiguration>();
+ for (OutputConfiguration output : outputs) {
+ currentOutputs.add(new OutputConfiguration(output));
+ }
+ createCaptureSessionInternal(inputConfig, currentOutputs, callback, handler, operatingMode);
+ }
+
+ private void createCaptureSessionInternal(InputConfiguration inputConfig,
+ List<OutputConfiguration> outputConfigurations,
+ CameraCaptureSession.StateCallback callback, Handler handler,
+ int operatingMode) throws CameraAccessException {
+ synchronized(mInterfaceLock) {
+ if (DEBUG) {
+ Log.d(TAG, "createCaptureSessionInternal");
+ }
+
+ checkIfCameraClosedOrInError();
+
+ boolean isConstrainedHighSpeed =
+ (operatingMode == ICameraDeviceUser.CONSTRAINED_HIGH_SPEED_MODE);
+ if (isConstrainedHighSpeed && inputConfig != null) {
+ throw new IllegalArgumentException("Constrained high speed session doesn't support"
+ + " input configuration yet.");
+ }
+
+ // Notify current session that it's going away, before starting camera operations
+ // After this call completes, the session is not allowed to call into CameraDeviceImpl
+ if (mCurrentSession != null) {
+ mCurrentSession.replaceSessionClose();
+ }
+
+            // TODO: don't block for this
+ boolean configureSuccess = true;
+ CameraAccessException pendingException = null;
+ Surface input = null;
+ try {
+ // configure streams and then block until IDLE
+ configureSuccess = configureStreamsChecked(inputConfig, outputConfigurations,
+ operatingMode);
+                if (configureSuccess && inputConfig != null) {
+ input = mRemoteDevice.getInputSurface();
+ }
+ } catch (CameraAccessException e) {
+ configureSuccess = false;
+ pendingException = e;
+ input = null;
+ if (DEBUG) {
+ Log.v(TAG, "createCaptureSession - failed with exception ", e);
+ }
+ }
+
+ // Fire onConfigured if configureOutputs succeeded, fire onConfigureFailed otherwise.
+ CameraCaptureSessionCore newSession = null;
+ if (isConstrainedHighSpeed) {
+ newSession = new CameraConstrainedHighSpeedCaptureSessionImpl(mNextSessionId++,
+ callback, handler, this, mDeviceHandler, configureSuccess,
+ mCharacteristics);
+ } else {
+ newSession = new CameraCaptureSessionImpl(mNextSessionId++, input,
+ callback, handler, this, mDeviceHandler,
+ configureSuccess);
+ }
+
+ // TODO: wait until current session closes, then create the new session
+ mCurrentSession = newSession;
+
+ if (pendingException != null) {
+ throw pendingException;
+ }
+
+ mSessionStateCallback = mCurrentSession.getDeviceStateCallback();
+ }
+ }
+
+ /**
+ * For use by backwards-compatibility code only.
+ */
+ public void setSessionListener(StateCallbackKK sessionCallback) {
+ synchronized(mInterfaceLock) {
+ mSessionStateCallback = sessionCallback;
+ }
+ }
+
+ private void overrideEnableZsl(CameraMetadataNative request, boolean newValue) {
+ Boolean enableZsl = request.get(CaptureRequest.CONTROL_ENABLE_ZSL);
+ if (enableZsl == null) {
+ // If enableZsl is not available, don't override.
+ return;
+ }
+
+ request.set(CaptureRequest.CONTROL_ENABLE_ZSL, newValue);
+ }
+
+ @Override
+ public CaptureRequest.Builder createCaptureRequest(int templateType)
+ throws CameraAccessException {
+ synchronized(mInterfaceLock) {
+ checkIfCameraClosedOrInError();
+
+            CameraMetadataNative templatedRequest =
+                    mRemoteDevice.createDefaultRequest(templateType);
+
+ // If app target SDK is older than O, or it's not a still capture template, enableZsl
+ // must be false in the default request.
+ if (mAppTargetSdkVersion < Build.VERSION_CODES.O ||
+ templateType != TEMPLATE_STILL_CAPTURE) {
+ overrideEnableZsl(templatedRequest, false);
+ }
+
+ CaptureRequest.Builder builder = new CaptureRequest.Builder(
+ templatedRequest, /*reprocess*/false, CameraCaptureSession.SESSION_ID_NONE);
+
+ return builder;
+ }
+ }
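+
+    // A usage sketch for the method above (device, session, jpegSurface,
+    // captureCallback and handler are hypothetical): building a still-capture
+    // request, whose CONTROL_ENABLE_ZSL default depends on the app's target SDK as
+    // implemented above, and submitting it on an existing session.
+    //
+    //   CaptureRequest.Builder builder =
+    //           device.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
+    //   builder.addTarget(jpegSurface);
+    //   session.capture(builder.build(), captureCallback, handler);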
+
+ @Override
+ public CaptureRequest.Builder createReprocessCaptureRequest(TotalCaptureResult inputResult)
+ throws CameraAccessException {
+ synchronized(mInterfaceLock) {
+ checkIfCameraClosedOrInError();
+
+            CameraMetadataNative resultMetadata =
+                    new CameraMetadataNative(inputResult.getNativeCopy());
+
+ return new CaptureRequest.Builder(resultMetadata, /*reprocess*/true,
+ inputResult.getSessionId());
+ }
+ }
+
+ public void prepare(Surface surface) throws CameraAccessException {
+ if (surface == null) throw new IllegalArgumentException("Surface is null");
+
+ synchronized(mInterfaceLock) {
+ int streamId = -1;
+ for (int i = 0; i < mConfiguredOutputs.size(); i++) {
+ final List<Surface> surfaces = mConfiguredOutputs.valueAt(i).getSurfaces();
+ if (surfaces.contains(surface)) {
+ streamId = mConfiguredOutputs.keyAt(i);
+ break;
+ }
+ }
+ if (streamId == -1) {
+ throw new IllegalArgumentException("Surface is not part of this session");
+ }
+
+ mRemoteDevice.prepare(streamId);
+ }
+ }
+
+ public void prepare(int maxCount, Surface surface) throws CameraAccessException {
+ if (surface == null) throw new IllegalArgumentException("Surface is null");
+ if (maxCount <= 0) throw new IllegalArgumentException("Invalid maxCount given: " +
+ maxCount);
+
+ synchronized(mInterfaceLock) {
+ int streamId = -1;
+ for (int i = 0; i < mConfiguredOutputs.size(); i++) {
+ if (surface == mConfiguredOutputs.valueAt(i).getSurface()) {
+ streamId = mConfiguredOutputs.keyAt(i);
+ break;
+ }
+ }
+ if (streamId == -1) {
+ throw new IllegalArgumentException("Surface is not part of this session");
+ }
+
+ mRemoteDevice.prepare2(maxCount, streamId);
+ }
+ }
+
+ public void tearDown(Surface surface) throws CameraAccessException {
+ if (surface == null) throw new IllegalArgumentException("Surface is null");
+
+ synchronized(mInterfaceLock) {
+ int streamId = -1;
+ for (int i = 0; i < mConfiguredOutputs.size(); i++) {
+ if (surface == mConfiguredOutputs.valueAt(i).getSurface()) {
+ streamId = mConfiguredOutputs.keyAt(i);
+ break;
+ }
+ }
+ if (streamId == -1) {
+ throw new IllegalArgumentException("Surface is not part of this session");
+ }
+
+ mRemoteDevice.tearDown(streamId);
+ }
+ }
+
+ public void finalizeOutputConfigs(List<OutputConfiguration> outputConfigs)
+ throws CameraAccessException {
+ if (outputConfigs == null || outputConfigs.size() == 0) {
+ throw new IllegalArgumentException("deferred config is null or empty");
+ }
+
+ synchronized(mInterfaceLock) {
+ for (OutputConfiguration config : outputConfigs) {
+ int streamId = -1;
+ for (int i = 0; i < mConfiguredOutputs.size(); i++) {
+                    // Have to use equals() here, as createCaptureSessionByOutputConfigurations()
+                    // and createReprocessableCaptureSessionByConfigurations() make copies of
+                    // the configs.
+ if (config.equals(mConfiguredOutputs.valueAt(i))) {
+ streamId = mConfiguredOutputs.keyAt(i);
+ break;
+ }
+ }
+ if (streamId == -1) {
+ throw new IllegalArgumentException("Deferred config is not part of this "
+ + "session");
+ }
+
+ if (config.getSurfaces().size() == 0) {
+ throw new IllegalArgumentException("The final config for stream " + streamId
+ + " must have at least 1 surface");
+ }
+ mRemoteDevice.finalizeOutputConfigurations(streamId, config);
+ }
+ }
+ }
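+
+    // A usage sketch for deferred output finalization (previewSurface and the size
+    // are hypothetical): the session is created with a size/class-only
+    // OutputConfiguration, and the Surface is attached and finalized once available.
+    //
+    //   OutputConfiguration deferred =
+    //           new OutputConfiguration(new Size(1920, 1080), SurfaceHolder.class);
+    //   // ... create the session with the deferred config, then later:
+    //   deferred.addSurface(previewSurface);
+    //   device.finalizeOutputConfigs(Arrays.asList(deferred));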
+
+ public int capture(CaptureRequest request, CaptureCallback callback, Handler handler)
+ throws CameraAccessException {
+ if (DEBUG) {
+ Log.d(TAG, "calling capture");
+ }
+ List<CaptureRequest> requestList = new ArrayList<CaptureRequest>();
+ requestList.add(request);
+ return submitCaptureRequest(requestList, callback, handler, /*streaming*/false);
+ }
+
+ public int captureBurst(List<CaptureRequest> requests, CaptureCallback callback,
+ Handler handler) throws CameraAccessException {
+ if (requests == null || requests.isEmpty()) {
+ throw new IllegalArgumentException("At least one request must be given");
+ }
+ return submitCaptureRequest(requests, callback, handler, /*streaming*/false);
+ }
+
+ /**
+ * This method checks lastFrameNumber returned from ICameraDeviceUser methods for
+ * starting and stopping repeating request and flushing.
+ *
+ * <p>If lastFrameNumber is NO_FRAMES_CAPTURED, it means that the request was never
+ * sent to HAL. Then onCaptureSequenceAborted is immediately triggered.
+     * If lastFrameNumber is non-negative, then the requestId and lastFrameNumber will be
+     * added to the list mRequestLastFrameNumbersList, with lastFrameNumber recorded as the
+     * last regular frame number.</p>
+ *
+ * @param requestId the request ID of the current repeating request.
+ *
+ * @param lastFrameNumber last frame number returned from binder.
+ */
+ private void checkEarlyTriggerSequenceComplete(
+ final int requestId, final long lastFrameNumber) {
+ // lastFrameNumber being equal to NO_FRAMES_CAPTURED means that the request
+ // was never sent to HAL. Should trigger onCaptureSequenceAborted immediately.
+ if (lastFrameNumber == CaptureCallback.NO_FRAMES_CAPTURED) {
+ final CaptureCallbackHolder holder;
+ int index = mCaptureCallbackMap.indexOfKey(requestId);
+ holder = (index >= 0) ? mCaptureCallbackMap.valueAt(index) : null;
+ if (holder != null) {
+ mCaptureCallbackMap.removeAt(index);
+ if (DEBUG) {
+ Log.v(TAG, String.format(
+ "remove holder for requestId %d, "
+ + "because lastFrame is %d.",
+ requestId, lastFrameNumber));
+ }
+ }
+
+ if (holder != null) {
+ if (DEBUG) {
+ Log.v(TAG, "immediately trigger onCaptureSequenceAborted because"
+ + " request did not reach HAL");
+ }
+
+ Runnable resultDispatch = new Runnable() {
+ @Override
+ public void run() {
+ if (!CameraDeviceImpl.this.isClosed()) {
+ if (DEBUG) {
+ Log.d(TAG, String.format(
+ "early trigger sequence complete for request %d",
+ requestId));
+ }
+ holder.getCallback().onCaptureSequenceAborted(
+ CameraDeviceImpl.this,
+ requestId);
+ }
+ }
+ };
+ holder.getHandler().post(resultDispatch);
+ } else {
+ Log.w(TAG, String.format(
+ "did not register callback to request %d",
+ requestId));
+ }
+ } else {
+            // This function is only called for regular requests, so lastFrameNumber is the last
+ // regular frame number.
+ mRequestLastFrameNumbersList.add(new RequestLastFrameNumbersHolder(requestId,
+ lastFrameNumber));
+
+ // It is possible that the last frame has already arrived, so we need to check
+ // for sequence completion right away
+ checkAndFireSequenceComplete();
+ }
+ }
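+
+    // A worked trace of the two paths above (numbers hypothetical):
+    // - cancelRequest() returns NO_FRAMES_CAPTURED (-1): the request never reached
+    //   the HAL, so the callback holder is removed and onCaptureSequenceAborted is
+    //   posted immediately.
+    // - cancelRequest() returns 42: (requestId, 42) is recorded in
+    //   mRequestLastFrameNumbersList, and onCaptureSequenceCompleted fires once the
+    //   frame number tracker reports frame 42 as completed.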
+
+ private int submitCaptureRequest(List<CaptureRequest> requestList, CaptureCallback callback,
+ Handler handler, boolean repeating) throws CameraAccessException {
+
+        // If the callback is non-null, a valid handler is required, or the current
+        // thread must have a looper.
+ handler = checkHandler(handler, callback);
+
+        // Make sure that all requests have at least one surface, and that all surfaces
+        // are non-null.
+ for (CaptureRequest request : requestList) {
+ if (request.getTargets().isEmpty()) {
+ throw new IllegalArgumentException(
+ "Each request must have at least one Surface target");
+ }
+
+ for (Surface surface : request.getTargets()) {
+ if (surface == null) {
+ throw new IllegalArgumentException("Null Surface targets are not allowed");
+ }
+ }
+ }
+
+ synchronized(mInterfaceLock) {
+ checkIfCameraClosedOrInError();
+ if (repeating) {
+ stopRepeating();
+ }
+
+ SubmitInfo requestInfo;
+
+            CaptureRequest[] requestArray =
+                    requestList.toArray(new CaptureRequest[requestList.size()]);
+ requestInfo = mRemoteDevice.submitRequestList(requestArray, repeating);
+ if (DEBUG) {
+ Log.v(TAG, "last frame number " + requestInfo.getLastFrameNumber());
+ }
+
+ if (callback != null) {
+ mCaptureCallbackMap.put(requestInfo.getRequestId(),
+ new CaptureCallbackHolder(
+ callback, requestList, handler, repeating, mNextSessionId - 1));
+ } else {
+ if (DEBUG) {
+ Log.d(TAG, "Listen for request " + requestInfo.getRequestId() + " is null");
+ }
+ }
+
+ if (repeating) {
+ if (mRepeatingRequestId != REQUEST_ID_NONE) {
+ checkEarlyTriggerSequenceComplete(mRepeatingRequestId,
+ requestInfo.getLastFrameNumber());
+ }
+ mRepeatingRequestId = requestInfo.getRequestId();
+ } else {
+ mRequestLastFrameNumbersList.add(
+ new RequestLastFrameNumbersHolder(requestList, requestInfo));
+ }
+
+ if (mIdle) {
+ mDeviceHandler.post(mCallOnActive);
+ }
+ mIdle = false;
+
+ return requestInfo.getRequestId();
+ }
+ }
+
+ public int setRepeatingRequest(CaptureRequest request, CaptureCallback callback,
+ Handler handler) throws CameraAccessException {
+ List<CaptureRequest> requestList = new ArrayList<CaptureRequest>();
+ requestList.add(request);
+ return submitCaptureRequest(requestList, callback, handler, /*streaming*/true);
+ }
+
+ public int setRepeatingBurst(List<CaptureRequest> requests, CaptureCallback callback,
+ Handler handler) throws CameraAccessException {
+ if (requests == null || requests.isEmpty()) {
+ throw new IllegalArgumentException("At least one request must be given");
+ }
+ return submitCaptureRequest(requests, callback, handler, /*streaming*/true);
+ }
+
+ public void stopRepeating() throws CameraAccessException {
+
+ synchronized(mInterfaceLock) {
+ checkIfCameraClosedOrInError();
+ if (mRepeatingRequestId != REQUEST_ID_NONE) {
+
+ int requestId = mRepeatingRequestId;
+ mRepeatingRequestId = REQUEST_ID_NONE;
+
+ long lastFrameNumber;
+ try {
+ lastFrameNumber = mRemoteDevice.cancelRequest(requestId);
+ } catch (IllegalArgumentException e) {
+ if (DEBUG) {
+ Log.v(TAG, "Repeating request was already stopped for request " + requestId);
+ }
+ // Repeating request was already stopped. Nothing more to do.
+ return;
+ }
+
+ checkEarlyTriggerSequenceComplete(requestId, lastFrameNumber);
+ }
+ }
+ }
+
+ private void waitUntilIdle() throws CameraAccessException {
+
+ synchronized(mInterfaceLock) {
+ checkIfCameraClosedOrInError();
+
+ if (mRepeatingRequestId != REQUEST_ID_NONE) {
+ throw new IllegalStateException("Active repeating request ongoing");
+ }
+
+ mRemoteDevice.waitUntilIdle();
+ }
+ }
+
+ public void flush() throws CameraAccessException {
+ synchronized(mInterfaceLock) {
+ checkIfCameraClosedOrInError();
+
+ mDeviceHandler.post(mCallOnBusy);
+
+ // If already idle, just do a busy->idle transition immediately, don't actually
+ // flush.
+ if (mIdle) {
+ mDeviceHandler.post(mCallOnIdle);
+ return;
+ }
+
+ long lastFrameNumber = mRemoteDevice.flush();
+ if (mRepeatingRequestId != REQUEST_ID_NONE) {
+ checkEarlyTriggerSequenceComplete(mRepeatingRequestId, lastFrameNumber);
+ mRepeatingRequestId = REQUEST_ID_NONE;
+ }
+ }
+ }
+
+ @Override
+ public void close() {
+ synchronized (mInterfaceLock) {
+ if (mClosing.getAndSet(true)) {
+ return;
+ }
+
+ if (mRemoteDevice != null) {
+ mRemoteDevice.disconnect();
+ mRemoteDevice.unlinkToDeath(this, /*flags*/0);
+ }
+
+ // Only want to fire the onClosed callback once;
+ // either a normal close where the remote device is valid
+ // or a close after a startup error (no remote device but in error state)
+ if (mRemoteDevice != null || mInError) {
+ mDeviceHandler.post(mCallOnClosed);
+ }
+
+ mRemoteDevice = null;
+ }
+ }
+
+ @Override
+ protected void finalize() throws Throwable {
+ try {
+ close();
+ }
+ finally {
+ super.finalize();
+ }
+ }
+
+ private void checkInputConfiguration(InputConfiguration inputConfig) {
+ if (inputConfig != null) {
+ StreamConfigurationMap configMap = mCharacteristics.get(
+ CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+
+            int[] inputFormats = configMap.getInputFormats();
+            boolean validFormat = false;
+            for (int format : inputFormats) {
+                if (format == inputConfig.getFormat()) {
+                    validFormat = true;
+                    break;
+                }
+            }
+
+            if (!validFormat) {
+                throw new IllegalArgumentException("input format " + inputConfig.getFormat() +
+                        " is not valid");
+            }
+
+            boolean validSize = false;
+            Size[] inputSizes = configMap.getInputSizes(inputConfig.getFormat());
+            for (Size s : inputSizes) {
+                if (inputConfig.getWidth() == s.getWidth() &&
+                        inputConfig.getHeight() == s.getHeight()) {
+                    validSize = true;
+                    break;
+                }
+            }
+
+            if (!validSize) {
+                throw new IllegalArgumentException("input size " + inputConfig.getWidth() + "x" +
+                        inputConfig.getHeight() + " is not valid");
+            }
+ }
+ }
+
+ /**
+ * <p>A callback for tracking the progress of a {@link CaptureRequest}
+ * submitted to the camera device.</p>
+ *
+     * <p>This is an interface rather than an abstract class because it is internal,
+     * and we want to guarantee that every callback is implemented all the way up to
+     * the public layer.</p>
+ */
+ public interface CaptureCallback {
+
+ /**
+ * This constant is used to indicate that no images were captured for
+ * the request.
+ *
+ * @hide
+ */
+ public static final int NO_FRAMES_CAPTURED = -1;
+
+ /**
+ * This method is called when the camera device has started capturing
+ * the output image for the request, at the beginning of image exposure.
+ *
+ * @see android.media.MediaActionSound
+ */
+ public void onCaptureStarted(CameraDevice camera,
+ CaptureRequest request, long timestamp, long frameNumber);
+
+ /**
+ * This method is called when some results from an image capture are
+ * available.
+ *
+ * @hide
+ */
+ public void onCapturePartial(CameraDevice camera,
+ CaptureRequest request, CaptureResult result);
+
+ /**
+ * This method is called when an image capture makes partial forward progress; some
+ * (but not all) results from an image capture are available.
+ *
+ */
+ public void onCaptureProgressed(CameraDevice camera,
+ CaptureRequest request, CaptureResult partialResult);
+
+ /**
+ * This method is called when an image capture has fully completed and all the
+ * result metadata is available.
+ */
+ public void onCaptureCompleted(CameraDevice camera,
+ CaptureRequest request, TotalCaptureResult result);
+
+ /**
+ * This method is called instead of {@link #onCaptureCompleted} when the
+ * camera device failed to produce a {@link CaptureResult} for the
+ * request.
+ */
+ public void onCaptureFailed(CameraDevice camera,
+ CaptureRequest request, CaptureFailure failure);
+
+ /**
+ * This method is called independently of the others in CaptureCallback,
+ * when a capture sequence finishes and all {@link CaptureResult}
+ * or {@link CaptureFailure} for it have been returned via this callback.
+ */
+ public void onCaptureSequenceCompleted(CameraDevice camera,
+ int sequenceId, long frameNumber);
+
+ /**
+ * This method is called independently of the others in CaptureCallback,
+ * when a capture sequence aborts before any {@link CaptureResult}
+ * or {@link CaptureFailure} for it have been returned via this callback.
+ */
+ public void onCaptureSequenceAborted(CameraDevice camera,
+ int sequenceId);
+
+ /**
+ * This method is called independently of the others in CaptureCallback, if an output buffer
+ * is dropped for a particular capture request.
+ *
+ * Loss of metadata is communicated via onCaptureFailed, independently of any buffer loss.
+ */
+ public void onCaptureBufferLost(CameraDevice camera,
+ CaptureRequest request, Surface target, long frameNumber);
+ }
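+
+    // A minimal sketch of implementing this internal interface; because it is an
+    // interface (not an abstract class), every callback must be overridden, even if
+    // only as a no-op. The variable name below is illustrative only.
+    //
+    //   CaptureCallback cb = new CaptureCallback() {
+    //       @Override public void onCaptureStarted(CameraDevice camera,
+    //               CaptureRequest request, long timestamp, long frameNumber) { }
+    //       @Override public void onCapturePartial(CameraDevice camera,
+    //               CaptureRequest request, CaptureResult result) { }
+    //       @Override public void onCaptureProgressed(CameraDevice camera,
+    //               CaptureRequest request, CaptureResult partialResult) { }
+    //       @Override public void onCaptureCompleted(CameraDevice camera,
+    //               CaptureRequest request, TotalCaptureResult result) { }
+    //       @Override public void onCaptureFailed(CameraDevice camera,
+    //               CaptureRequest request, CaptureFailure failure) { }
+    //       @Override public void onCaptureSequenceCompleted(CameraDevice camera,
+    //               int sequenceId, long frameNumber) {
+    //           Log.d(TAG, "sequence " + sequenceId + " completed at " + frameNumber);
+    //       }
+    //       @Override public void onCaptureSequenceAborted(CameraDevice camera,
+    //               int sequenceId) { }
+    //       @Override public void onCaptureBufferLost(CameraDevice camera,
+    //               CaptureRequest request, Surface target, long frameNumber) { }
+    //   };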
+
+ /**
+ * A callback for notifications about the state of a camera device, adding in the callbacks that
+ * were part of the earlier KK API design, but now only used internally.
+ */
+ public static abstract class StateCallbackKK extends StateCallback {
+ /**
+ * The method called when a camera device has no outputs configured.
+ *
+ */
+ public void onUnconfigured(CameraDevice camera) {
+ // Default empty implementation
+ }
+
+ /**
+ * The method called when a camera device begins processing
+ * {@link CaptureRequest capture requests}.
+ *
+ */
+ public void onActive(CameraDevice camera) {
+ // Default empty implementation
+ }
+
+ /**
+ * The method called when a camera device is busy.
+ *
+ */
+ public void onBusy(CameraDevice camera) {
+ // Default empty implementation
+ }
+
+ /**
+ * The method called when a camera device has finished processing all
+ * submitted capture requests and has reached an idle state.
+ *
+ */
+ public void onIdle(CameraDevice camera) {
+ // Default empty implementation
+ }
+
+ /**
+         * This method is called when the camera device's non-repeating request queue is
+         * empty and it is ready to start capturing the next image.
+ */
+ public void onRequestQueueEmpty() {
+ // Default empty implementation
+ }
+
+ /**
+ * The method called when the camera device has finished preparing
+         * an output Surface.
+ */
+ public void onSurfacePrepared(Surface surface) {
+ // Default empty implementation
+ }
+ }
+
+ static class CaptureCallbackHolder {
+
+ private final boolean mRepeating;
+ private final CaptureCallback mCallback;
+ private final List<CaptureRequest> mRequestList;
+ private final Handler mHandler;
+ private final int mSessionId;
+ /**
+ * <p>Determine if the callback holder is for a constrained high speed request list that
+ * expects batched capture results. Capture results will be batched if the request list
+ * is interleaved with preview and video requests. Capture results won't be batched if the
+ * request list only contains preview requests, or if the request doesn't belong to a
+         * constrained high speed list.</p>
+ */
+ private final boolean mHasBatchedOutputs;
+
+ CaptureCallbackHolder(CaptureCallback callback, List<CaptureRequest> requestList,
+ Handler handler, boolean repeating, int sessionId) {
+ if (callback == null || handler == null) {
+ throw new UnsupportedOperationException(
+ "Must have a valid handler and a valid callback");
+ }
+ mRepeating = repeating;
+ mHandler = handler;
+ mRequestList = new ArrayList<CaptureRequest>(requestList);
+ mCallback = callback;
+ mSessionId = sessionId;
+
+ // Check whether this callback holder is for batched outputs.
+ // The logic here should match createHighSpeedRequestList.
+ boolean hasBatchedOutputs = true;
+ for (int i = 0; i < requestList.size(); i++) {
+ CaptureRequest request = requestList.get(i);
+ if (!request.isPartOfCRequestList()) {
+ hasBatchedOutputs = false;
+ break;
+ }
+ if (i == 0) {
+ Collection<Surface> targets = request.getTargets();
+ if (targets.size() != 2) {
+ hasBatchedOutputs = false;
+ break;
+ }
+ }
+ }
+ mHasBatchedOutputs = hasBatchedOutputs;
+ }
+
+ public boolean isRepeating() {
+ return mRepeating;
+ }
+
+ public CaptureCallback getCallback() {
+ return mCallback;
+ }
+
+ public CaptureRequest getRequest(int subsequenceId) {
+ if (subsequenceId >= mRequestList.size()) {
+ throw new IllegalArgumentException(
+ String.format(
+ "Requested subsequenceId %d is larger than request list size %d.",
+ subsequenceId, mRequestList.size()));
+ } else {
+ if (subsequenceId < 0) {
+ throw new IllegalArgumentException(String.format(
+ "Requested subsequenceId %d is negative", subsequenceId));
+ } else {
+ return mRequestList.get(subsequenceId);
+ }
+ }
+ }
+
+ public CaptureRequest getRequest() {
+ return getRequest(0);
+ }
+
+ public Handler getHandler() {
+ return mHandler;
+ }
+
+ public int getSessionId() {
+ return mSessionId;
+ }
+
+ public int getRequestCount() {
+ return mRequestList.size();
+ }
+
+ public boolean hasBatchedOutputs() {
+ return mHasBatchedOutputs;
+ }
+ }
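+
+    // Worked examples of the batching rule above (request lists hypothetical):
+    // - A constrained high speed list interleaving preview+record requests, whose
+    //   first request targets 2 surfaces: mHasBatchedOutputs == true.
+    // - A constrained high speed list whose first request targets only the preview
+    //   surface: mHasBatchedOutputs == false.
+    // - Any list containing a request that is not part of a constrained high speed
+    //   request list: mHasBatchedOutputs == false.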
+
+ /**
+ * This class holds a capture ID and its expected last regular frame number and last reprocess
+ * frame number.
+ */
+ static class RequestLastFrameNumbersHolder {
+ // request ID
+ private final int mRequestId;
+ // The last regular frame number for this request ID. It's
+ // CaptureCallback.NO_FRAMES_CAPTURED if the request ID has no regular request.
+ private final long mLastRegularFrameNumber;
+ // The last reprocess frame number for this request ID. It's
+ // CaptureCallback.NO_FRAMES_CAPTURED if the request ID has no reprocess request.
+ private final long mLastReprocessFrameNumber;
+
+ /**
+ * Create a request-last-frame-numbers holder with a list of requests, request ID, and
+ * the last frame number returned by camera service.
+ */
+        public RequestLastFrameNumbersHolder(List<CaptureRequest> requestList,
+                SubmitInfo requestInfo) {
+ long lastRegularFrameNumber = CaptureCallback.NO_FRAMES_CAPTURED;
+ long lastReprocessFrameNumber = CaptureCallback.NO_FRAMES_CAPTURED;
+ long frameNumber = requestInfo.getLastFrameNumber();
+
+ if (requestInfo.getLastFrameNumber() < requestList.size() - 1) {
+ throw new IllegalArgumentException(
+ "lastFrameNumber: " + requestInfo.getLastFrameNumber() +
+ " should be at least " + (requestList.size() - 1) + " for the number of " +
+ " requests in the list: " + requestList.size());
+ }
+
+ // find the last regular frame number and the last reprocess frame number
+ for (int i = requestList.size() - 1; i >= 0; i--) {
+ CaptureRequest request = requestList.get(i);
+ if (request.isReprocess() && lastReprocessFrameNumber ==
+ CaptureCallback.NO_FRAMES_CAPTURED) {
+ lastReprocessFrameNumber = frameNumber;
+ } else if (!request.isReprocess() && lastRegularFrameNumber ==
+ CaptureCallback.NO_FRAMES_CAPTURED) {
+ lastRegularFrameNumber = frameNumber;
+ }
+
+ if (lastReprocessFrameNumber != CaptureCallback.NO_FRAMES_CAPTURED &&
+ lastRegularFrameNumber != CaptureCallback.NO_FRAMES_CAPTURED) {
+ break;
+ }
+
+ frameNumber--;
+ }
+
+ mLastRegularFrameNumber = lastRegularFrameNumber;
+ mLastReprocessFrameNumber = lastReprocessFrameNumber;
+ mRequestId = requestInfo.getRequestId();
+ }
+
+ /**
+ * Create a request-last-frame-numbers holder with a request ID and last regular frame
+ * number.
+ */
+ public RequestLastFrameNumbersHolder(int requestId, long lastRegularFrameNumber) {
+ mLastRegularFrameNumber = lastRegularFrameNumber;
+ mLastReprocessFrameNumber = CaptureCallback.NO_FRAMES_CAPTURED;
+ mRequestId = requestId;
+ }
+
+ /**
+ * Return the last regular frame number. Return CaptureCallback.NO_FRAMES_CAPTURED if
+ * it contains no regular request.
+ */
+ public long getLastRegularFrameNumber() {
+ return mLastRegularFrameNumber;
+ }
+
+ /**
+ * Return the last reprocess frame number. Return CaptureCallback.NO_FRAMES_CAPTURED if
+ * it contains no reprocess request.
+ */
+ public long getLastReprocessFrameNumber() {
+ return mLastReprocessFrameNumber;
+ }
+
+ /**
+ * Return the last frame number overall.
+ */
+ public long getLastFrameNumber() {
+ return Math.max(mLastRegularFrameNumber, mLastReprocessFrameNumber);
+ }
+
+ /**
+ * Return the request ID.
+ */
+ public int getRequestId() {
+ return mRequestId;
+ }
+ }
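+
+    // A worked example of the list-based constructor above (numbers hypothetical):
+    // for a submitted list [regular, reprocess, regular] with
+    // requestInfo.getLastFrameNumber() == 10, the backward walk assigns frame 10 to
+    // the trailing regular request and frame 9 to the reprocess request, so
+    // getLastRegularFrameNumber() == 10, getLastReprocessFrameNumber() == 9, and
+    // getLastFrameNumber() == 10.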
+
+ /**
+ * This class tracks the last frame number for submitted requests.
+ */
+ public class FrameNumberTracker {
+
+ private long mCompletedFrameNumber = CaptureCallback.NO_FRAMES_CAPTURED;
+ private long mCompletedReprocessFrameNumber = CaptureCallback.NO_FRAMES_CAPTURED;
+ /** the skipped frame numbers that belong to regular results */
+ private final LinkedList<Long> mSkippedRegularFrameNumbers = new LinkedList<Long>();
+ /** the skipped frame numbers that belong to reprocess results */
+ private final LinkedList<Long> mSkippedReprocessFrameNumbers = new LinkedList<Long>();
+ /** frame number -> is reprocess */
+ private final TreeMap<Long, Boolean> mFutureErrorMap = new TreeMap<Long, Boolean>();
+ /** Map frame numbers to list of partial results */
+ private final HashMap<Long, List<CaptureResult>> mPartialResults = new HashMap<>();
+
+ private void update() {
+            Iterator<java.util.Map.Entry<Long, Boolean>> iter =
+                    mFutureErrorMap.entrySet().iterator();
+            while (iter.hasNext()) {
+                java.util.Map.Entry<Long, Boolean> pair = iter.next();
+                // Use primitives so the frame-number comparisons below compare values,
+                // not boxed-object references.
+                long errorFrameNumber = pair.getKey();
+                boolean reprocess = pair.getValue();
+                boolean removeError = true;
+ if (reprocess) {
+ if (errorFrameNumber == mCompletedReprocessFrameNumber + 1) {
+ mCompletedReprocessFrameNumber = errorFrameNumber;
+                    } else if (!mSkippedReprocessFrameNumbers.isEmpty() &&
+ errorFrameNumber == mSkippedReprocessFrameNumbers.element()) {
+ mCompletedReprocessFrameNumber = errorFrameNumber;
+ mSkippedReprocessFrameNumbers.remove();
+ } else {
+ removeError = false;
+ }
+ } else {
+ if (errorFrameNumber == mCompletedFrameNumber + 1) {
+ mCompletedFrameNumber = errorFrameNumber;
+                    } else if (!mSkippedRegularFrameNumbers.isEmpty() &&
+ errorFrameNumber == mSkippedRegularFrameNumbers.element()) {
+ mCompletedFrameNumber = errorFrameNumber;
+ mSkippedRegularFrameNumbers.remove();
+ } else {
+ removeError = false;
+ }
+ }
+ if (removeError) {
+ iter.remove();
+ }
+ }
+ }
+
+ /**
+         * This function is called every time a result or an error is received.
+ * @param frameNumber the frame number corresponding to the result or error
+ * @param isError true if it is an error, false if it is not an error
+ * @param isReprocess true if it is a reprocess result, false if it is a regular result.
+ */
+ public void updateTracker(long frameNumber, boolean isError, boolean isReprocess) {
+ if (isError) {
+ mFutureErrorMap.put(frameNumber, isReprocess);
+ } else {
+ try {
+ if (isReprocess) {
+ updateCompletedReprocessFrameNumber(frameNumber);
+ } else {
+ updateCompletedFrameNumber(frameNumber);
+ }
+ } catch (IllegalArgumentException e) {
+ Log.e(TAG, e.getMessage());
+ }
+ }
+ update();
+ }
+
+ /**
+ * This function is called every time a result has been completed.
+ *
+         * <p>It keeps track of all the partial results already created for a particular
+ * frame number.</p>
+ *
+ * @param frameNumber the frame number corresponding to the result
+ * @param result the total or partial result
+         * @param partial {@code true} if the result is partial, {@code false} if total
+ * @param isReprocess true if it is a reprocess result, false if it is a regular result.
+ */
+ public void updateTracker(long frameNumber, CaptureResult result, boolean partial,
+ boolean isReprocess) {
+ if (!partial) {
+ // Update the total result's frame status as being successful
+ updateTracker(frameNumber, /*isError*/false, isReprocess);
+ // Don't keep a list of total results, we don't need to track them
+ return;
+ }
+
+ if (result == null) {
+ // Do not record blank results; this also means there will be no total result
+ // so it doesn't matter that the partials were not recorded
+ return;
+ }
+
+ // Partial results must be aggregated in-order for that frame number
+ List<CaptureResult> partials = mPartialResults.get(frameNumber);
+ if (partials == null) {
+ partials = new ArrayList<>();
+ mPartialResults.put(frameNumber, partials);
+ }
+
+ partials.add(result);
+ }
+
+ /**
+ * Attempt to pop off all of the partial results seen so far for the {@code frameNumber}.
+ *
+ * <p>Once popped-off, the partial results are forgotten (unless {@code updateTracker}
+ * is called again with new partials for that frame number).</p>
+ *
+ * @param frameNumber the frame number corresponding to the result
+ * @return a list of partial results for that frame with at least 1 element,
+ * or {@code null} if there were no partials recorded for that frame
+ */
+ public List<CaptureResult> popPartialResults(long frameNumber) {
+ return mPartialResults.remove(frameNumber);
+ }
+
+ public long getCompletedFrameNumber() {
+ return mCompletedFrameNumber;
+ }
+
+ public long getCompletedReprocessFrameNumber() {
+ return mCompletedReprocessFrameNumber;
+ }
+
+ /**
+ * Update the completed frame number for regular results.
+ *
+ * It validates that all previous frames have arrived except for reprocess frames.
+ *
+         * If there is a gap since the previous regular frame number, assume the frames in
+         * the gap are reprocess frames and store them in the skipped reprocess frame
+         * number queue to check against when reprocess frames arrive.
+ */
+ private void updateCompletedFrameNumber(long frameNumber) throws IllegalArgumentException {
+ if (frameNumber <= mCompletedFrameNumber) {
+ throw new IllegalArgumentException("frame number " + frameNumber + " is a repeat");
+ } else if (frameNumber <= mCompletedReprocessFrameNumber) {
+ // if frame number is smaller than completed reprocess frame number,
+ // it must be the head of mSkippedRegularFrameNumbers
+                if (mSkippedRegularFrameNumbers.isEmpty() ||
+ frameNumber < mSkippedRegularFrameNumbers.element()) {
+ throw new IllegalArgumentException("frame number " + frameNumber +
+ " is a repeat");
+ } else if (frameNumber > mSkippedRegularFrameNumbers.element()) {
+ throw new IllegalArgumentException("frame number " + frameNumber +
+ " comes out of order. Expecting " +
+ mSkippedRegularFrameNumbers.element());
+ }
+ // frame number matches the head of the skipped frame number queue.
+ mSkippedRegularFrameNumbers.remove();
+ } else {
+                // There is a gap of unseen frame numbers which should belong to reprocess
+                // results; put all the skipped frame numbers in the queue.
+ for (long i = Math.max(mCompletedFrameNumber, mCompletedReprocessFrameNumber) + 1;
+ i < frameNumber; i++) {
+ mSkippedReprocessFrameNumbers.add(i);
+ }
+ }
+
+ mCompletedFrameNumber = frameNumber;
+ }
+
+ /**
+ * Update the completed frame number for reprocess results.
+ *
+ * It validates that all previous frames have arrived except for regular frames.
+ *
+         * If there is a gap since the previous reprocess frame number, assume the frames in
+         * the gap are regular frames and store them in the skipped regular frame number
+         * queue to check against when regular frames arrive.
+ */
+ private void updateCompletedReprocessFrameNumber(long frameNumber)
+ throws IllegalArgumentException {
+ if (frameNumber < mCompletedReprocessFrameNumber) {
+ throw new IllegalArgumentException("frame number " + frameNumber + " is a repeat");
+ } else if (frameNumber < mCompletedFrameNumber) {
+ // if reprocess frame number is smaller than completed regular frame number,
+ // it must be the head of the skipped reprocess frame number queue.
+                if (mSkippedReprocessFrameNumbers.isEmpty() ||
+ frameNumber < mSkippedReprocessFrameNumbers.element()) {
+ throw new IllegalArgumentException("frame number " + frameNumber +
+ " is a repeat");
+ } else if (frameNumber > mSkippedReprocessFrameNumbers.element()) {
+ throw new IllegalArgumentException("frame number " + frameNumber +
+ " comes out of order. Expecting " +
+ mSkippedReprocessFrameNumbers.element());
+ }
+ // frame number matches the head of the skipped frame number queue.
+ mSkippedReprocessFrameNumbers.remove();
+ } else {
+ // put all the skipped frame numbers in the queue
+ for (long i = Math.max(mCompletedFrameNumber, mCompletedReprocessFrameNumber) + 1;
+ i < frameNumber; i++) {
+ mSkippedRegularFrameNumbers.add(i);
+ }
+ }
+ mCompletedReprocessFrameNumber = frameNumber;
+ }
+ }
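+
+    // A worked example of the gap handling above (frame numbers hypothetical):
+    // with mCompletedFrameNumber == 3, a regular result for frame 6 queues frames 4
+    // and 5 in mSkippedReprocessFrameNumbers (assumed to be reprocess frames) and
+    // advances mCompletedFrameNumber to 6. A reprocess result for frame 4 then
+    // matches the head of that queue and is consumed; a reprocess result for frame 5
+    // arriving before frame 4 would be rejected as out of order.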
+
+ private void checkAndFireSequenceComplete() {
+ long completedFrameNumber = mFrameNumberTracker.getCompletedFrameNumber();
+ long completedReprocessFrameNumber = mFrameNumberTracker.getCompletedReprocessFrameNumber();
+ boolean isReprocess = false;
+ Iterator<RequestLastFrameNumbersHolder> iter = mRequestLastFrameNumbersList.iterator();
+ while (iter.hasNext()) {
+ final RequestLastFrameNumbersHolder requestLastFrameNumbers = iter.next();
+ boolean sequenceCompleted = false;
+ final int requestId = requestLastFrameNumbers.getRequestId();
+ final CaptureCallbackHolder holder;
+ synchronized(mInterfaceLock) {
+ if (mRemoteDevice == null) {
+ Log.w(TAG, "Camera closed while checking sequences");
+ return;
+ }
+
+ int index = mCaptureCallbackMap.indexOfKey(requestId);
+ holder = (index >= 0) ?
+ mCaptureCallbackMap.valueAt(index) : null;
+ if (holder != null) {
+ long lastRegularFrameNumber =
+ requestLastFrameNumbers.getLastRegularFrameNumber();
+ long lastReprocessFrameNumber =
+ requestLastFrameNumbers.getLastReprocessFrameNumber();
+
+ // check if it's okay to remove request from mCaptureCallbackMap
+ if (lastRegularFrameNumber <= completedFrameNumber &&
+ lastReprocessFrameNumber <= completedReprocessFrameNumber) {
+ sequenceCompleted = true;
+ mCaptureCallbackMap.removeAt(index);
+ if (DEBUG) {
+ Log.v(TAG, String.format(
+ "Remove holder for requestId %d, because lastRegularFrame %d " +
+ "is <= %d and lastReprocessFrame %d is <= %d", requestId,
+ lastRegularFrameNumber, completedFrameNumber,
+ lastReprocessFrameNumber, completedReprocessFrameNumber));
+ }
+ }
+ }
+ }
+
+ // If no callback is registered for this requestId or sequence completed, remove it
+ // from the frame number->request pair because it's not needed anymore.
+ if (holder == null || sequenceCompleted) {
+ iter.remove();
+ }
+
+ // Call onCaptureSequenceCompleted
+ if (sequenceCompleted) {
+ Runnable resultDispatch = new Runnable() {
+ @Override
+ public void run() {
+                        if (!CameraDeviceImpl.this.isClosed()) {
+ if (DEBUG) {
+ Log.d(TAG, String.format(
+ "fire sequence complete for request %d",
+ requestId));
+ }
+
+ holder.getCallback().onCaptureSequenceCompleted(
+ CameraDeviceImpl.this,
+ requestId,
+ requestLastFrameNumbers.getLastFrameNumber());
+ }
+ }
+ };
+ holder.getHandler().post(resultDispatch);
+ }
+ }
+ }
+
+ public class CameraDeviceCallbacks extends ICameraDeviceCallbacks.Stub {
+
+ @Override
+ public IBinder asBinder() {
+ return this;
+ }
+
+ @Override
+ public void onDeviceError(final int errorCode, CaptureResultExtras resultExtras) {
+ if (DEBUG) {
+ Log.d(TAG, String.format(
+ "Device error received, code %d, frame number %d, request ID %d, subseq ID %d",
+ errorCode, resultExtras.getFrameNumber(), resultExtras.getRequestId(),
+ resultExtras.getSubsequenceId()));
+ }
+
+ synchronized(mInterfaceLock) {
+ if (mRemoteDevice == null) {
+ return; // Camera already closed
+ }
+
+ switch (errorCode) {
+ case ERROR_CAMERA_DISCONNECTED:
+ CameraDeviceImpl.this.mDeviceHandler.post(mCallOnDisconnected);
+ break;
+ default:
+ Log.e(TAG, "Unknown error from camera device: " + errorCode);
+ // no break
+ case ERROR_CAMERA_DEVICE:
+ case ERROR_CAMERA_SERVICE:
+ mInError = true;
+ final int publicErrorCode = (errorCode == ERROR_CAMERA_DEVICE) ?
+ StateCallback.ERROR_CAMERA_DEVICE :
+ StateCallback.ERROR_CAMERA_SERVICE;
+ Runnable r = new Runnable() {
+ @Override
+ public void run() {
+ if (!CameraDeviceImpl.this.isClosed()) {
+ mDeviceCallback.onError(CameraDeviceImpl.this, publicErrorCode);
+ }
+ }
+ };
+ CameraDeviceImpl.this.mDeviceHandler.post(r);
+ break;
+ case ERROR_CAMERA_REQUEST:
+ case ERROR_CAMERA_RESULT:
+ case ERROR_CAMERA_BUFFER:
+ onCaptureErrorLocked(errorCode, resultExtras);
+ break;
+ }
+ }
+ }
+
+ @Override
+ public void onRepeatingRequestError(long lastFrameNumber, int repeatingRequestId) {
+ if (DEBUG) {
+ Log.d(TAG, "Repeating request error received. Last frame number is " +
+ lastFrameNumber);
+ }
+
+ synchronized(mInterfaceLock) {
+ // Camera is already closed or no repeating request is present.
+ if (mRemoteDevice == null || mRepeatingRequestId == REQUEST_ID_NONE) {
+ return; // Camera already closed
+ }
+
+ checkEarlyTriggerSequenceComplete(mRepeatingRequestId, lastFrameNumber);
+                // Only clear the repeating request ID if it has not already been replaced
+                // by a newer repeating request.
+ if (mRepeatingRequestId == repeatingRequestId) {
+ mRepeatingRequestId = REQUEST_ID_NONE;
+ }
+ }
+ }
+
+ @Override
+ public void onDeviceIdle() {
+ if (DEBUG) {
+ Log.d(TAG, "Camera now idle");
+ }
+ synchronized(mInterfaceLock) {
+ if (mRemoteDevice == null) return; // Camera already closed
+
+ if (!CameraDeviceImpl.this.mIdle) {
+ CameraDeviceImpl.this.mDeviceHandler.post(mCallOnIdle);
+ }
+ CameraDeviceImpl.this.mIdle = true;
+ }
+ }
+
+ @Override
+ public void onCaptureStarted(final CaptureResultExtras resultExtras, final long timestamp) {
+ int requestId = resultExtras.getRequestId();
+ final long frameNumber = resultExtras.getFrameNumber();
+
+ if (DEBUG) {
+ Log.d(TAG, "Capture started for id " + requestId + " frame number " + frameNumber);
+ }
+ final CaptureCallbackHolder holder;
+
+ synchronized(mInterfaceLock) {
+ if (mRemoteDevice == null) return; // Camera already closed
+
+ // Get the callback for this frame ID, if there is one
+ holder = CameraDeviceImpl.this.mCaptureCallbackMap.get(requestId);
+
+ if (holder == null) {
+ return;
+ }
+
+ if (isClosed()) return;
+
+ // Dispatch capture start notice
+ holder.getHandler().post(
+ new Runnable() {
+ @Override
+ public void run() {
+ if (!CameraDeviceImpl.this.isClosed()) {
+ final int subsequenceId = resultExtras.getSubsequenceId();
+ final CaptureRequest request = holder.getRequest(subsequenceId);
+
+ if (holder.hasBatchedOutputs()) {
+ // Send derived onCaptureStarted for requests within the batch
+ final Range<Integer> fpsRange =
+ request.get(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE);
+ for (int i = 0; i < holder.getRequestCount(); i++) {
+ holder.getCallback().onCaptureStarted(
+ CameraDeviceImpl.this,
+ holder.getRequest(i),
+ timestamp - (subsequenceId - i) *
+ NANO_PER_SECOND/fpsRange.getUpper(),
+ frameNumber - (subsequenceId - i));
+ }
+ } else {
+ holder.getCallback().onCaptureStarted(
+ CameraDeviceImpl.this,
+ holder.getRequest(resultExtras.getSubsequenceId()),
+ timestamp, frameNumber);
+ }
+ }
+ }
+ });
+
+ }
+ }
+
+ @Override
+ public void onResultReceived(CameraMetadataNative result,
+ CaptureResultExtras resultExtras) throws RemoteException {
+
+ int requestId = resultExtras.getRequestId();
+ long frameNumber = resultExtras.getFrameNumber();
+
+ if (DEBUG) {
+ Log.v(TAG, "Received result frame " + frameNumber + " for id "
+ + requestId);
+ }
+
+ synchronized(mInterfaceLock) {
+ if (mRemoteDevice == null) return; // Camera already closed
+
+ // TODO: Handle CameraCharacteristics access from CaptureResult correctly.
+ result.set(CameraCharacteristics.LENS_INFO_SHADING_MAP_SIZE,
+ getCharacteristics().get(CameraCharacteristics.LENS_INFO_SHADING_MAP_SIZE));
+
+                final CaptureCallbackHolder holder =
+                        CameraDeviceImpl.this.mCaptureCallbackMap.get(requestId);
+
+                boolean isPartialResult =
+                        (resultExtras.getPartialResultCount() < mTotalPartialCount);
+
+                // Check if we have a callback for this; this null check must come before
+                // the request lookup on the holder below.
+                if (holder == null) {
+                    if (DEBUG) {
+                        Log.d(TAG,
+                                "holder is null, early return at frame "
+                                + frameNumber);
+                    }
+
+                    // With no registered callback there is no request to consult, so the
+                    // frame is tracked as a regular (non-reprocess) result here.
+                    mFrameNumberTracker.updateTracker(frameNumber, /*result*/null, isPartialResult,
+                            /*isReprocess*/false);
+
+                    return;
+                }
+
+                final CaptureRequest request =
+                        holder.getRequest(resultExtras.getSubsequenceId());
+                boolean isReprocess = request.isReprocess();
+
+ if (isClosed()) {
+ if (DEBUG) {
+ Log.d(TAG,
+ "camera is closed, early return at frame "
+ + frameNumber);
+ }
+
+ mFrameNumberTracker.updateTracker(frameNumber, /*result*/null, isPartialResult,
+ isReprocess);
+ return;
+ }
+
+
+ Runnable resultDispatch = null;
+
+ CaptureResult finalResult;
+ // Make a copy of the native metadata before it gets moved to a CaptureResult
+ // object.
+ final CameraMetadataNative resultCopy;
+ if (holder.hasBatchedOutputs()) {
+ resultCopy = new CameraMetadataNative(result);
+ } else {
+ resultCopy = null;
+ }
+
+ // Either send a partial result or the final capture completed result
+ if (isPartialResult) {
+ final CaptureResult resultAsCapture =
+ new CaptureResult(result, request, resultExtras);
+ // Partial result
+ resultDispatch = new Runnable() {
+ @Override
+ public void run() {
+ if (!CameraDeviceImpl.this.isClosed()) {
+ if (holder.hasBatchedOutputs()) {
+ // Send derived onCaptureProgressed for requests within
+ // the batch.
+ for (int i = 0; i < holder.getRequestCount(); i++) {
+ CameraMetadataNative resultLocal =
+ new CameraMetadataNative(resultCopy);
+ CaptureResult resultInBatch = new CaptureResult(
+ resultLocal, holder.getRequest(i), resultExtras);
+
+ holder.getCallback().onCaptureProgressed(
+ CameraDeviceImpl.this,
+ holder.getRequest(i),
+ resultInBatch);
+ }
+ } else {
+ holder.getCallback().onCaptureProgressed(
+ CameraDeviceImpl.this,
+ request,
+ resultAsCapture);
+ }
+ }
+ }
+ };
+ finalResult = resultAsCapture;
+ } else {
+ List<CaptureResult> partialResults =
+ mFrameNumberTracker.popPartialResults(frameNumber);
+
+ final long sensorTimestamp =
+ result.get(CaptureResult.SENSOR_TIMESTAMP);
+ final Range<Integer> fpsRange =
+ request.get(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE);
+ final int subsequenceId = resultExtras.getSubsequenceId();
+ final TotalCaptureResult resultAsCapture = new TotalCaptureResult(result,
+ request, resultExtras, partialResults, holder.getSessionId());
+ // Final capture result
+ resultDispatch = new Runnable() {
+ @Override
+ public void run() {
+                        if (!CameraDeviceImpl.this.isClosed()) {
+ if (holder.hasBatchedOutputs()) {
+ // Send derived onCaptureCompleted for requests within
+ // the batch.
+ for (int i = 0; i < holder.getRequestCount(); i++) {
+ resultCopy.set(CaptureResult.SENSOR_TIMESTAMP,
+ sensorTimestamp - (subsequenceId - i) *
+ NANO_PER_SECOND/fpsRange.getUpper());
+ CameraMetadataNative resultLocal =
+ new CameraMetadataNative(resultCopy);
+ TotalCaptureResult resultInBatch = new TotalCaptureResult(
+ resultLocal, holder.getRequest(i), resultExtras,
+ partialResults, holder.getSessionId());
+
+ holder.getCallback().onCaptureCompleted(
+ CameraDeviceImpl.this,
+ holder.getRequest(i),
+ resultInBatch);
+ }
+ } else {
+ holder.getCallback().onCaptureCompleted(
+ CameraDeviceImpl.this,
+ request,
+ resultAsCapture);
+ }
+ }
+ }
+ };
+ finalResult = resultAsCapture;
+ }
+
+ holder.getHandler().post(resultDispatch);
+
+ // Collect the partials for a total result; or mark the frame as totally completed
+ mFrameNumberTracker.updateTracker(frameNumber, finalResult, isPartialResult,
+ isReprocess);
+
+ // Fire onCaptureSequenceCompleted
+ if (!isPartialResult) {
+ checkAndFireSequenceComplete();
+ }
+ }
+ }
+
+ @Override
+ public void onPrepared(int streamId) {
+ final OutputConfiguration output;
+ final StateCallbackKK sessionCallback;
+
+ if (DEBUG) {
+ Log.v(TAG, "Stream " + streamId + " is prepared");
+ }
+
+ synchronized(mInterfaceLock) {
+ output = mConfiguredOutputs.get(streamId);
+ sessionCallback = mSessionStateCallback;
+ }
+
+ if (sessionCallback == null) return;
+
+ if (output == null) {
+ Log.w(TAG, "onPrepared invoked for unknown output Surface");
+ return;
+ }
+ final List<Surface> surfaces = output.getSurfaces();
+ for (Surface surface : surfaces) {
+ sessionCallback.onSurfacePrepared(surface);
+ }
+ }
+
+ @Override
+ public void onRequestQueueEmpty() {
+ final StateCallbackKK sessionCallback;
+
+ if (DEBUG) {
+ Log.v(TAG, "Request queue becomes empty");
+ }
+
+ synchronized(mInterfaceLock) {
+ sessionCallback = mSessionStateCallback;
+ }
+
+ if (sessionCallback == null) return;
+
+ sessionCallback.onRequestQueueEmpty();
+ }
+
+ /**
+ * Called by onDeviceError for handling single-capture failures.
+ */
+ private void onCaptureErrorLocked(int errorCode, CaptureResultExtras resultExtras) {
+
+ final int requestId = resultExtras.getRequestId();
+ final int subsequenceId = resultExtras.getSubsequenceId();
+ final long frameNumber = resultExtras.getFrameNumber();
+ final CaptureCallbackHolder holder =
+ CameraDeviceImpl.this.mCaptureCallbackMap.get(requestId);
+
+ final CaptureRequest request = holder.getRequest(subsequenceId);
+
+ Runnable failureDispatch = null;
+ if (errorCode == ERROR_CAMERA_BUFFER) {
+ // Because 1 stream id could map to multiple surfaces, we need to specify both
+ // streamId and surfaceId.
+ List<Surface> surfaces =
+ mConfiguredOutputs.get(resultExtras.getErrorStreamId()).getSurfaces();
+ for (Surface surface : surfaces) {
+ if (!request.containsTarget(surface)) {
+ continue;
+ }
+ if (DEBUG) {
+ Log.v(TAG, String.format("Lost output buffer reported for frame %d, target %s",
+ frameNumber, surface));
+ }
+ failureDispatch = new Runnable() {
+ @Override
+ public void run() {
+                            if (!CameraDeviceImpl.this.isClosed()) {
+ holder.getCallback().onCaptureBufferLost(
+ CameraDeviceImpl.this,
+ request,
+ surface,
+ frameNumber);
+ }
+ }
+ };
+ // Dispatch the failure callback
+ holder.getHandler().post(failureDispatch);
+ }
+ } else {
+ boolean mayHaveBuffers = (errorCode == ERROR_CAMERA_RESULT);
+
+ // This is only approximate - exact handling needs the camera service and HAL to
+                // disambiguate between request failures due to abort and due to real errors. For
+ // now, assume that if the session believes we're mid-abort, then the error is due
+ // to abort.
+ int reason = (mCurrentSession != null && mCurrentSession.isAborting()) ?
+ CaptureFailure.REASON_FLUSHED :
+ CaptureFailure.REASON_ERROR;
+
+ final CaptureFailure failure = new CaptureFailure(
+ request,
+ reason,
+ /*dropped*/ mayHaveBuffers,
+ requestId,
+ frameNumber);
+
+ failureDispatch = new Runnable() {
+ @Override
+ public void run() {
+                        if (!CameraDeviceImpl.this.isClosed()) {
+ holder.getCallback().onCaptureFailed(
+ CameraDeviceImpl.this,
+ request,
+ failure);
+ }
+ }
+ };
+
+ // Fire onCaptureSequenceCompleted if appropriate
+ if (DEBUG) {
+ Log.v(TAG, String.format("got error frame %d", frameNumber));
+ }
+ mFrameNumberTracker.updateTracker(frameNumber, /*error*/true, request.isReprocess());
+ checkAndFireSequenceComplete();
+
+ // Dispatch the failure callback
+ holder.getHandler().post(failureDispatch);
+ }
+
+ }
+
+ } // public class CameraDeviceCallbacks
+
+ /**
+ * Default handler management.
+ *
+ * <p>
+     * If the handler is null, create a Handler from the current thread's Looper.
+     * If no looper exists, throw an {@code IllegalArgumentException}.
+ * </p>
+ */
+ static Handler checkHandler(Handler handler) {
+ if (handler == null) {
+ Looper looper = Looper.myLooper();
+ if (looper == null) {
+ throw new IllegalArgumentException(
+ "No handler given, and current thread has no looper!");
+ }
+ handler = new Handler(looper);
+ }
+ return handler;
+ }
+
+ /**
+ * Default handler management, conditional on there being a callback.
+ *
+ * <p>If the callback isn't null, check the handler, otherwise pass it through.</p>
+ */
+ static <T> Handler checkHandler(Handler handler, T callback) {
+ if (callback != null) {
+ return checkHandler(handler);
+ }
+ return handler;
+ }
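+
+    // A usage sketch of the two helpers above (callback and handler hypothetical):
+    //
+    //   handler = checkHandler(handler, callback);
+    //
+    // - callback == null: the handler is passed through unchanged, even if null.
+    // - callback != null, handler == null, and the current thread has a Looper: a
+    //   new Handler on that Looper is returned.
+    // - callback != null, handler == null, and no Looper exists:
+    //   IllegalArgumentException.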
+
+ private void checkIfCameraClosedOrInError() throws CameraAccessException {
+ if (mRemoteDevice == null) {
+ throw new IllegalStateException("CameraDevice was already closed");
+ }
+ if (mInError) {
+ throw new CameraAccessException(CameraAccessException.CAMERA_ERROR,
+ "The camera device has encountered a serious error");
+ }
+ }
+
+ /** Whether the camera device has started to close (may not yet have finished) */
+ private boolean isClosed() {
+ return mClosing.get();
+ }
+
+ private CameraCharacteristics getCharacteristics() {
+ return mCharacteristics;
+ }
+
+ /**
+ * Listener for binder death.
+ *
+ * <p> Handle binder death for ICameraDeviceUser. Trigger onError.</p>
+ */
+ @Override
+ public void binderDied() {
+ Log.w(TAG, "CameraDevice " + mCameraId + " died unexpectedly");
+
+ if (mRemoteDevice == null) {
+ return; // Camera already closed
+ }
+
+ mInError = true;
+ Runnable r = new Runnable() {
+ @Override
+ public void run() {
+ if (!isClosed()) {
+ mDeviceCallback.onError(CameraDeviceImpl.this,
+ StateCallback.ERROR_CAMERA_SERVICE);
+ }
+ }
+ };
+ CameraDeviceImpl.this.mDeviceHandler.post(r);
+ }
+}
diff --git a/android/hardware/camera2/impl/CameraMetadataNative.java b/android/hardware/camera2/impl/CameraMetadataNative.java
new file mode 100644
index 00000000..ebe2fa17
--- /dev/null
+++ b/android/hardware/camera2/impl/CameraMetadataNative.java
@@ -0,0 +1,1338 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.impl;
+
+import android.graphics.ImageFormat;
+import android.graphics.Point;
+import android.graphics.Rect;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.marshal.Marshaler;
+import android.hardware.camera2.marshal.MarshalQueryable;
+import android.hardware.camera2.marshal.MarshalRegistry;
+import android.hardware.camera2.marshal.impl.MarshalQueryableArray;
+import android.hardware.camera2.marshal.impl.MarshalQueryableBoolean;
+import android.hardware.camera2.marshal.impl.MarshalQueryableBlackLevelPattern;
+import android.hardware.camera2.marshal.impl.MarshalQueryableColorSpaceTransform;
+import android.hardware.camera2.marshal.impl.MarshalQueryableEnum;
+import android.hardware.camera2.marshal.impl.MarshalQueryableHighSpeedVideoConfiguration;
+import android.hardware.camera2.marshal.impl.MarshalQueryableMeteringRectangle;
+import android.hardware.camera2.marshal.impl.MarshalQueryableNativeByteToInteger;
+import android.hardware.camera2.marshal.impl.MarshalQueryablePair;
+import android.hardware.camera2.marshal.impl.MarshalQueryableParcelable;
+import android.hardware.camera2.marshal.impl.MarshalQueryablePrimitive;
+import android.hardware.camera2.marshal.impl.MarshalQueryableRange;
+import android.hardware.camera2.marshal.impl.MarshalQueryableRect;
+import android.hardware.camera2.marshal.impl.MarshalQueryableReprocessFormatsMap;
+import android.hardware.camera2.marshal.impl.MarshalQueryableRggbChannelVector;
+import android.hardware.camera2.marshal.impl.MarshalQueryableSize;
+import android.hardware.camera2.marshal.impl.MarshalQueryableSizeF;
+import android.hardware.camera2.marshal.impl.MarshalQueryableStreamConfiguration;
+import android.hardware.camera2.marshal.impl.MarshalQueryableStreamConfigurationDuration;
+import android.hardware.camera2.marshal.impl.MarshalQueryableString;
+import android.hardware.camera2.params.Face;
+import android.hardware.camera2.params.HighSpeedVideoConfiguration;
+import android.hardware.camera2.params.LensShadingMap;
+import android.hardware.camera2.params.ReprocessFormatsMap;
+import android.hardware.camera2.params.StreamConfiguration;
+import android.hardware.camera2.params.StreamConfigurationDuration;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.hardware.camera2.params.TonemapCurve;
+import android.hardware.camera2.utils.TypeReference;
+import android.location.Location;
+import android.location.LocationManager;
+import android.os.Parcelable;
+import android.os.Parcel;
+import android.os.ServiceSpecificException;
+import android.util.Log;
+import android.util.Size;
+
+import com.android.internal.util.Preconditions;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.util.ArrayList;
+import java.util.HashMap;
+
+/**
+ * Implementation of camera metadata marshal/unmarshal across Binder to
+ * the camera service
+ */
+public class CameraMetadataNative implements Parcelable {
+
+ public static class Key<T> {
+ private boolean mHasTag;
+ private int mTag;
+ private long mVendorId = Long.MAX_VALUE;
+ private final Class<T> mType;
+ private final TypeReference<T> mTypeReference;
+ private final String mName;
+ private final int mHash;
+
+ /**
+ * @hide
+ */
+ public Key(String name, Class<T> type, long vendorId) {
+ if (name == null) {
+ throw new NullPointerException("Key needs a valid name");
+ } else if (type == null) {
+ throw new NullPointerException("Type needs to be non-null");
+ }
+ mName = name;
+ mType = type;
+ mVendorId = vendorId;
+ mTypeReference = TypeReference.createSpecializedTypeReference(type);
+ mHash = mName.hashCode() ^ mTypeReference.hashCode();
+ }
+
+ /**
+ * Visible for testing only.
+ *
+ * <p>Use the CameraCharacteristics.Key, CaptureResult.Key, or CaptureRequest.Key
+ * for application code or vendor-extended keys.</p>
+ */
+ public Key(String name, Class<T> type) {
+ if (name == null) {
+ throw new NullPointerException("Key needs a valid name");
+ } else if (type == null) {
+ throw new NullPointerException("Type needs to be non-null");
+ }
+ mName = name;
+ mType = type;
+ mTypeReference = TypeReference.createSpecializedTypeReference(type);
+ mHash = mName.hashCode() ^ mTypeReference.hashCode();
+ }
+
+ /**
+ * Visible for testing only.
+ *
+ * <p>Use the CameraCharacteristics.Key, CaptureResult.Key, or CaptureRequest.Key
+ * for application code or vendor-extended keys.</p>
+ */
+ @SuppressWarnings("unchecked")
+ public Key(String name, TypeReference<T> typeReference) {
+ if (name == null) {
+ throw new NullPointerException("Key needs a valid name");
+ } else if (typeReference == null) {
+ throw new NullPointerException("TypeReference needs to be non-null");
+ }
+ mName = name;
+ mType = (Class<T>)typeReference.getRawType();
+ mTypeReference = typeReference;
+ mHash = mName.hashCode() ^ mTypeReference.hashCode();
+ }
+
+ /**
+ * Return a camelCase, period separated name formatted like:
+ * {@code "root.section[.subsections].name"}.
+ *
+ * <p>Built-in keys exposed by the Android SDK are always prefixed with {@code "android."};
+ * keys that are device/platform-specific are prefixed with {@code "com."}.</p>
+ *
+ * <p>For example, {@code CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP} would
+ * have a name of {@code "android.scaler.streamConfigurationMap"}; whereas a device
+ * specific key might look like {@code "com.google.nexus.data.private"}.</p>
+ *
+ * @return String representation of the key name
+ */
+ public final String getName() {
+ return mName;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public final int hashCode() {
+ return mHash;
+ }
+
+ /**
+ * Compare this key against other native keys, request keys, result keys, and
+ * characteristics keys.
+ *
+ * <p>Two keys are considered equal if their name and type reference are equal.</p>
+ *
+ * <p>Note that the equality against non-native keys is one-way. A native key may be equal
+ * to a result key; but that same result key will not be equal to a native key.</p>
+ */
+ @SuppressWarnings("rawtypes")
+ @Override
+ public final boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+
+ if (o == null || this.hashCode() != o.hashCode()) {
+ return false;
+ }
+
+ Key<?> lhs;
+
+ if (o instanceof CaptureResult.Key) {
+ lhs = ((CaptureResult.Key)o).getNativeKey();
+ } else if (o instanceof CaptureRequest.Key) {
+ lhs = ((CaptureRequest.Key)o).getNativeKey();
+ } else if (o instanceof CameraCharacteristics.Key) {
+ lhs = ((CameraCharacteristics.Key)o).getNativeKey();
+ } else if ((o instanceof Key)) {
+ lhs = (Key<?>)o;
+ } else {
+ return false;
+ }
+
+ return mName.equals(lhs.mName) && mTypeReference.equals(lhs.mTypeReference);
+ }
+
+ /**
+ * <p>
+ * Get the tag corresponding to this key. This enables insertion into the
+ * native metadata.
+ * </p>
+ *
+ * <p>This value is looked up the first time, and cached subsequently.</p>
+ *
+ * @return The tag numeric value corresponding to the string
+ */
+ public final int getTag() {
+ if (!mHasTag) {
+ mTag = CameraMetadataNative.getTag(mName, mVendorId);
+ mHasTag = true;
+ }
+ return mTag;
+ }
+
+ /**
+ * Get the raw class backing the type {@code T} for this key.
+ *
+ * <p>The distinction is only important if {@code T} is a generic, e.g.
+ * {@code Range<Integer>} since the nested type will be erased.</p>
+ */
+ public final Class<T> getType() {
+ // TODO: remove this; other places should use #getTypeReference() instead
+ return mType;
+ }
+
+ /**
+ * Get the vendor tag provider id.
+ *
+ * @hide
+ */
+ public final long getVendorId() {
+ return mVendorId;
+ }
+
+ /**
+ * Get the type reference backing the type {@code T} for this key.
+ *
+ * <p>The distinction is only important if {@code T} is a generic, e.g.
+ * {@code Range<Integer>} since the nested type will be retained.</p>
+ */
+ public final TypeReference<T> getTypeReference() {
+ return mTypeReference;
+ }
+ }
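+
+ // Illustrative sketch (not part of the original source): the one-way equality
+ // documented on Key#equals above means a native Key compares equal to the public
+ // key wrapping the same name and type, but not the other way around. Assuming
+ // CaptureRequest.JPEG_QUALITY (a Key<Byte> in the SDK):
+ //
+ //     Key<Byte> nativeKey = new Key<>("android.jpeg.quality", byte.class);
+ //     nativeKey.equals(CaptureRequest.JPEG_QUALITY); // true - unwraps to the native key
+ //     CaptureRequest.JPEG_QUALITY.equals(nativeKey); // false - equality is one-way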
+
+ private static final String TAG = "CameraMetadataJV";
+ private static final boolean DEBUG = false;
+
+ // this should be in sync with HAL_PIXEL_FORMAT_BLOB defined in graphics.h
+ public static final int NATIVE_JPEG_FORMAT = 0x21;
+
+ private static final String CELLID_PROCESS = "CELLID";
+ private static final String GPS_PROCESS = "GPS";
+ private static final int FACE_LANDMARK_SIZE = 6;
+
+ private static String translateLocationProviderToProcess(final String provider) {
+ if (provider == null) {
+ return null;
+ }
+ switch(provider) {
+ case LocationManager.GPS_PROVIDER:
+ return GPS_PROCESS;
+ case LocationManager.NETWORK_PROVIDER:
+ return CELLID_PROCESS;
+ default:
+ return null;
+ }
+ }
+
+ private static String translateProcessToLocationProvider(final String process) {
+ if (process == null) {
+ return null;
+ }
+ switch(process) {
+ case GPS_PROCESS:
+ return LocationManager.GPS_PROVIDER;
+ case CELLID_PROCESS:
+ return LocationManager.NETWORK_PROVIDER;
+ default:
+ return null;
+ }
+ }
+
+ public CameraMetadataNative() {
+ super();
+ mMetadataPtr = nativeAllocate();
+ if (mMetadataPtr == 0) {
+ throw new OutOfMemoryError("Failed to allocate native CameraMetadata");
+ }
+ }
+
+ /**
+ * Copy constructor - clone metadata
+ */
+ public CameraMetadataNative(CameraMetadataNative other) {
+ super();
+ mMetadataPtr = nativeAllocateCopy(other);
+ if (mMetadataPtr == 0) {
+ throw new OutOfMemoryError("Failed to allocate native CameraMetadata");
+ }
+ }
+
+ /**
+ * <p>Move the contents from {@code other} into a new camera metadata instance.</p>
+ *
+ * <p>After this call, {@code other} will become empty.</p>
+ *
+ * @param other the previous metadata instance which will get pilfered
+ * @return a new metadata instance with the values from {@code other} moved into it
+ */
+ public static CameraMetadataNative move(CameraMetadataNative other) {
+ CameraMetadataNative newObject = new CameraMetadataNative();
+ newObject.swap(other);
+ return newObject;
+ }
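+
+ // Illustrative sketch (not part of the original source): move() pilfers the native
+ // buffer via swap(), so the source object is emptied rather than copied:
+ //
+ //     CameraMetadataNative src = ...; // a populated instance
+ //     CameraMetadataNative dst = CameraMetadataNative.move(src);
+ //     // dst now owns all entries; src.isEmpty() == true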
+
+ public static final Parcelable.Creator<CameraMetadataNative> CREATOR =
+ new Parcelable.Creator<CameraMetadataNative>() {
+ @Override
+ public CameraMetadataNative createFromParcel(Parcel in) {
+ CameraMetadataNative metadata = new CameraMetadataNative();
+ metadata.readFromParcel(in);
+ return metadata;
+ }
+
+ @Override
+ public CameraMetadataNative[] newArray(int size) {
+ return new CameraMetadataNative[size];
+ }
+ };
+
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ @Override
+ public void writeToParcel(Parcel dest, int flags) {
+ nativeWriteToParcel(dest);
+ }
+
+ /**
+ * @hide
+ */
+ public <T> T get(CameraCharacteristics.Key<T> key) {
+ return get(key.getNativeKey());
+ }
+
+ /**
+ * @hide
+ */
+ public <T> T get(CaptureResult.Key<T> key) {
+ return get(key.getNativeKey());
+ }
+
+ /**
+ * @hide
+ */
+ public <T> T get(CaptureRequest.Key<T> key) {
+ return get(key.getNativeKey());
+ }
+
+ /**
+ * Look-up a metadata field value by its key.
+ *
+ * @param key a non-{@code null} key instance
+ * @return the field corresponding to the {@code key}, or {@code null} if no value was set
+ */
+ public <T> T get(Key<T> key) {
+ Preconditions.checkNotNull(key, "key must not be null");
+
+ // Check if key has been overridden to use a wrapper class on the java side.
+ GetCommand g = sGetCommandMap.get(key);
+ if (g != null) {
+ return g.getValue(this, key);
+ }
+ return getBase(key);
+ }
+
+ public void readFromParcel(Parcel in) {
+ nativeReadFromParcel(in);
+ }
+
+ /**
+ * Set the global client-side vendor tag descriptor to allow use of vendor
+ * tags in camera applications.
+ *
+ * @throws ServiceSpecificException
+ * @hide
+ */
+ public static void setupGlobalVendorTagDescriptor() throws ServiceSpecificException {
+ int err = nativeSetupGlobalVendorTagDescriptor();
+ if (err != 0) {
+ throw new ServiceSpecificException(err, "Failure to set up global vendor tags");
+ }
+ }
+
+ /**
+ * Set the global client-side vendor tag descriptor to allow use of vendor
+ * tags in camera applications.
+ *
+ * @return int An error code corresponding to one of the
+ * {@link ICameraService} error constants, or 0 on success.
+ */
+ private static native int nativeSetupGlobalVendorTagDescriptor();
+
+ /**
+ * Set a camera metadata field to a value. The field definitions can be
+ * found in {@link CameraCharacteristics}, {@link CaptureResult}, and
+ * {@link CaptureRequest}.
+ *
+ * @param key The metadata field to write.
+ * @param value The value to set the field to, which must be of a matching
+ * type to the key.
+ */
+ public <T> void set(Key<T> key, T value) {
+ SetCommand s = sSetCommandMap.get(key);
+ if (s != null) {
+ s.setValue(this, value);
+ return;
+ }
+
+ setBase(key, value);
+ }
+
+ public <T> void set(CaptureRequest.Key<T> key, T value) {
+ set(key.getNativeKey(), value);
+ }
+
+ public <T> void set(CaptureResult.Key<T> key, T value) {
+ set(key.getNativeKey(), value);
+ }
+
+ public <T> void set(CameraCharacteristics.Key<T> key, T value) {
+ set(key.getNativeKey(), value);
+ }
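+
+ // Illustrative round trip (not part of the original source): for keys without a
+ // command-map override, set() marshals the value into the native buffer and get()
+ // unmarshals it back. Assuming CaptureRequest.JPEG_QUALITY (a Key<Byte> in the SDK):
+ //
+ //     CameraMetadataNative metadata = new CameraMetadataNative();
+ //     metadata.set(CaptureRequest.JPEG_QUALITY, (byte) 90);
+ //     Byte quality = metadata.get(CaptureRequest.JPEG_QUALITY); // 90
+ //     metadata.set(CaptureRequest.JPEG_QUALITY, null);          // erases the entry
+ //     Byte erased = metadata.get(CaptureRequest.JPEG_QUALITY);  // null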
+
+ // Keep up-to-date with camera_metadata.h
+ /**
+ * @hide
+ */
+ public static final int TYPE_BYTE = 0;
+ /**
+ * @hide
+ */
+ public static final int TYPE_INT32 = 1;
+ /**
+ * @hide
+ */
+ public static final int TYPE_FLOAT = 2;
+ /**
+ * @hide
+ */
+ public static final int TYPE_INT64 = 3;
+ /**
+ * @hide
+ */
+ public static final int TYPE_DOUBLE = 4;
+ /**
+ * @hide
+ */
+ public static final int TYPE_RATIONAL = 5;
+ /**
+ * @hide
+ */
+ public static final int NUM_TYPES = 6;
+
+ private void close() {
+ // this sets mMetadataPtr to 0
+ nativeClose();
+ mMetadataPtr = 0; // set it to 0 again to prevent eclipse from making this field final
+ }
+
+ private <T> T getBase(CameraCharacteristics.Key<T> key) {
+ return getBase(key.getNativeKey());
+ }
+
+ private <T> T getBase(CaptureResult.Key<T> key) {
+ return getBase(key.getNativeKey());
+ }
+
+ private <T> T getBase(CaptureRequest.Key<T> key) {
+ return getBase(key.getNativeKey());
+ }
+
+ private <T> T getBase(Key<T> key) {
+ int tag = nativeGetTagFromKeyLocal(key.getName());
+ byte[] values = readValues(tag);
+ if (values == null) {
+ return null;
+ }
+
+ int nativeType = nativeGetTypeFromTagLocal(tag);
+ Marshaler<T> marshaler = getMarshalerForKey(key, nativeType);
+ ByteBuffer buffer = ByteBuffer.wrap(values).order(ByteOrder.nativeOrder());
+ return marshaler.unmarshal(buffer);
+ }
+
+ // Use Command pattern here to avoid lots of expensive if/equals checks in get for overridden
+ // metadata.
+ private static final HashMap<Key<?>, GetCommand> sGetCommandMap =
+ new HashMap<Key<?>, GetCommand>();
+ static {
+ sGetCommandMap.put(
+ CameraCharacteristics.SCALER_AVAILABLE_FORMATS.getNativeKey(), new GetCommand() {
+ @Override
+ @SuppressWarnings("unchecked")
+ public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
+ return (T) metadata.getAvailableFormats();
+ }
+ });
+ sGetCommandMap.put(
+ CaptureResult.STATISTICS_FACES.getNativeKey(), new GetCommand() {
+ @Override
+ @SuppressWarnings("unchecked")
+ public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
+ return (T) metadata.getFaces();
+ }
+ });
+ sGetCommandMap.put(
+ CaptureResult.STATISTICS_FACE_RECTANGLES.getNativeKey(), new GetCommand() {
+ @Override
+ @SuppressWarnings("unchecked")
+ public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
+ return (T) metadata.getFaceRectangles();
+ }
+ });
+ sGetCommandMap.put(
+ CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP.getNativeKey(),
+ new GetCommand() {
+ @Override
+ @SuppressWarnings("unchecked")
+ public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
+ return (T) metadata.getStreamConfigurationMap();
+ }
+ });
+ sGetCommandMap.put(
+ CameraCharacteristics.CONTROL_MAX_REGIONS_AE.getNativeKey(), new GetCommand() {
+ @Override
+ @SuppressWarnings("unchecked")
+ public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
+ return (T) metadata.getMaxRegions(key);
+ }
+ });
+ sGetCommandMap.put(
+ CameraCharacteristics.CONTROL_MAX_REGIONS_AWB.getNativeKey(), new GetCommand() {
+ @Override
+ @SuppressWarnings("unchecked")
+ public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
+ return (T) metadata.getMaxRegions(key);
+ }
+ });
+ sGetCommandMap.put(
+ CameraCharacteristics.CONTROL_MAX_REGIONS_AF.getNativeKey(), new GetCommand() {
+ @Override
+ @SuppressWarnings("unchecked")
+ public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
+ return (T) metadata.getMaxRegions(key);
+ }
+ });
+ sGetCommandMap.put(
+ CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW.getNativeKey(), new GetCommand() {
+ @Override
+ @SuppressWarnings("unchecked")
+ public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
+ return (T) metadata.getMaxNumOutputs(key);
+ }
+ });
+ sGetCommandMap.put(
+ CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC.getNativeKey(), new GetCommand() {
+ @Override
+ @SuppressWarnings("unchecked")
+ public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
+ return (T) metadata.getMaxNumOutputs(key);
+ }
+ });
+ sGetCommandMap.put(
+ CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING.getNativeKey(),
+ new GetCommand() {
+ @Override
+ @SuppressWarnings("unchecked")
+ public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
+ return (T) metadata.getMaxNumOutputs(key);
+ }
+ });
+ sGetCommandMap.put(
+ CaptureRequest.TONEMAP_CURVE.getNativeKey(), new GetCommand() {
+ @Override
+ @SuppressWarnings("unchecked")
+ public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
+ return (T) metadata.getTonemapCurve();
+ }
+ });
+ sGetCommandMap.put(
+ CaptureResult.JPEG_GPS_LOCATION.getNativeKey(), new GetCommand() {
+ @Override
+ @SuppressWarnings("unchecked")
+ public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
+ return (T) metadata.getGpsLocation();
+ }
+ });
+ sGetCommandMap.put(
+ CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP.getNativeKey(),
+ new GetCommand() {
+ @Override
+ @SuppressWarnings("unchecked")
+ public <T> T getValue(CameraMetadataNative metadata, Key<T> key) {
+ return (T) metadata.getLensShadingMap();
+ }
+ });
+ }
+
+ private int[] getAvailableFormats() {
+ int[] availableFormats = getBase(CameraCharacteristics.SCALER_AVAILABLE_FORMATS);
+ if (availableFormats != null) {
+ for (int i = 0; i < availableFormats.length; i++) {
+ // JPEG has a different constant on the native side than on the managed side, so it needs to be overridden here.
+ if (availableFormats[i] == NATIVE_JPEG_FORMAT) {
+ availableFormats[i] = ImageFormat.JPEG;
+ }
+ }
+ }
+
+ return availableFormats;
+ }
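+
+ // Illustrative sketch (not part of the original source): because of the translation
+ // above, a native entry holding NATIVE_JPEG_FORMAT (0x21, HAL_PIXEL_FORMAT_BLOB)
+ // surfaces to managed callers as ImageFormat.JPEG (0x100):
+ //
+ //     int[] formats = metadata.get(CameraCharacteristics.SCALER_AVAILABLE_FORMATS);
+ //     // any element that was 0x21 in the native buffer reads back as ImageFormat.JPEG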
+
+ private boolean setFaces(Face[] faces) {
+ if (faces == null) {
+ return false;
+ }
+
+ int numFaces = faces.length;
+
+ // Detect whether every face has FULL-mode data (a supported id); count the valid faces
+ boolean fullMode = true;
+ for (Face face : faces) {
+ if (face == null) {
+ numFaces--;
+ Log.w(TAG, "setFaces - null face detected, skipping");
+ continue;
+ }
+
+ if (face.getId() == Face.ID_UNSUPPORTED) {
+ fullMode = false;
+ }
+ }
+
+ Rect[] faceRectangles = new Rect[numFaces];
+ byte[] faceScores = new byte[numFaces];
+ int[] faceIds = null;
+ int[] faceLandmarks = null;
+
+ if (fullMode) {
+ faceIds = new int[numFaces];
+ faceLandmarks = new int[numFaces * FACE_LANDMARK_SIZE];
+ }
+
+ int i = 0;
+ for (Face face : faces) {
+ if (face == null) {
+ continue;
+ }
+
+ faceRectangles[i] = face.getBounds();
+ faceScores[i] = (byte)face.getScore();
+
+ if (fullMode) {
+ faceIds[i] = face.getId();
+
+ int j = 0;
+
+ faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getLeftEyePosition().x;
+ faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getLeftEyePosition().y;
+ faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getRightEyePosition().x;
+ faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getRightEyePosition().y;
+ faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getMouthPosition().x;
+ faceLandmarks[i * FACE_LANDMARK_SIZE + j++] = face.getMouthPosition().y;
+ }
+
+ i++;
+ }
+
+ set(CaptureResult.STATISTICS_FACE_RECTANGLES, faceRectangles);
+ set(CaptureResult.STATISTICS_FACE_IDS, faceIds);
+ set(CaptureResult.STATISTICS_FACE_LANDMARKS, faceLandmarks);
+ set(CaptureResult.STATISTICS_FACE_SCORES, faceScores);
+
+ return true;
+ }
+
+ private Face[] getFaces() {
+ Integer faceDetectMode = get(CaptureResult.STATISTICS_FACE_DETECT_MODE);
+ byte[] faceScores = get(CaptureResult.STATISTICS_FACE_SCORES);
+ Rect[] faceRectangles = get(CaptureResult.STATISTICS_FACE_RECTANGLES);
+ int[] faceIds = get(CaptureResult.STATISTICS_FACE_IDS);
+ int[] faceLandmarks = get(CaptureResult.STATISTICS_FACE_LANDMARKS);
+
+ if (areValuesAllNull(faceDetectMode, faceScores, faceRectangles, faceIds, faceLandmarks)) {
+ return null;
+ }
+
+ if (faceDetectMode == null) {
+ Log.w(TAG, "Face detect mode metadata is null, assuming the mode is SIMPLE");
+ faceDetectMode = CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE;
+ } else {
+ if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_OFF) {
+ return new Face[0];
+ }
+ if (faceDetectMode != CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE &&
+ faceDetectMode != CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) {
+ Log.w(TAG, "Unknown face detect mode: " + faceDetectMode);
+ return new Face[0];
+ }
+ }
+
+ // Face scores and rectangles are required by SIMPLE and FULL mode.
+ if (faceScores == null || faceRectangles == null) {
+ Log.w(TAG, "Expect face scores and rectangles to be non-null");
+ return new Face[0];
+ } else if (faceScores.length != faceRectangles.length) {
+ Log.w(TAG, String.format("Face score size(%d) doesn match face rectangle size(%d)!",
+ faceScores.length, faceRectangles.length));
+ }
+
+ // To be safe, make the number of faces the minimum of all the face metadata lengths.
+ int numFaces = Math.min(faceScores.length, faceRectangles.length);
+ // Face id and landmarks are only required by FULL mode.
+ if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL) {
+ if (faceIds == null || faceLandmarks == null) {
+ Log.w(TAG, "Expect face ids and landmarks to be non-null for FULL mode," +
+ "fallback to SIMPLE mode");
+ faceDetectMode = CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE;
+ } else {
+ if (faceIds.length != numFaces ||
+ faceLandmarks.length != numFaces * FACE_LANDMARK_SIZE) {
+ Log.w(TAG, String.format("Face id size(%d), or face landmark size(%d) don't" +
+ "match face number(%d)!",
+ faceIds.length, faceLandmarks.length * FACE_LANDMARK_SIZE, numFaces));
+ }
+ // To be safe, make the number of faces the minimum of all the face metadata lengths.
+ numFaces = Math.min(numFaces, faceIds.length);
+ numFaces = Math.min(numFaces, faceLandmarks.length / FACE_LANDMARK_SIZE);
+ }
+ }
+
+ ArrayList<Face> faceList = new ArrayList<Face>();
+ if (faceDetectMode == CaptureResult.STATISTICS_FACE_DETECT_MODE_SIMPLE) {
+ for (int i = 0; i < numFaces; i++) {
+ if (faceScores[i] <= Face.SCORE_MAX &&
+ faceScores[i] >= Face.SCORE_MIN) {
+ faceList.add(new Face(faceRectangles[i], faceScores[i]));
+ }
+ }
+ } else {
+ // CaptureResult.STATISTICS_FACE_DETECT_MODE_FULL
+ for (int i = 0; i < numFaces; i++) {
+ if (faceScores[i] <= Face.SCORE_MAX &&
+ faceScores[i] >= Face.SCORE_MIN &&
+ faceIds[i] >= 0) {
+ Point leftEye = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE],
+ faceLandmarks[i*FACE_LANDMARK_SIZE+1]);
+ Point rightEye = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE+2],
+ faceLandmarks[i*FACE_LANDMARK_SIZE+3]);
+ Point mouth = new Point(faceLandmarks[i*FACE_LANDMARK_SIZE+4],
+ faceLandmarks[i*FACE_LANDMARK_SIZE+5]);
+ Face face = new Face(faceRectangles[i], faceScores[i], faceIds[i],
+ leftEye, rightEye, mouth);
+ faceList.add(face);
+ }
+ }
+ }
+ Face[] faces = new Face[faceList.size()];
+ faceList.toArray(faces);
+ return faces;
+ }
+
+ // Face rectangles are defined as (left, top, right, bottom) instead of
+ // (left, top, width, height) at the native level, so the normal Rect
+ // conversion that does (l, t, w, h) -> (l, t, r, b) is unnecessary. Undo
+ // that conversion here for just the faces.
+ private Rect[] getFaceRectangles() {
+ Rect[] faceRectangles = getBase(CaptureResult.STATISTICS_FACE_RECTANGLES);
+ if (faceRectangles == null) return null;
+
+ Rect[] fixedFaceRectangles = new Rect[faceRectangles.length];
+ for (int i = 0; i < faceRectangles.length; i++) {
+ fixedFaceRectangles[i] = new Rect(
+ faceRectangles[i].left,
+ faceRectangles[i].top,
+ faceRectangles[i].right - faceRectangles[i].left,
+ faceRectangles[i].bottom - faceRectangles[i].top);
+ }
+ return fixedFaceRectangles;
+ }
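+
+ // Worked example (not part of the original source, and assuming the generic Rect
+ // marshaler maps native (l, t, w, h) to Rect(l, t, l + w, t + h)): a native face
+ // entry of (100, 50, 300, 250) - already (l, t, r, b) - unmarshals to
+ // Rect(100, 50, 400, 300), and the subtraction above restores Rect(100, 50, 300, 250).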
+
+ private LensShadingMap getLensShadingMap() {
+ float[] lsmArray = getBase(CaptureResult.STATISTICS_LENS_SHADING_MAP);
+ Size s = get(CameraCharacteristics.LENS_INFO_SHADING_MAP_SIZE);
+
+ // Do not warn if lsmArray is null while s is not. This is valid.
+ if (lsmArray == null) {
+ return null;
+ }
+
+ if (s == null) {
+ Log.w(TAG, "getLensShadingMap - Lens shading map size was null.");
+ return null;
+ }
+
+ LensShadingMap map = new LensShadingMap(lsmArray, s.getHeight(), s.getWidth());
+ return map;
+ }
+
+ private Location getGpsLocation() {
+ String processingMethod = get(CaptureResult.JPEG_GPS_PROCESSING_METHOD);
+ double[] coords = get(CaptureResult.JPEG_GPS_COORDINATES);
+ Long timeStamp = get(CaptureResult.JPEG_GPS_TIMESTAMP);
+
+ if (areValuesAllNull(processingMethod, coords, timeStamp)) {
+ return null;
+ }
+
+ Location l = new Location(translateProcessToLocationProvider(processingMethod));
+ if (timeStamp != null) {
+ // Location expects the timestamp in milliseconds.
+ l.setTime(timeStamp * 1000);
+ } else {
+ Log.w(TAG, "getGpsLocation - No timestamp for GPS location.");
+ }
+
+ if (coords != null) {
+ l.setLatitude(coords[0]);
+ l.setLongitude(coords[1]);
+ l.setAltitude(coords[2]);
+ } else {
+ Log.w(TAG, "getGpsLocation - No coordinates for GPS location");
+ }
+
+ return l;
+ }
+
+ private boolean setGpsLocation(Location l) {
+ if (l == null) {
+ return false;
+ }
+
+ double[] coords = { l.getLatitude(), l.getLongitude(), l.getAltitude() };
+ String processMethod = translateLocationProviderToProcess(l.getProvider());
+ // JPEG_GPS_TIMESTAMP expects seconds rather than milliseconds.
+ long timestamp = l.getTime() / 1000;
+
+ set(CaptureRequest.JPEG_GPS_TIMESTAMP, timestamp);
+ set(CaptureRequest.JPEG_GPS_COORDINATES, coords);
+
+ if (processMethod == null) {
+ Log.w(TAG, "setGpsLocation - No process method, Location is not from a GPS or NETWORK" +
+ "provider");
+ } else {
+ setBase(CaptureRequest.JPEG_GPS_PROCESSING_METHOD, processMethod);
+ }
+ return true;
+ }
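+
+ // Illustrative sketch (not part of the original source): the two methods above are
+ // unit-symmetric. setGpsLocation() divides Location#getTime() (milliseconds) by 1000
+ // before storing JPEG_GPS_TIMESTAMP (seconds); getGpsLocation() multiplies by 1000
+ // on the way back out:
+ //
+ //     Location in = new Location(LocationManager.GPS_PROVIDER);
+ //     in.setTime(1500000000000L);          // milliseconds
+ //     metadata.setGpsLocation(in);         // stores 1500000000 seconds
+ //     metadata.getGpsLocation().getTime(); // 1500000000000L milliseconds again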
+
+ private StreamConfigurationMap getStreamConfigurationMap() {
+ StreamConfiguration[] configurations = getBase(
+ CameraCharacteristics.SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
+ StreamConfigurationDuration[] minFrameDurations = getBase(
+ CameraCharacteristics.SCALER_AVAILABLE_MIN_FRAME_DURATIONS);
+ StreamConfigurationDuration[] stallDurations = getBase(
+ CameraCharacteristics.SCALER_AVAILABLE_STALL_DURATIONS);
+ StreamConfiguration[] depthConfigurations = getBase(
+ CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS);
+ StreamConfigurationDuration[] depthMinFrameDurations = getBase(
+ CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS);
+ StreamConfigurationDuration[] depthStallDurations = getBase(
+ CameraCharacteristics.DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS);
+ HighSpeedVideoConfiguration[] highSpeedVideoConfigurations = getBase(
+ CameraCharacteristics.CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS);
+ ReprocessFormatsMap inputOutputFormatsMap = getBase(
+ CameraCharacteristics.SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP);
+ int[] capabilities = getBase(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
+ boolean listHighResolution = false;
+ for (int capability : capabilities) {
+ if (capability == CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE) {
+ listHighResolution = true;
+ break;
+ }
+ }
+ return new StreamConfigurationMap(
+ configurations, minFrameDurations, stallDurations,
+ depthConfigurations, depthMinFrameDurations, depthStallDurations,
+ highSpeedVideoConfigurations, inputOutputFormatsMap,
+ listHighResolution);
+ }
+
+ private <T> Integer getMaxRegions(Key<T> key) {
+ final int AE = 0;
+ final int AWB = 1;
+ final int AF = 2;
+
+ // The order of the elements is: (AE, AWB, AF)
+ int[] maxRegions = getBase(CameraCharacteristics.CONTROL_MAX_REGIONS);
+
+ if (maxRegions == null) {
+ return null;
+ }
+
+ if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AE)) {
+ return maxRegions[AE];
+ } else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB)) {
+ return maxRegions[AWB];
+ } else if (key.equals(CameraCharacteristics.CONTROL_MAX_REGIONS_AF)) {
+ return maxRegions[AF];
+ } else {
+ throw new AssertionError("Invalid key " + key);
+ }
+ }
+
+ private <T> Integer getMaxNumOutputs(Key<T> key) {
+ final int RAW = 0;
+ final int PROC = 1;
+ final int PROC_STALLING = 2;
+
+ // The order of the elements is: (raw, proc+nonstalling, proc+stalling)
+ int[] maxNumOutputs = getBase(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_STREAMS);
+
+ if (maxNumOutputs == null) {
+ return null;
+ }
+
+ if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW)) {
+ return maxNumOutputs[RAW];
+ } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC)) {
+ return maxNumOutputs[PROC];
+ } else if (key.equals(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING)) {
+ return maxNumOutputs[PROC_STALLING];
+ } else {
+ throw new AssertionError("Invalid key " + key);
+ }
+ }
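+
+ // Illustrative sketch (not part of the original source): both helpers above fan a
+ // single packed int[3] out to three synthetic public keys. For example, if the HAL
+ // reports CONTROL_MAX_REGIONS = {1, 0, 1}:
+ //
+ //     get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE);  // 1
+ //     get(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB); // 0
+ //     get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF);  // 1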
+
+ private <T> TonemapCurve getTonemapCurve() {
+ float[] red = getBase(CaptureRequest.TONEMAP_CURVE_RED);
+ float[] green = getBase(CaptureRequest.TONEMAP_CURVE_GREEN);
+ float[] blue = getBase(CaptureRequest.TONEMAP_CURVE_BLUE);
+
+ if (areValuesAllNull(red, green, blue)) {
+ return null;
+ }
+
+ if (red == null || green == null || blue == null) {
+ Log.w(TAG, "getTonemapCurve - missing tone curve components");
+ return null;
+ }
+ TonemapCurve tc = new TonemapCurve(red, green, blue);
+ return tc;
+ }
+
+ private <T> void setBase(CameraCharacteristics.Key<T> key, T value) {
+ setBase(key.getNativeKey(), value);
+ }
+
+ private <T> void setBase(CaptureResult.Key<T> key, T value) {
+ setBase(key.getNativeKey(), value);
+ }
+
+ private <T> void setBase(CaptureRequest.Key<T> key, T value) {
+ setBase(key.getNativeKey(), value);
+ }
+
+ private <T> void setBase(Key<T> key, T value) {
+ int tag = nativeGetTagFromKeyLocal(key.getName());
+ if (value == null) {
+ // Erase the entry
+ writeValues(tag, /*src*/null);
+ return;
+ } // else update the entry to a new value
+
+ int nativeType = nativeGetTypeFromTagLocal(tag);
+ Marshaler<T> marshaler = getMarshalerForKey(key, nativeType);
+ int size = marshaler.calculateMarshalSize(value);
+
+ // TODO: Optimization. Cache the byte[] and reuse if the size is big enough.
+ byte[] values = new byte[size];
+
+ ByteBuffer buffer = ByteBuffer.wrap(values).order(ByteOrder.nativeOrder());
+ marshaler.marshal(value, buffer);
+
+ writeValues(tag, values);
+ }
+
+ // Use Command pattern here to avoid lots of expensive if/equals checks in get for overridden
+ // metadata.
+ private static final HashMap<Key<?>, SetCommand> sSetCommandMap =
+ new HashMap<Key<?>, SetCommand>();
+ static {
+ sSetCommandMap.put(CameraCharacteristics.SCALER_AVAILABLE_FORMATS.getNativeKey(),
+ new SetCommand() {
+ @Override
+ public <T> void setValue(CameraMetadataNative metadata, T value) {
+ metadata.setAvailableFormats((int[]) value);
+ }
+ });
+ sSetCommandMap.put(CaptureResult.STATISTICS_FACE_RECTANGLES.getNativeKey(),
+ new SetCommand() {
+ @Override
+ public <T> void setValue(CameraMetadataNative metadata, T value) {
+ metadata.setFaceRectangles((Rect[]) value);
+ }
+ });
+ sSetCommandMap.put(CaptureResult.STATISTICS_FACES.getNativeKey(),
+ new SetCommand() {
+ @Override
+ public <T> void setValue(CameraMetadataNative metadata, T value) {
+ metadata.setFaces((Face[])value);
+ }
+ });
+ sSetCommandMap.put(CaptureRequest.TONEMAP_CURVE.getNativeKey(), new SetCommand() {
+ @Override
+ public <T> void setValue(CameraMetadataNative metadata, T value) {
+ metadata.setTonemapCurve((TonemapCurve) value);
+ }
+ });
+ sSetCommandMap.put(CaptureResult.JPEG_GPS_LOCATION.getNativeKey(), new SetCommand() {
+ @Override
+ public <T> void setValue(CameraMetadataNative metadata, T value) {
+ metadata.setGpsLocation((Location) value);
+ }
+ });
+ }
+
+ private boolean setAvailableFormats(int[] value) {
+ int[] availableFormat = value;
+ if (value == null) {
+ // Let setBase() handle the null value case.
+ return false;
+ }
+
+ int[] newValues = new int[availableFormat.length];
+ for (int i = 0; i < availableFormat.length; i++) {
+ newValues[i] = availableFormat[i];
+ if (availableFormat[i] == ImageFormat.JPEG) {
+ newValues[i] = NATIVE_JPEG_FORMAT;
+ }
+ }
+
+ setBase(CameraCharacteristics.SCALER_AVAILABLE_FORMATS, newValues);
+ return true;
+ }
+
+ /**
+ * Convert face rectangles from the managed-side definition to the native-side
+ * definition, as the two differ.
+ * <p>
+ * Managed-side face rectangles are defined as: left, top, width, height.
+ * Native-side face rectangles are defined as: left, top, right, bottom.
+ * The input face rectangles need to be converted to the native-side definition when
+ * set is called.
+ * </p>
+ *
+ * @param faceRects Input face rectangles.
+ * @return {@code true} if the face rectangles were set successfully; {@code false}
+ * otherwise, letting the caller (setBase) handle it appropriately.
+ */
+ private boolean setFaceRectangles(Rect[] faceRects) {
+ if (faceRects == null) {
+ return false;
+ }
+
+ Rect[] newFaceRects = new Rect[faceRects.length];
+ for (int i = 0; i < newFaceRects.length; i++) {
+ newFaceRects[i] = new Rect(
+ faceRects[i].left,
+ faceRects[i].top,
+ faceRects[i].right + faceRects[i].left,
+ faceRects[i].bottom + faceRects[i].top);
+ }
+
+ setBase(CaptureResult.STATISTICS_FACE_RECTANGLES, newFaceRects);
+ return true;
+ }
+
+ private <T> boolean setTonemapCurve(TonemapCurve tc) {
+ if (tc == null) {
+ return false;
+ }
+
+ float[][] curve = new float[3][];
+ for (int i = TonemapCurve.CHANNEL_RED; i <= TonemapCurve.CHANNEL_BLUE; i++) {
+ int pointCount = tc.getPointCount(i);
+ curve[i] = new float[pointCount * TonemapCurve.POINT_SIZE];
+ tc.copyColorCurve(i, curve[i], 0);
+ }
+ setBase(CaptureRequest.TONEMAP_CURVE_RED, curve[0]);
+ setBase(CaptureRequest.TONEMAP_CURVE_GREEN, curve[1]);
+ setBase(CaptureRequest.TONEMAP_CURVE_BLUE, curve[2]);
+
+ return true;
+ }
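+
+ // Illustrative sketch (not part of the original source): each channel curve is
+ // flattened to interleaved (Pin, Pout) floats (TonemapCurve.POINT_SIZE == 2) before
+ // being written to its per-channel tag. A two-point linear red curve, for example:
+ //
+ //     points (0.0, 0.0) and (1.0, 1.0) -> curve[CHANNEL_RED] = {0f, 0f, 1f, 1f}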
+
+ private long mMetadataPtr; // native CameraMetadata*
+
+ private native long nativeAllocate();
+ private native long nativeAllocateCopy(CameraMetadataNative other)
+ throws NullPointerException;
+
+ private native synchronized void nativeWriteToParcel(Parcel dest);
+ private native synchronized void nativeReadFromParcel(Parcel source);
+ private native synchronized void nativeSwap(CameraMetadataNative other)
+ throws NullPointerException;
+ private native synchronized void nativeClose();
+ private native synchronized boolean nativeIsEmpty();
+ private native synchronized int nativeGetEntryCount();
+
+ private native synchronized byte[] nativeReadValues(int tag);
+ private native synchronized void nativeWriteValues(int tag, byte[] src);
+ private native synchronized void nativeDump() throws IOException; // dump to ALOGD
+
+ private native synchronized ArrayList nativeGetAllVendorKeys(Class keyClass);
+ private native synchronized int nativeGetTagFromKeyLocal(String keyName)
+ throws IllegalArgumentException;
+ private native synchronized int nativeGetTypeFromTagLocal(int tag)
+ throws IllegalArgumentException;
+ private static native int nativeGetTagFromKey(String keyName, long vendorId)
+ throws IllegalArgumentException;
+ private static native int nativeGetTypeFromTag(int tag, long vendorId)
+ throws IllegalArgumentException;
+
+ /**
+ * <p>Perform a 0-copy swap of the internal metadata with another object.</p>
+ *
+ * <p>Useful to convert a CameraMetadata into e.g. a CaptureRequest.</p>
+ *
+ * @param other Metadata to swap with
+ * @throws NullPointerException if other was null
+ * @hide
+ */
+ public void swap(CameraMetadataNative other) {
+ nativeSwap(other);
+ }
+
+ /**
+ * @hide
+ */
+ public int getEntryCount() {
+ return nativeGetEntryCount();
+ }
+
+ /**
+ * Does this metadata contain at least 1 entry?
+ *
+ * @hide
+ */
+ public boolean isEmpty() {
+ return nativeIsEmpty();
+ }
+
+
+ /**
+ * Return a list containing keys of the given key class for all defined vendor tags.
+ *
+ * @hide
+ */
+ public <K> ArrayList<K> getAllVendorKeys(Class<K> keyClass) {
+ if (keyClass == null) {
+ throw new NullPointerException();
+ }
+ return (ArrayList<K>) nativeGetAllVendorKeys(keyClass);
+ }
+
+ /**
+ * Convert a key string into the equivalent native tag.
+ *
+ * @throws IllegalArgumentException if the key was not recognized
+ * @throws NullPointerException if the key was null
+ *
+ * @hide
+ */
+ public static int getTag(String key) {
+ return nativeGetTagFromKey(key, Long.MAX_VALUE);
+ }
+
+ /**
+ * Convert a key string into the equivalent native tag.
+ *
+ * @throws IllegalArgumentException if the key was not recognized
+ * @throws NullPointerException if the key was null
+ *
+ * @hide
+ */
+ public static int getTag(String key, long vendorId) {
+ return nativeGetTagFromKey(key, vendorId);
+ }
+
+ /**
+ * Get the underlying native type for a tag.
+ *
+ * @param tag An integer tag, see e.g. {@link #getTag}
+ * @param vendorId A vendor tag provider id
+ * @return An int enum for the metadata type, see e.g. {@link #TYPE_BYTE}
+ *
+ * @hide
+ */
+ public static int getNativeType(int tag, long vendorId) {
+ return nativeGetTypeFromTag(tag, vendorId);
+ }
+
+ /**
+ * <p>Updates the existing entry for tag with the new bytes pointed to by src, erasing
+ * the entry if src was null.</p>
+ *
+ * <p>An empty array can be passed in to update the entry to 0 elements.</p>
+ *
+ * @param tag An integer tag, see e.g. {@link #getTag}
+ * @param src An array of bytes, or null to erase the entry
+ *
+ * @hide
+ */
+ public void writeValues(int tag, byte[] src) {
+ nativeWriteValues(tag, src);
+ }
+
+ /**
+ * <p>Returns a byte[] of data corresponding to this tag. Use a wrapped ByteBuffer to deserialize
+ * the data properly.</p>
+ *
+ * <p>An empty array can be returned to denote an existing entry with 0 elements.</p>
+ *
+ * @param tag An integer tag, see e.g. {@link #getTag}
+ *
+ * @return {@code null} if there were 0 entries for this tag, a byte[] otherwise.
+ * @hide
+ */
+ public byte[] readValues(int tag) {
+ // TODO: Optimization. Native code returns a ByteBuffer instead.
+ return nativeReadValues(tag);
+ }
+
+ /**
+ * Dumps the native metadata contents to logcat.
+ *
+ * <p>Visibility for testing/debugging only. The results will not
+ * include any synthesized keys, as they are invisible to the native layer.</p>
+ *
+ * @hide
+ */
+ public void dumpToLog() {
+ try {
+ nativeDump();
+ } catch (IOException e) {
+ Log.wtf(TAG, "Dump logging failed", e);
+ }
+ }
+
+ @Override
+ protected void finalize() throws Throwable {
+ try {
+ close();
+ } finally {
+ super.finalize();
+ }
+ }
+
+ /**
+ * Get the marshaler compatible with the {@code key} and type {@code T}.
+ *
+ * @throws UnsupportedOperationException
+ * if the native/managed type combination for {@code key} is not supported
+ */
+ private static <T> Marshaler<T> getMarshalerForKey(Key<T> key, int nativeType) {
+ return MarshalRegistry.getMarshaler(key.getTypeReference(),
+ nativeType);
+ }
+
+ @SuppressWarnings({ "unchecked", "rawtypes" })
+ private static void registerAllMarshalers() {
+ if (DEBUG) {
+ Log.v(TAG, "Shall register metadata marshalers");
+ }
+
+ MarshalQueryable[] queryList = new MarshalQueryable[] {
+ // marshalers for standard types
+ new MarshalQueryablePrimitive(),
+ new MarshalQueryableEnum(),
+ new MarshalQueryableArray(),
+
+ // pseudo-standard types that expand/narrow the native type into a managed type
+ new MarshalQueryableBoolean(),
+ new MarshalQueryableNativeByteToInteger(),
+
+ // marshalers for custom types
+ new MarshalQueryableRect(),
+ new MarshalQueryableSize(),
+ new MarshalQueryableSizeF(),
+ new MarshalQueryableString(),
+ new MarshalQueryableReprocessFormatsMap(),
+ new MarshalQueryableRange(),
+ new MarshalQueryablePair(),
+ new MarshalQueryableMeteringRectangle(),
+ new MarshalQueryableColorSpaceTransform(),
+ new MarshalQueryableStreamConfiguration(),
+ new MarshalQueryableStreamConfigurationDuration(),
+ new MarshalQueryableRggbChannelVector(),
+ new MarshalQueryableBlackLevelPattern(),
+ new MarshalQueryableHighSpeedVideoConfiguration(),
+
+ // generic parcelable marshaler (MUST BE LAST since it has lowest priority)
+ new MarshalQueryableParcelable(),
+ };
+
+ for (MarshalQueryable query : queryList) {
+ MarshalRegistry.registerMarshalQueryable(query);
+ }
+ if (DEBUG) {
+ Log.v(TAG, "Registered metadata marshalers");
+ }
+ }
+
+ /** Check if input arguments are all {@code null}.
+ *
+ * @param objs Input arguments for null check
+ * @return {@code true} if input arguments are all {@code null}, otherwise {@code false}
+ */
+ private static boolean areValuesAllNull(Object... objs) {
+ for (Object o : objs) {
+ if (o != null) return false;
+ }
+ return true;
+ }
+
+ static {
+ registerAllMarshalers();
+ }
+}
diff --git a/android/hardware/camera2/impl/CaptureResultExtras.java b/android/hardware/camera2/impl/CaptureResultExtras.java
new file mode 100644
index 00000000..40535e23
--- /dev/null
+++ b/android/hardware/camera2/impl/CaptureResultExtras.java
@@ -0,0 +1,115 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.impl;
+
+import android.os.Parcel;
+import android.os.Parcelable;
+
+/**
+ * @hide
+ */
+public class CaptureResultExtras implements Parcelable {
+ private int requestId;
+ private int subsequenceId;
+ private int afTriggerId;
+ private int precaptureTriggerId;
+ private long frameNumber;
+ private int partialResultCount;
+ private int errorStreamId;
+
+ public static final Parcelable.Creator<CaptureResultExtras> CREATOR =
+ new Parcelable.Creator<CaptureResultExtras>() {
+ @Override
+ public CaptureResultExtras createFromParcel(Parcel in) {
+ return new CaptureResultExtras(in);
+ }
+
+ @Override
+ public CaptureResultExtras[] newArray(int size) {
+ return new CaptureResultExtras[size];
+ }
+ };
+
+ private CaptureResultExtras(Parcel in) {
+ readFromParcel(in);
+ }
+
+ public CaptureResultExtras(int requestId, int subsequenceId, int afTriggerId,
+ int precaptureTriggerId, long frameNumber,
+ int partialResultCount, int errorStreamId) {
+ this.requestId = requestId;
+ this.subsequenceId = subsequenceId;
+ this.afTriggerId = afTriggerId;
+ this.precaptureTriggerId = precaptureTriggerId;
+ this.frameNumber = frameNumber;
+ this.partialResultCount = partialResultCount;
+ this.errorStreamId = errorStreamId;
+ }
+
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ @Override
+ public void writeToParcel(Parcel dest, int flags) {
+ dest.writeInt(requestId);
+ dest.writeInt(subsequenceId);
+ dest.writeInt(afTriggerId);
+ dest.writeInt(precaptureTriggerId);
+ dest.writeLong(frameNumber);
+ dest.writeInt(partialResultCount);
+ dest.writeInt(errorStreamId);
+ }
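+
+ // Note: readFromParcel() below must read these fields back in exactly the order
+ // written here; the Parcel contract is positional, not keyed.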
+
+ public void readFromParcel(Parcel in) {
+ requestId = in.readInt();
+ subsequenceId = in.readInt();
+ afTriggerId = in.readInt();
+ precaptureTriggerId = in.readInt();
+ frameNumber = in.readLong();
+ partialResultCount = in.readInt();
+ errorStreamId = in.readInt();
+ }
+
+ public int getRequestId() {
+ return requestId;
+ }
+
+ public int getSubsequenceId() {
+ return subsequenceId;
+ }
+
+ public int getAfTriggerId() {
+ return afTriggerId;
+ }
+
+ public int getPrecaptureTriggerId() {
+ return precaptureTriggerId;
+ }
+
+ public long getFrameNumber() {
+ return frameNumber;
+ }
+
+ public int getPartialResultCount() {
+ return partialResultCount;
+ }
+
+ public int getErrorStreamId() {
+ return errorStreamId;
+ }
+}
diff --git a/android/hardware/camera2/impl/GetCommand.java b/android/hardware/camera2/impl/GetCommand.java
new file mode 100644
index 00000000..a3c677a6
--- /dev/null
+++ b/android/hardware/camera2/impl/GetCommand.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.impl;
+
+/**
+ * Getter interface for use with Command pattern metadata value getters.
+ */
+public interface GetCommand {
+
+ /**
+ * Get the value from the given {@link CameraMetadataNative} object.
+ *
+ * @param metadata the {@link CameraMetadataNative} object to get the value from.
+ * @param key the {@link CameraMetadataNative.Key} to look up.
+ * @param <T> the type of the value.
+ * @return the value for a given {@link CameraMetadataNative.Key}.
+ */
+ public <T> T getValue(CameraMetadataNative metadata, CameraMetadataNative.Key<T> key);
+}
diff --git a/android/hardware/camera2/impl/ICameraDeviceUserWrapper.java b/android/hardware/camera2/impl/ICameraDeviceUserWrapper.java
new file mode 100644
index 00000000..27087a2e
--- /dev/null
+++ b/android/hardware/camera2/impl/ICameraDeviceUserWrapper.java
@@ -0,0 +1,228 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.impl;
+
+import static android.hardware.camera2.CameraAccessException.CAMERA_DISABLED;
+import static android.hardware.camera2.CameraAccessException.CAMERA_DISCONNECTED;
+import static android.hardware.camera2.CameraAccessException.CAMERA_IN_USE;
+import static android.hardware.camera2.CameraAccessException.CAMERA_ERROR;
+import static android.hardware.camera2.CameraAccessException.MAX_CAMERAS_IN_USE;
+import static android.hardware.camera2.CameraAccessException.CAMERA_DEPRECATED_HAL;
+
+import android.hardware.ICameraService;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.ICameraDeviceUser;
+import android.hardware.camera2.impl.CameraMetadataNative;
+import android.hardware.camera2.params.OutputConfiguration;
+import android.hardware.camera2.utils.SubmitInfo;
+import android.os.IBinder;
+import android.os.RemoteException;
+import android.view.Surface;
+
+/**
+ * A wrapper around ICameraDeviceUser.
+ *
+ * Mainly used to convert ServiceSpecificExceptions to the correct
+ * checked / unchecked exception.
+ *
+ * @hide
+ */
+public class ICameraDeviceUserWrapper {
+
+ private final ICameraDeviceUser mRemoteDevice;
+
+ public ICameraDeviceUserWrapper(ICameraDeviceUser remoteDevice) {
+ if (remoteDevice == null) {
+ throw new NullPointerException("Remote device may not be null");
+ }
+ mRemoteDevice = remoteDevice;
+ }
+
+ public void unlinkToDeath(IBinder.DeathRecipient recipient, int flags) {
+ if (mRemoteDevice.asBinder() != null) {
+ mRemoteDevice.asBinder().unlinkToDeath(recipient, flags);
+ }
+ }
+
+ public void disconnect() {
+ try {
+ mRemoteDevice.disconnect();
+ } catch (RemoteException t) {
+ // ignore binder errors for disconnect
+ }
+ }
+
+ public SubmitInfo submitRequest(CaptureRequest request, boolean streaming)
+ throws CameraAccessException {
+ try {
+ return mRemoteDevice.submitRequest(request, streaming);
+ } catch (Throwable t) {
+ CameraManager.throwAsPublicException(t);
+ throw new UnsupportedOperationException("Unexpected exception", t);
+ }
+ }
+
+ public SubmitInfo submitRequestList(CaptureRequest[] requestList, boolean streaming)
+ throws CameraAccessException {
+ try {
+ return mRemoteDevice.submitRequestList(requestList, streaming);
+ } catch (Throwable t) {
+ CameraManager.throwAsPublicException(t);
+ throw new UnsupportedOperationException("Unexpected exception", t);
+ }
+ }
+
+ public long cancelRequest(int requestId) throws CameraAccessException {
+ try {
+ return mRemoteDevice.cancelRequest(requestId);
+ } catch (Throwable t) {
+ CameraManager.throwAsPublicException(t);
+ throw new UnsupportedOperationException("Unexpected exception", t);
+ }
+ }
+
+ public void beginConfigure() throws CameraAccessException {
+ try {
+ mRemoteDevice.beginConfigure();
+ } catch (Throwable t) {
+ CameraManager.throwAsPublicException(t);
+ throw new UnsupportedOperationException("Unexpected exception", t);
+ }
+ }
+
+ public void endConfigure(int operatingMode) throws CameraAccessException {
+ try {
+ mRemoteDevice.endConfigure(operatingMode);
+ } catch (Throwable t) {
+ CameraManager.throwAsPublicException(t);
+ throw new UnsupportedOperationException("Unexpected exception", t);
+ }
+ }
+
+ public void deleteStream(int streamId) throws CameraAccessException {
+ try {
+ mRemoteDevice.deleteStream(streamId);
+ } catch (Throwable t) {
+ CameraManager.throwAsPublicException(t);
+ throw new UnsupportedOperationException("Unexpected exception", t);
+ }
+ }
+
+ public int createStream(OutputConfiguration outputConfiguration)
+ throws CameraAccessException {
+ try {
+ return mRemoteDevice.createStream(outputConfiguration);
+ } catch (Throwable t) {
+ CameraManager.throwAsPublicException(t);
+ throw new UnsupportedOperationException("Unexpected exception", t);
+ }
+ }
+
+ public int createInputStream(int width, int height, int format) throws CameraAccessException {
+ try {
+ return mRemoteDevice.createInputStream(width, height, format);
+ } catch (Throwable t) {
+ CameraManager.throwAsPublicException(t);
+ throw new UnsupportedOperationException("Unexpected exception", t);
+ }
+ }
+
+ public Surface getInputSurface() throws CameraAccessException {
+ try {
+ return mRemoteDevice.getInputSurface();
+ } catch (Throwable t) {
+ CameraManager.throwAsPublicException(t);
+ throw new UnsupportedOperationException("Unexpected exception", t);
+ }
+ }
+
+ public CameraMetadataNative createDefaultRequest(int templateId) throws CameraAccessException {
+ try {
+ return mRemoteDevice.createDefaultRequest(templateId);
+ } catch (Throwable t) {
+ CameraManager.throwAsPublicException(t);
+ throw new UnsupportedOperationException("Unexpected exception", t);
+ }
+ }
+
+ public CameraMetadataNative getCameraInfo() throws CameraAccessException {
+ try {
+ return mRemoteDevice.getCameraInfo();
+ } catch (Throwable t) {
+ CameraManager.throwAsPublicException(t);
+ throw new UnsupportedOperationException("Unexpected exception", t);
+ }
+ }
+
+ public void waitUntilIdle() throws CameraAccessException {
+ try {
+ mRemoteDevice.waitUntilIdle();
+ } catch (Throwable t) {
+ CameraManager.throwAsPublicException(t);
+ throw new UnsupportedOperationException("Unexpected exception", t);
+ }
+ }
+
+ public long flush() throws CameraAccessException {
+ try {
+ return mRemoteDevice.flush();
+ } catch (Throwable t) {
+ CameraManager.throwAsPublicException(t);
+ throw new UnsupportedOperationException("Unexpected exception", t);
+ }
+ }
+
+ public void prepare(int streamId) throws CameraAccessException {
+ try {
+ mRemoteDevice.prepare(streamId);
+ } catch (Throwable t) {
+ CameraManager.throwAsPublicException(t);
+ throw new UnsupportedOperationException("Unexpected exception", t);
+ }
+ }
+
+ public void tearDown(int streamId) throws CameraAccessException {
+ try {
+ mRemoteDevice.tearDown(streamId);
+ } catch (Throwable t) {
+ CameraManager.throwAsPublicException(t);
+ throw new UnsupportedOperationException("Unexpected exception", t);
+ }
+ }
+
+ public void prepare2(int maxCount, int streamId) throws CameraAccessException {
+ try {
+ mRemoteDevice.prepare2(maxCount, streamId);
+ } catch (Throwable t) {
+ CameraManager.throwAsPublicException(t);
+ throw new UnsupportedOperationException("Unexpected exception", t);
+ }
+ }
+
+ public void finalizeOutputConfigurations(int streamId, OutputConfiguration deferredConfig)
+ throws CameraAccessException {
+ try {
+ mRemoteDevice.finalizeOutputConfigurations(streamId, deferredConfig);
+ } catch (Throwable t) {
+ CameraManager.throwAsPublicException(t);
+ throw new UnsupportedOperationException("Unexpected exception", t);
+ }
+ }
+
+}
diff --git a/android/hardware/camera2/impl/PublicKey.java b/android/hardware/camera2/impl/PublicKey.java
new file mode 100644
index 00000000..d894c1ff
--- /dev/null
+++ b/android/hardware/camera2/impl/PublicKey.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.impl;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Denote a static field {@code Key} as being public-visible (in the SDK).
+ *
+ * <p>Keys without this annotation are assumed to be {@code @hide}.</p>
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.FIELD)
+public @interface PublicKey {
+
+}
diff --git a/android/hardware/camera2/impl/SetCommand.java b/android/hardware/camera2/impl/SetCommand.java
new file mode 100644
index 00000000..82a01b22
--- /dev/null
+++ b/android/hardware/camera2/impl/SetCommand.java
@@ -0,0 +1,33 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.impl;
+
+/**
+ * Setter interface for use with Command pattern metadata value setters.
+ */
+public interface SetCommand {
+
+ /**
+ * Set the value in the given metadata.
+ *
+ * @param metadata {@link CameraMetadataNative} to set value in.
+ * @param value value to set.
+ * @param <T> type of the value to set.
+ */
+ public <T> void setValue(/*inout*/CameraMetadataNative metadata,
+ T value);
+}
diff --git a/android/hardware/camera2/impl/SyntheticKey.java b/android/hardware/camera2/impl/SyntheticKey.java
new file mode 100644
index 00000000..034a0493
--- /dev/null
+++ b/android/hardware/camera2/impl/SyntheticKey.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.impl;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+/**
+ * Denote a static field {@code Key} as being synthetic (i.e. not having a native
+ * tag one-to-one counterpart).
+ *
+ * <p>Keys without this annotation are assumed to always have a native counterpart.</p>
+ *
+ * <p>In particular, a key with a native counterpart will always have its {@code #getTag}
+ * method succeed.</p>
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.FIELD)
+public @interface SyntheticKey {
+
+}
diff --git a/android/hardware/camera2/legacy/BurstHolder.java b/android/hardware/camera2/legacy/BurstHolder.java
new file mode 100644
index 00000000..23efe15f
--- /dev/null
+++ b/android/hardware/camera2/legacy/BurstHolder.java
@@ -0,0 +1,90 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.legacy;
+
+import android.hardware.camera2.CaptureRequest;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+/**
+ * Immutable container for a burst of capture requests.
+ */
+public class BurstHolder {
+ private static final String TAG = "BurstHolder";
+ private final ArrayList<RequestHolder.Builder> mRequestBuilders;
+ private final boolean mRepeating;
+ private final int mRequestId;
+
+ /**
+ * Immutable container for a burst of capture requests.
+ *
+ * @param requestId id of the burst request.
+ * @param repeating true if this burst is repeating.
+ * @param requests the array of {@link CaptureRequest}s for this burst.
+ * @param jpegSurfaceIds a {@link Collection} of IDs for the surfaces that have jpeg outputs.
+ */
+ public BurstHolder(int requestId, boolean repeating, CaptureRequest[] requests,
+ Collection<Long> jpegSurfaceIds) {
+ mRequestBuilders = new ArrayList<>();
+ int i = 0;
+ for (CaptureRequest r : requests) {
+ mRequestBuilders.add(new RequestHolder.Builder(requestId, /*subsequenceId*/i,
+ /*request*/r, repeating, jpegSurfaceIds));
+ ++i;
+ }
+ mRepeating = repeating;
+ mRequestId = requestId;
+ }
+
+ /**
+ * Get the id of this request.
+ */
+ public int getRequestId() {
+ return mRequestId;
+ }
+
+ /**
+ * Return true if this burst is repeating.
+ */
+ public boolean isRepeating() {
+ return mRepeating;
+ }
+
+ /**
+ * Return the number of requests in this burst sequence.
+ */
+ public int getNumberOfRequests() {
+ return mRequestBuilders.size();
+ }
+
+ /**
+ * Create a list of {@link RequestHolder} objects encapsulating the requests in this burst.
+ *
+ * @param frameNumber the starting framenumber for this burst.
+ * @return the list of {@link RequestHolder} objects.
+ */
+ public List<RequestHolder> produceRequestHolders(long frameNumber) {
+ ArrayList<RequestHolder> holders = new ArrayList<RequestHolder>();
+ int i = 0;
+ for (RequestHolder.Builder b : mRequestBuilders) {
+ holders.add(b.build(frameNumber + i));
+ ++i;
+ }
+ return holders;
+ }
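+
+ // Illustrative sketch (not part of the original source): frame numbers are assigned
+ // consecutively from the starting frame. For a burst of 3 requests:
+ //
+ //     List<RequestHolder> holders = burst.produceRequestHolders(100);
+ //     // holders carry frame numbers 100, 101, 102 in order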
+}
diff --git a/android/hardware/camera2/legacy/CameraDeviceState.java b/android/hardware/camera2/legacy/CameraDeviceState.java
new file mode 100644
index 00000000..89ecd5f1
--- /dev/null
+++ b/android/hardware/camera2/legacy/CameraDeviceState.java
@@ -0,0 +1,362 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.legacy;
+
+import android.hardware.camera2.impl.CameraDeviceImpl;
+import android.hardware.camera2.impl.CameraMetadataNative;
+import android.os.Handler;
+import android.util.Log;
+
+/**
+ * Emulates the state of a single Camera2 device.
+ *
+ * <p>
+ * This class acts as the state machine for a camera device. Valid state transitions are given
+ * in the table below:
+ * </p>
+ *
+ * <ul>
+ * <li>{@code UNCONFIGURED -> CONFIGURING}</li>
+ * <li>{@code CONFIGURING -> IDLE}</li>
+ * <li>{@code IDLE -> CONFIGURING}</li>
+ * <li>{@code IDLE -> CAPTURING}</li>
+ * <li>{@code IDLE -> IDLE}</li>
+ * <li>{@code CAPTURING -> IDLE}</li>
+ * <li>{@code ANY -> ERROR}</li>
+ * </ul>
+ */
+public class CameraDeviceState {
+ private static final String TAG = "CameraDeviceState";
+ private static final boolean DEBUG = false;
+
+ private static final int STATE_ERROR = 0;
+ private static final int STATE_UNCONFIGURED = 1;
+ private static final int STATE_CONFIGURING = 2;
+ private static final int STATE_IDLE = 3;
+ private static final int STATE_CAPTURING = 4;
+
+ private static final String[] sStateNames = { "ERROR", "UNCONFIGURED", "CONFIGURING", "IDLE",
+ "CAPTURING"};
+
+ private int mCurrentState = STATE_UNCONFIGURED;
+ private int mCurrentError = NO_CAPTURE_ERROR;
+
+ private RequestHolder mCurrentRequest = null;
+
+ private Handler mCurrentHandler = null;
+ private CameraDeviceStateListener mCurrentListener = null;
+
+ /**
+ * Error code used by {@link #setCaptureStart} and {@link #setCaptureResult} to indicate that no
+ * error has occurred.
+ */
+ public static final int NO_CAPTURE_ERROR = -1;
+
+ /**
+ * CameraDeviceStateListener callbacks to be called after state transitions.
+ */
+ public interface CameraDeviceStateListener {
+ void onError(int errorCode, Object errorArg, RequestHolder holder);
+ void onConfiguring();
+ void onIdle();
+ void onBusy();
+ void onCaptureStarted(RequestHolder holder, long timestamp);
+ void onCaptureResult(CameraMetadataNative result, RequestHolder holder);
+ void onRequestQueueEmpty();
+ void onRepeatingRequestError(long lastFrameNumber, int repeatingRequestId);
+ }
+
+ /**
+ * Transition to the {@code ERROR} state.
+ *
+ * <p>
+ * The device cannot exit the {@code ERROR} state. If the device was not already in the
+     * {@code ERROR} state, {@link CameraDeviceStateListener#onError(int, Object, RequestHolder)}
+     * will be called.
+ * </p>
+ *
+ * @param error the error to set. Should be one of the error codes defined in
+ * {@link CameraDeviceImpl.CameraDeviceCallbacks}.
+ */
+ public synchronized void setError(int error) {
+ mCurrentError = error;
+ doStateTransition(STATE_ERROR);
+ }
+
+ /**
+ * Transition to the {@code CONFIGURING} state, or {@code ERROR} if in an invalid state.
+ *
+ * <p>
+ * If the device was not already in the {@code CONFIGURING} state,
+ * {@link CameraDeviceStateListener#onConfiguring()} will be called.
+ * </p>
+ *
+ * @return {@code false} if an error has occurred.
+ */
+ public synchronized boolean setConfiguring() {
+ doStateTransition(STATE_CONFIGURING);
+ return mCurrentError == NO_CAPTURE_ERROR;
+ }
+
+ /**
+ * Transition to the {@code IDLE} state, or {@code ERROR} if in an invalid state.
+ *
+ * <p>
+ * If the device was not already in the {@code IDLE} state,
+ * {@link CameraDeviceStateListener#onIdle()} will be called.
+ * </p>
+ *
+ * @return {@code false} if an error has occurred.
+ */
+ public synchronized boolean setIdle() {
+ doStateTransition(STATE_IDLE);
+ return mCurrentError == NO_CAPTURE_ERROR;
+ }
+
+ /**
+ * Transition to the {@code CAPTURING} state, or {@code ERROR} if in an invalid state.
+ *
+ * <p>
+ * If the device was not already in the {@code CAPTURING} state,
+     * {@link CameraDeviceStateListener#onCaptureStarted(RequestHolder, long)} will be called.
+ * </p>
+ *
+ * @param request A {@link RequestHolder} containing the request for the current capture.
+ * @param timestamp The timestamp of the capture start in nanoseconds.
+ * @param captureError Report a recoverable error for a single request using a valid
+ * error code for {@code ICameraDeviceCallbacks}, or
+     *                     {@link #NO_CAPTURE_ERROR}.
+ * @return {@code false} if an error has occurred.
+ */
+ public synchronized boolean setCaptureStart(final RequestHolder request, long timestamp,
+ int captureError) {
+ mCurrentRequest = request;
+ doStateTransition(STATE_CAPTURING, timestamp, captureError);
+ return mCurrentError == NO_CAPTURE_ERROR;
+ }
+
+ /**
+ * Set the result for a capture.
+ *
+ * <p>
+ * If the device was in the {@code CAPTURING} state,
+ * {@link CameraDeviceStateListener#onCaptureResult(CameraMetadataNative, RequestHolder)} will
+ * be called with the given result, otherwise this will result in the device transitioning to
+ * the {@code ERROR} state,
+ * </p>
+ *
+ * @param request The {@link RequestHolder} request that created this result.
+ * @param result The {@link CameraMetadataNative} result to set.
+ * @param captureError Report a recoverable error for a single buffer or result using a valid
+ * error code for {@code ICameraDeviceCallbacks}, or
+ * {@link #NO_CAPTURE_ERROR}.
+     * @param captureErrorArg An argument for some {@code captureError} codes.
+ * @return {@code false} if an error has occurred.
+ */
+ public synchronized boolean setCaptureResult(final RequestHolder request,
+ final CameraMetadataNative result,
+ final int captureError, final Object captureErrorArg) {
+ if (mCurrentState != STATE_CAPTURING) {
+ Log.e(TAG, "Cannot receive result while in state: " + mCurrentState);
+ mCurrentError = CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE;
+ doStateTransition(STATE_ERROR);
+ return mCurrentError == NO_CAPTURE_ERROR;
+ }
+
+ if (mCurrentHandler != null && mCurrentListener != null) {
+ if (captureError != NO_CAPTURE_ERROR) {
+ mCurrentHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mCurrentListener.onError(captureError, captureErrorArg, request);
+ }
+ });
+ } else {
+ mCurrentHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mCurrentListener.onCaptureResult(result, request);
+ }
+ });
+ }
+ }
+ return mCurrentError == NO_CAPTURE_ERROR;
+ }
+
+ public synchronized boolean setCaptureResult(final RequestHolder request,
+ final CameraMetadataNative result) {
+ return setCaptureResult(request, result, NO_CAPTURE_ERROR, /*errorArg*/null);
+ }
+
+ /**
+ * Set repeating request error.
+ *
+     * <p>The repeating request has been stopped due to an error, such as abandoned output
+     * surfaces.</p>
+     *
+     * @param lastFrameNumber Frame number of the last repeating request before it was stopped.
+     * @param repeatingRequestId The ID of the repeating request being stopped.
+ */
+ public synchronized void setRepeatingRequestError(final long lastFrameNumber,
+ final int repeatingRequestId) {
+ mCurrentHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mCurrentListener.onRepeatingRequestError(lastFrameNumber, repeatingRequestId);
+ }
+ });
+ }
+
+ /**
+     * Indicate that the (non-repeating) request queue has become empty.
+     *
+     * <p>Sends notification that all non-repeating requests have been sent to the camera
+     * device.</p>
+ */
+ public synchronized void setRequestQueueEmpty() {
+ mCurrentHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mCurrentListener.onRequestQueueEmpty();
+ }
+ });
+ }
+
+ /**
+ * Set the listener for state transition callbacks.
+ *
+ * @param handler handler on which to call the callbacks.
+ * @param listener the {@link CameraDeviceStateListener} callbacks to call.
+ */
+ public synchronized void setCameraDeviceCallbacks(Handler handler,
+ CameraDeviceStateListener listener) {
+ mCurrentHandler = handler;
+ mCurrentListener = listener;
+ }
+
+ private void doStateTransition(int newState) {
+ doStateTransition(newState, /*timestamp*/0, NO_CAPTURE_ERROR);
+ }
+
+ private void doStateTransition(int newState, final long timestamp, final int error) {
+ if (newState != mCurrentState) {
+ String stateName = "UNKNOWN";
+ if (newState >= 0 && newState < sStateNames.length) {
+ stateName = sStateNames[newState];
+ }
+ Log.i(TAG, "Legacy camera service transitioning to state " + stateName);
+ }
+
+ // If we transitioned into a non-IDLE/non-ERROR state then mark the device as busy
+        if (newState != STATE_ERROR && newState != STATE_IDLE) {
+ if (mCurrentState != newState && mCurrentHandler != null &&
+ mCurrentListener != null) {
+ mCurrentHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mCurrentListener.onBusy();
+ }
+ });
+ }
+ }
+
+        switch (newState) {
+ case STATE_ERROR:
+ if (mCurrentState != STATE_ERROR && mCurrentHandler != null &&
+ mCurrentListener != null) {
+ mCurrentHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mCurrentListener.onError(mCurrentError, /*errorArg*/null, mCurrentRequest);
+ }
+ });
+ }
+ mCurrentState = STATE_ERROR;
+ break;
+ case STATE_CONFIGURING:
+ if (mCurrentState != STATE_UNCONFIGURED && mCurrentState != STATE_IDLE) {
+ Log.e(TAG, "Cannot call configure while in state: " + mCurrentState);
+ mCurrentError = CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE;
+ doStateTransition(STATE_ERROR);
+ break;
+ }
+ if (mCurrentState != STATE_CONFIGURING && mCurrentHandler != null &&
+ mCurrentListener != null) {
+ mCurrentHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mCurrentListener.onConfiguring();
+ }
+ });
+ }
+ mCurrentState = STATE_CONFIGURING;
+ break;
+ case STATE_IDLE:
+ if (mCurrentState == STATE_IDLE) {
+ break;
+ }
+
+ if (mCurrentState != STATE_CONFIGURING && mCurrentState != STATE_CAPTURING) {
+ Log.e(TAG, "Cannot call idle while in state: " + mCurrentState);
+ mCurrentError = CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE;
+ doStateTransition(STATE_ERROR);
+ break;
+ }
+
+ if (mCurrentState != STATE_IDLE && mCurrentHandler != null &&
+ mCurrentListener != null) {
+ mCurrentHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mCurrentListener.onIdle();
+ }
+ });
+ }
+ mCurrentState = STATE_IDLE;
+ break;
+ case STATE_CAPTURING:
+ if (mCurrentState != STATE_IDLE && mCurrentState != STATE_CAPTURING) {
+ Log.e(TAG, "Cannot call capture while in state: " + mCurrentState);
+ mCurrentError = CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE;
+ doStateTransition(STATE_ERROR);
+ break;
+ }
+
+ if (mCurrentHandler != null && mCurrentListener != null) {
+ if (error != NO_CAPTURE_ERROR) {
+ mCurrentHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mCurrentListener.onError(error, /*errorArg*/null, mCurrentRequest);
+ }
+ });
+ } else {
+ mCurrentHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mCurrentListener.onCaptureStarted(mCurrentRequest, timestamp);
+ }
+ });
+ }
+ }
+ mCurrentState = STATE_CAPTURING;
+ break;
+ default:
+ throw new IllegalStateException("Transition to unknown state: " + newState);
+ }
+ }
+}
diff --git a/android/hardware/camera2/legacy/CameraDeviceUserShim.java b/android/hardware/camera2/legacy/CameraDeviceUserShim.java
new file mode 100644
index 00000000..49d4096e
--- /dev/null
+++ b/android/hardware/camera2/legacy/CameraDeviceUserShim.java
@@ -0,0 +1,727 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.legacy;
+
+import android.hardware.ICameraService;
+import android.hardware.Camera;
+import android.hardware.Camera.CameraInfo;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.ICameraDeviceCallbacks;
+import android.hardware.camera2.ICameraDeviceUser;
+import android.hardware.camera2.impl.CameraMetadataNative;
+import android.hardware.camera2.impl.CaptureResultExtras;
+import android.hardware.camera2.params.OutputConfiguration;
+import android.hardware.camera2.utils.SubmitInfo;
+import android.os.ConditionVariable;
+import android.os.IBinder;
+import android.os.Looper;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.Message;
+import android.os.RemoteException;
+import android.os.ServiceSpecificException;
+import android.util.Log;
+import android.util.SparseArray;
+import android.view.Surface;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import static android.system.OsConstants.EACCES;
+import static android.system.OsConstants.ENODEV;
+
+/**
+ * Compatibility implementation of the Camera2 API binder interface.
+ *
+ * <p>
+ * This is intended to be called from the same process as the client
+ * {@link android.hardware.camera2.CameraDevice}, and wraps a
+ * {@link android.hardware.camera2.legacy.LegacyCameraDevice} that emulates Camera2 service using
+ * the Camera1 API.
+ * </p>
+ *
+ * <p>
+ * Keep up to date with ICameraDeviceUser.aidl.
+ * </p>
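+ *
+ * <p>Illustrative creation path (a sketch; {@code callbacks} and {@code cameraId} are assumed
+ * to be supplied by the client):</p>
+ *
+ * <pre>{@code
+ * ICameraDeviceUser shim = CameraDeviceUserShim.connectBinderShim(callbacks, cameraId);
+ * }</pre>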
+ */
+@SuppressWarnings("deprecation")
+public class CameraDeviceUserShim implements ICameraDeviceUser {
+ private static final String TAG = "CameraDeviceUserShim";
+
+ private static final boolean DEBUG = false;
+ private static final int OPEN_CAMERA_TIMEOUT_MS = 5000; // 5 sec (same as api1 cts timeout)
+
+ private final LegacyCameraDevice mLegacyDevice;
+
+ private final Object mConfigureLock = new Object();
+ private int mSurfaceIdCounter;
+ private boolean mConfiguring;
+ private final SparseArray<Surface> mSurfaces;
+ private final CameraCharacteristics mCameraCharacteristics;
+ private final CameraLooper mCameraInit;
+ private final CameraCallbackThread mCameraCallbacks;
+
+
+ protected CameraDeviceUserShim(int cameraId, LegacyCameraDevice legacyCamera,
+ CameraCharacteristics characteristics, CameraLooper cameraInit,
+ CameraCallbackThread cameraCallbacks) {
+ mLegacyDevice = legacyCamera;
+ mConfiguring = false;
+ mSurfaces = new SparseArray<Surface>();
+ mCameraCharacteristics = characteristics;
+ mCameraInit = cameraInit;
+ mCameraCallbacks = cameraCallbacks;
+
+ mSurfaceIdCounter = 0;
+ }
+
+ private static int translateErrorsFromCamera1(int errorCode) {
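+        // Camera1 reports failures as negative errno values; map EACCES (permission
+        // denied) to the camera service error code and pass everything else through.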
+ if (errorCode == -EACCES) {
+ return ICameraService.ERROR_PERMISSION_DENIED;
+ }
+
+ return errorCode;
+ }
+
+ /**
+ * Create a separate looper/thread for the camera to run on; open the camera.
+ *
+ * <p>Since the camera automatically latches on to the current thread's looper,
+ * it's important that we have our own thread with our own looper to guarantee
+ * that the camera callbacks get correctly posted to our own thread.</p>
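+     *
+     * <p>Typical use (a sketch): construct with a camera id, call {@link #waitForOpen} to
+     * block until initialization has finished, then retrieve the device with
+     * {@link #getCamera()}.</p>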
+ */
+ private static class CameraLooper implements Runnable, AutoCloseable {
+ private final int mCameraId;
+ private Looper mLooper;
+ private volatile int mInitErrors;
+ private final Camera mCamera = Camera.openUninitialized();
+ private final ConditionVariable mStartDone = new ConditionVariable();
+ private final Thread mThread;
+
+ /**
+         * Spin up a new thread and immediately open the camera in the background.
+ *
+ * <p>Use {@link #waitForOpen} to block until the camera is finished opening.</p>
+ *
+ * @param cameraId numeric camera Id
+ *
+ * @see #waitForOpen
+ */
+ public CameraLooper(int cameraId) {
+ mCameraId = cameraId;
+
+ mThread = new Thread(this);
+ mThread.start();
+ }
+
+ public Camera getCamera() {
+ return mCamera;
+ }
+
+ @Override
+ public void run() {
+ // Set up a looper to be used by camera.
+ Looper.prepare();
+
+ // Save the looper so that we can terminate this thread
+ // after we are done with it.
+ mLooper = Looper.myLooper();
+ mInitErrors = mCamera.cameraInitUnspecified(mCameraId);
+ mStartDone.open();
+ Looper.loop(); // Blocks forever until #close is called.
+ }
+
+ /**
+ * Quit the looper safely; then join until the thread shuts down.
+ */
+ @Override
+ public void close() {
+ if (mLooper == null) {
+ return;
+ }
+
+ mLooper.quitSafely();
+ try {
+ mThread.join();
+ } catch (InterruptedException e) {
+ throw new AssertionError(e);
+ }
+
+ mLooper = null;
+ }
+
+ /**
+ * Block until the camera opens; then return its initialization error code (if any).
+ *
+ * @param timeoutMs timeout in milliseconds
+ *
+ * @return int error code
+ *
+         * @throws ServiceSpecificException if the camera open times out
+         *                                  ({@code ERROR_INVALID_OPERATION})
+ */
+ public int waitForOpen(int timeoutMs) {
+ // Block until the camera is open asynchronously
+ if (!mStartDone.block(timeoutMs)) {
+ Log.e(TAG, "waitForOpen - Camera failed to open after timeout of "
+ + OPEN_CAMERA_TIMEOUT_MS + " ms");
+ try {
+ mCamera.release();
+ } catch (RuntimeException e) {
+ Log.e(TAG, "connectBinderShim - Failed to release camera after timeout ", e);
+ }
+
+ throw new ServiceSpecificException(ICameraService.ERROR_INVALID_OPERATION);
+ }
+
+ return mInitErrors;
+ }
+ }
+
+ /**
+ * A thread to process callbacks to send back to the camera client.
+ *
+ * <p>This effectively emulates one-way binder semantics when in the same process as the
+ * callee.</p>
+ */
+ private static class CameraCallbackThread implements ICameraDeviceCallbacks {
+ private static final int CAMERA_ERROR = 0;
+ private static final int CAMERA_IDLE = 1;
+ private static final int CAPTURE_STARTED = 2;
+ private static final int RESULT_RECEIVED = 3;
+ private static final int PREPARED = 4;
+ private static final int REPEATING_REQUEST_ERROR = 5;
+ private static final int REQUEST_QUEUE_EMPTY = 6;
+
+ private final HandlerThread mHandlerThread;
+ private Handler mHandler;
+
+ private final ICameraDeviceCallbacks mCallbacks;
+
+ public CameraCallbackThread(ICameraDeviceCallbacks callbacks) {
+ mCallbacks = callbacks;
+
+ mHandlerThread = new HandlerThread("LegacyCameraCallback");
+ mHandlerThread.start();
+ }
+
+ public void close() {
+ mHandlerThread.quitSafely();
+ }
+
+ @Override
+ public void onDeviceError(final int errorCode, final CaptureResultExtras resultExtras) {
+ Message msg = getHandler().obtainMessage(CAMERA_ERROR,
+ /*arg1*/ errorCode, /*arg2*/ 0,
+ /*obj*/ resultExtras);
+ getHandler().sendMessage(msg);
+ }
+
+ @Override
+ public void onDeviceIdle() {
+ Message msg = getHandler().obtainMessage(CAMERA_IDLE);
+ getHandler().sendMessage(msg);
+ }
+
+ @Override
+ public void onCaptureStarted(final CaptureResultExtras resultExtras, final long timestamp) {
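+            // Split the 64-bit timestamp across the two 32-bit Message args (low bits
+            // in arg1, high bits in arg2); CallbackHandler reassembles it on delivery.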
+ Message msg = getHandler().obtainMessage(CAPTURE_STARTED,
+ /*arg1*/ (int) (timestamp & 0xFFFFFFFFL),
+                    /*arg2*/ (int) ((timestamp >> 32) & 0xFFFFFFFFL),
+ /*obj*/ resultExtras);
+ getHandler().sendMessage(msg);
+ }
+
+ @Override
+ public void onResultReceived(final CameraMetadataNative result,
+ final CaptureResultExtras resultExtras) {
+ Object[] resultArray = new Object[] { result, resultExtras };
+ Message msg = getHandler().obtainMessage(RESULT_RECEIVED,
+ /*obj*/ resultArray);
+ getHandler().sendMessage(msg);
+ }
+
+ @Override
+ public void onPrepared(int streamId) {
+ Message msg = getHandler().obtainMessage(PREPARED,
+ /*arg1*/ streamId, /*arg2*/ 0);
+ getHandler().sendMessage(msg);
+ }
+
+ @Override
+ public void onRepeatingRequestError(long lastFrameNumber, int repeatingRequestId) {
+ Object[] objArray = new Object[] { lastFrameNumber, repeatingRequestId };
+ Message msg = getHandler().obtainMessage(REPEATING_REQUEST_ERROR,
+ /*obj*/ objArray);
+ getHandler().sendMessage(msg);
+ }
+
+ @Override
+ public void onRequestQueueEmpty() {
+ Message msg = getHandler().obtainMessage(REQUEST_QUEUE_EMPTY,
+ /* arg1 */ 0, /* arg2 */ 0);
+ getHandler().sendMessage(msg);
+ }
+
+ @Override
+ public IBinder asBinder() {
+ // This is solely intended to be used for in-process binding.
+ return null;
+ }
+
+ private Handler getHandler() {
+ if (mHandler == null) {
+ mHandler = new CallbackHandler(mHandlerThread.getLooper());
+ }
+ return mHandler;
+ }
+
+ private class CallbackHandler extends Handler {
+ public CallbackHandler(Looper l) {
+ super(l);
+ }
+
+ @Override
+ public void handleMessage(Message msg) {
+ try {
+ switch (msg.what) {
+ case CAMERA_ERROR: {
+ int errorCode = msg.arg1;
+ CaptureResultExtras resultExtras = (CaptureResultExtras) msg.obj;
+ mCallbacks.onDeviceError(errorCode, resultExtras);
+ break;
+ }
+ case CAMERA_IDLE:
+ mCallbacks.onDeviceIdle();
+ break;
+ case CAPTURE_STARTED: {
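+                            // Reassemble the 64-bit timestamp that onCaptureStarted
+                            // split across arg1 (low 32 bits) and arg2 (high 32 bits).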
+ long timestamp = msg.arg2 & 0xFFFFFFFFL;
+ timestamp = (timestamp << 32) | (msg.arg1 & 0xFFFFFFFFL);
+ CaptureResultExtras resultExtras = (CaptureResultExtras) msg.obj;
+ mCallbacks.onCaptureStarted(resultExtras, timestamp);
+ break;
+ }
+ case RESULT_RECEIVED: {
+ Object[] resultArray = (Object[]) msg.obj;
+ CameraMetadataNative result = (CameraMetadataNative) resultArray[0];
+ CaptureResultExtras resultExtras = (CaptureResultExtras) resultArray[1];
+ mCallbacks.onResultReceived(result, resultExtras);
+ break;
+ }
+ case PREPARED: {
+ int streamId = msg.arg1;
+ mCallbacks.onPrepared(streamId);
+ break;
+ }
+ case REPEATING_REQUEST_ERROR: {
+ Object[] objArray = (Object[]) msg.obj;
+ long lastFrameNumber = (Long) objArray[0];
+ int repeatingRequestId = (Integer) objArray[1];
+ mCallbacks.onRepeatingRequestError(lastFrameNumber, repeatingRequestId);
+ break;
+ }
+ case REQUEST_QUEUE_EMPTY: {
+ mCallbacks.onRequestQueueEmpty();
+ break;
+ }
+ default:
+ throw new IllegalArgumentException(
+ "Unknown callback message " + msg.what);
+ }
+ } catch (RemoteException e) {
+ throw new IllegalStateException(
+ "Received remote exception during camera callback " + msg.what, e);
+ }
+ }
+ }
+ }
+
+ public static CameraDeviceUserShim connectBinderShim(ICameraDeviceCallbacks callbacks,
+ int cameraId) {
+ if (DEBUG) {
+ Log.d(TAG, "Opening shim Camera device");
+ }
+
+ /*
+ * Put the camera open on a separate thread with its own looper; otherwise
+ * if the main thread is used then the callbacks might never get delivered
+         * (e.g. in CTS, which runs its own default looper only after tests)
+ */
+
+ CameraLooper init = new CameraLooper(cameraId);
+
+ CameraCallbackThread threadCallbacks = new CameraCallbackThread(callbacks);
+
+ // TODO: Make this async instead of blocking
+ int initErrors = init.waitForOpen(OPEN_CAMERA_TIMEOUT_MS);
+ Camera legacyCamera = init.getCamera();
+
+        // Check for errors from the old HAL initialization
+ LegacyExceptionUtils.throwOnServiceError(initErrors);
+
+ // Disable shutter sounds (this will work unconditionally) for api2 clients
+ legacyCamera.disableShutterSound();
+
+ CameraInfo info = new CameraInfo();
+ Camera.getCameraInfo(cameraId, info);
+
+ Camera.Parameters legacyParameters = null;
+ try {
+ legacyParameters = legacyCamera.getParameters();
+ } catch (RuntimeException e) {
+ throw new ServiceSpecificException(ICameraService.ERROR_INVALID_OPERATION,
+ "Unable to get initial parameters: " + e.getMessage());
+ }
+
+ CameraCharacteristics characteristics =
+ LegacyMetadataMapper.createCharacteristics(legacyParameters, info);
+ LegacyCameraDevice device = new LegacyCameraDevice(
+ cameraId, legacyCamera, characteristics, threadCallbacks);
+ return new CameraDeviceUserShim(cameraId, device, characteristics, init, threadCallbacks);
+ }
+
+ @Override
+ public void disconnect() {
+ if (DEBUG) {
+ Log.d(TAG, "disconnect called.");
+ }
+
+ if (mLegacyDevice.isClosed()) {
+ Log.w(TAG, "Cannot disconnect, device has already been closed.");
+ }
+
+ try {
+ mLegacyDevice.close();
+ } finally {
+ mCameraInit.close();
+ mCameraCallbacks.close();
+ }
+ }
+
+ @Override
+ public SubmitInfo submitRequest(CaptureRequest request, boolean streaming) {
+ if (DEBUG) {
+ Log.d(TAG, "submitRequest called.");
+ }
+ if (mLegacyDevice.isClosed()) {
+ String err = "Cannot submit request, device has been closed.";
+ Log.e(TAG, err);
+ throw new ServiceSpecificException(ICameraService.ERROR_DISCONNECTED, err);
+ }
+
+ synchronized(mConfigureLock) {
+ if (mConfiguring) {
+ String err = "Cannot submit request, configuration change in progress.";
+ Log.e(TAG, err);
+ throw new ServiceSpecificException(ICameraService.ERROR_INVALID_OPERATION, err);
+ }
+ }
+ return mLegacyDevice.submitRequest(request, streaming);
+ }
+
+ @Override
+ public SubmitInfo submitRequestList(CaptureRequest[] request, boolean streaming) {
+ if (DEBUG) {
+ Log.d(TAG, "submitRequestList called.");
+ }
+ if (mLegacyDevice.isClosed()) {
+ String err = "Cannot submit request list, device has been closed.";
+ Log.e(TAG, err);
+ throw new ServiceSpecificException(ICameraService.ERROR_DISCONNECTED, err);
+ }
+
+ synchronized(mConfigureLock) {
+ if (mConfiguring) {
+ String err = "Cannot submit request, configuration change in progress.";
+ Log.e(TAG, err);
+ throw new ServiceSpecificException(ICameraService.ERROR_INVALID_OPERATION, err);
+ }
+ }
+ return mLegacyDevice.submitRequestList(request, streaming);
+ }
+
+ @Override
+ public long cancelRequest(int requestId) {
+ if (DEBUG) {
+ Log.d(TAG, "cancelRequest called.");
+ }
+ if (mLegacyDevice.isClosed()) {
+ String err = "Cannot cancel request, device has been closed.";
+ Log.e(TAG, err);
+ throw new ServiceSpecificException(ICameraService.ERROR_DISCONNECTED, err);
+ }
+
+ synchronized(mConfigureLock) {
+ if (mConfiguring) {
+ String err = "Cannot cancel request, configuration change in progress.";
+ Log.e(TAG, err);
+ throw new ServiceSpecificException(ICameraService.ERROR_INVALID_OPERATION, err);
+ }
+ }
+ return mLegacyDevice.cancelRequest(requestId);
+ }
+
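+    // Stream configuration follows a begin/end protocol: beginConfigure() opens a
+    // configuration window, createStream()/deleteStream() edit the surface set, and
+    // endConfigure() applies the collected surfaces to the legacy device.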
+ @Override
+ public void beginConfigure() {
+ if (DEBUG) {
+ Log.d(TAG, "beginConfigure called.");
+ }
+ if (mLegacyDevice.isClosed()) {
+ String err = "Cannot begin configure, device has been closed.";
+ Log.e(TAG, err);
+ throw new ServiceSpecificException(ICameraService.ERROR_DISCONNECTED, err);
+ }
+
+ synchronized(mConfigureLock) {
+ if (mConfiguring) {
+ String err = "Cannot begin configure, configuration change already in progress.";
+ Log.e(TAG, err);
+ throw new ServiceSpecificException(ICameraService.ERROR_INVALID_OPERATION, err);
+ }
+ mConfiguring = true;
+ }
+ }
+
+ @Override
+ public void endConfigure(int operatingMode) {
+ if (DEBUG) {
+ Log.d(TAG, "endConfigure called.");
+ }
+ if (mLegacyDevice.isClosed()) {
+ String err = "Cannot end configure, device has been closed.";
+ Log.e(TAG, err);
+ synchronized(mConfigureLock) {
+ mConfiguring = false;
+ }
+ throw new ServiceSpecificException(ICameraService.ERROR_DISCONNECTED, err);
+ }
+
+ if (operatingMode != ICameraDeviceUser.NORMAL_MODE) {
+ String err = "LEGACY devices do not support this operating mode";
+ Log.e(TAG, err);
+ synchronized(mConfigureLock) {
+ mConfiguring = false;
+ }
+ throw new ServiceSpecificException(ICameraService.ERROR_ILLEGAL_ARGUMENT, err);
+ }
+
+ SparseArray<Surface> surfaces = null;
+ synchronized(mConfigureLock) {
+ if (!mConfiguring) {
+ String err = "Cannot end configure, no configuration change in progress.";
+ Log.e(TAG, err);
+ throw new ServiceSpecificException(ICameraService.ERROR_INVALID_OPERATION, err);
+ }
+ if (mSurfaces != null) {
+ surfaces = mSurfaces.clone();
+ }
+ mConfiguring = false;
+ }
+ mLegacyDevice.configureOutputs(surfaces);
+ }
+
+ @Override
+ public void deleteStream(int streamId) {
+ if (DEBUG) {
+ Log.d(TAG, "deleteStream called.");
+ }
+ if (mLegacyDevice.isClosed()) {
+ String err = "Cannot delete stream, device has been closed.";
+ Log.e(TAG, err);
+ throw new ServiceSpecificException(ICameraService.ERROR_DISCONNECTED, err);
+ }
+
+ synchronized(mConfigureLock) {
+ if (!mConfiguring) {
+ String err = "Cannot delete stream, no configuration change in progress.";
+ Log.e(TAG, err);
+ throw new ServiceSpecificException(ICameraService.ERROR_INVALID_OPERATION, err);
+ }
+ int index = mSurfaces.indexOfKey(streamId);
+ if (index < 0) {
+ String err = "Cannot delete stream, stream id " + streamId + " doesn't exist.";
+ Log.e(TAG, err);
+ throw new ServiceSpecificException(ICameraService.ERROR_ILLEGAL_ARGUMENT, err);
+ }
+ mSurfaces.removeAt(index);
+ }
+ }
+
+ @Override
+ public int createStream(OutputConfiguration outputConfiguration) {
+ if (DEBUG) {
+ Log.d(TAG, "createStream called.");
+ }
+ if (mLegacyDevice.isClosed()) {
+ String err = "Cannot create stream, device has been closed.";
+ Log.e(TAG, err);
+ throw new ServiceSpecificException(ICameraService.ERROR_DISCONNECTED, err);
+ }
+
+ synchronized(mConfigureLock) {
+ if (!mConfiguring) {
+ String err = "Cannot create stream, beginConfigure hasn't been called yet.";
+ Log.e(TAG, err);
+ throw new ServiceSpecificException(ICameraService.ERROR_INVALID_OPERATION, err);
+ }
+ if (outputConfiguration.getRotation() != OutputConfiguration.ROTATION_0) {
+ String err = "Cannot create stream, stream rotation is not supported.";
+ Log.e(TAG, err);
+ throw new ServiceSpecificException(ICameraService.ERROR_ILLEGAL_ARGUMENT, err);
+ }
+ int id = ++mSurfaceIdCounter;
+ mSurfaces.put(id, outputConfiguration.getSurface());
+ return id;
+ }
+ }
+
+ @Override
+    public void finalizeOutputConfigurations(int streamId, OutputConfiguration config) {
+ String err = "Finalizing output configuration is not supported on legacy devices";
+ Log.e(TAG, err);
+ throw new ServiceSpecificException(ICameraService.ERROR_INVALID_OPERATION, err);
+ }
+
+ @Override
+ public int createInputStream(int width, int height, int format) {
+ String err = "Creating input stream is not supported on legacy devices";
+ Log.e(TAG, err);
+ throw new ServiceSpecificException(ICameraService.ERROR_INVALID_OPERATION, err);
+ }
+
+ @Override
+ public Surface getInputSurface() {
+ String err = "Getting input surface is not supported on legacy devices";
+ Log.e(TAG, err);
+ throw new ServiceSpecificException(ICameraService.ERROR_INVALID_OPERATION, err);
+ }
+
+ @Override
+ public CameraMetadataNative createDefaultRequest(int templateId) {
+ if (DEBUG) {
+ Log.d(TAG, "createDefaultRequest called.");
+ }
+ if (mLegacyDevice.isClosed()) {
+ String err = "Cannot create default request, device has been closed.";
+ Log.e(TAG, err);
+ throw new ServiceSpecificException(ICameraService.ERROR_DISCONNECTED, err);
+ }
+
+ CameraMetadataNative template;
+ try {
+ template =
+ LegacyMetadataMapper.createRequestTemplate(mCameraCharacteristics, templateId);
+ } catch (IllegalArgumentException e) {
+ String err = "createDefaultRequest - invalid templateId specified";
+ Log.e(TAG, err);
+ throw new ServiceSpecificException(ICameraService.ERROR_ILLEGAL_ARGUMENT, err);
+ }
+
+ return template;
+ }
+
+ @Override
+ public CameraMetadataNative getCameraInfo() {
+ if (DEBUG) {
+ Log.d(TAG, "getCameraInfo called.");
+ }
+ // TODO: implement getCameraInfo.
+ Log.e(TAG, "getCameraInfo unimplemented.");
+ return null;
+ }
+
+ @Override
+ public void waitUntilIdle() throws RemoteException {
+ if (DEBUG) {
+ Log.d(TAG, "waitUntilIdle called.");
+ }
+ if (mLegacyDevice.isClosed()) {
+ String err = "Cannot wait until idle, device has been closed.";
+ Log.e(TAG, err);
+ throw new ServiceSpecificException(ICameraService.ERROR_DISCONNECTED, err);
+ }
+
+ synchronized(mConfigureLock) {
+ if (mConfiguring) {
+ String err = "Cannot wait until idle, configuration change in progress.";
+ Log.e(TAG, err);
+ throw new ServiceSpecificException(ICameraService.ERROR_INVALID_OPERATION, err);
+ }
+ }
+ mLegacyDevice.waitUntilIdle();
+ }
+
+ @Override
+ public long flush() {
+ if (DEBUG) {
+ Log.d(TAG, "flush called.");
+ }
+ if (mLegacyDevice.isClosed()) {
+ String err = "Cannot flush, device has been closed.";
+ Log.e(TAG, err);
+ throw new ServiceSpecificException(ICameraService.ERROR_DISCONNECTED, err);
+ }
+
+ synchronized(mConfigureLock) {
+ if (mConfiguring) {
+ String err = "Cannot flush, configuration change in progress.";
+ Log.e(TAG, err);
+ throw new ServiceSpecificException(ICameraService.ERROR_INVALID_OPERATION, err);
+ }
+ }
+ return mLegacyDevice.flush();
+ }
+
+ public void prepare(int streamId) {
+ if (DEBUG) {
+ Log.d(TAG, "prepare called.");
+ }
+ if (mLegacyDevice.isClosed()) {
+ String err = "Cannot prepare stream, device has been closed.";
+ Log.e(TAG, err);
+ throw new ServiceSpecificException(ICameraService.ERROR_DISCONNECTED, err);
+ }
+
+ // LEGACY doesn't support actual prepare, just signal success right away
+ mCameraCallbacks.onPrepared(streamId);
+ }
+
+ public void prepare2(int maxCount, int streamId) {
+ // We don't support this in LEGACY mode.
+ prepare(streamId);
+ }
+
+ public void tearDown(int streamId) {
+ if (DEBUG) {
+ Log.d(TAG, "tearDown called.");
+ }
+ if (mLegacyDevice.isClosed()) {
+ String err = "Cannot tear down stream, device has been closed.";
+ Log.e(TAG, err);
+ throw new ServiceSpecificException(ICameraService.ERROR_DISCONNECTED, err);
+ }
+
+ // LEGACY doesn't support actual teardown, so just a no-op
+ }
+
+ @Override
+ public IBinder asBinder() {
+ // This is solely intended to be used for in-process binding.
+ return null;
+ }
+}
diff --git a/android/hardware/camera2/legacy/CaptureCollector.java b/android/hardware/camera2/legacy/CaptureCollector.java
new file mode 100644
index 00000000..113927c4
--- /dev/null
+++ b/android/hardware/camera2/legacy/CaptureCollector.java
@@ -0,0 +1,673 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.legacy;
+
+import android.hardware.camera2.impl.CameraDeviceImpl;
+import android.util.Log;
+import android.util.MutableLong;
+import android.util.Pair;
+import android.view.Surface;
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.TreeSet;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.locks.Condition;
+import java.util.concurrent.locks.ReentrantLock;
+
+/**
+ * Collect timestamps and state for each {@link android.hardware.camera2.CaptureRequest}
+ * as it passes through the Legacy camera pipeline.
+ */
+public class CaptureCollector {
+ private static final String TAG = "CaptureCollector";
+
+ private static final boolean DEBUG = false;
+
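+    // Completion of each output needs two events, a produced buffer and a timestamp;
+    // the FLAG_RECEIVED_ALL_* masks combine both flags for one output type.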
+ private static final int FLAG_RECEIVED_JPEG = 1;
+ private static final int FLAG_RECEIVED_JPEG_TS = 2;
+ private static final int FLAG_RECEIVED_PREVIEW = 4;
+ private static final int FLAG_RECEIVED_PREVIEW_TS = 8;
+ private static final int FLAG_RECEIVED_ALL_JPEG = FLAG_RECEIVED_JPEG | FLAG_RECEIVED_JPEG_TS;
+ private static final int FLAG_RECEIVED_ALL_PREVIEW = FLAG_RECEIVED_PREVIEW |
+ FLAG_RECEIVED_PREVIEW_TS;
+
+ private static final int MAX_JPEGS_IN_FLIGHT = 1;
+
+    private class CaptureHolder implements Comparable<CaptureHolder> {
+ private final RequestHolder mRequest;
+ private final LegacyRequest mLegacy;
+ public final boolean needsJpeg;
+ public final boolean needsPreview;
+
+ private long mTimestamp = 0;
+ private int mReceivedFlags = 0;
+ private boolean mHasStarted = false;
+ private boolean mFailedJpeg = false;
+ private boolean mFailedPreview = false;
+ private boolean mCompleted = false;
+ private boolean mPreviewCompleted = false;
+
+ public CaptureHolder(RequestHolder request, LegacyRequest legacyHolder) {
+ mRequest = request;
+ mLegacy = legacyHolder;
+ needsJpeg = request.hasJpegTargets();
+ needsPreview = request.hasPreviewTargets();
+ }
+
+ public boolean isPreviewCompleted() {
+ return (mReceivedFlags & FLAG_RECEIVED_ALL_PREVIEW) == FLAG_RECEIVED_ALL_PREVIEW;
+ }
+
+ public boolean isJpegCompleted() {
+ return (mReceivedFlags & FLAG_RECEIVED_ALL_JPEG) == FLAG_RECEIVED_ALL_JPEG;
+ }
+
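+        // An output that is not needed never has its flags set, so its "completed"
+        // check stays false and matches the corresponding needsX == false.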
+ public boolean isCompleted() {
+ return (needsJpeg == isJpegCompleted()) && (needsPreview == isPreviewCompleted());
+ }
+
+ public void tryComplete() {
+ if (!mPreviewCompleted && needsPreview && isPreviewCompleted()) {
+ CaptureCollector.this.onPreviewCompleted();
+ mPreviewCompleted = true;
+ }
+
+ if (isCompleted() && !mCompleted) {
+ if (mFailedPreview || mFailedJpeg) {
+ if (!mHasStarted) {
+ // Send a request error if the capture has not yet started.
+ mRequest.failRequest();
+ CaptureCollector.this.mDeviceState.setCaptureStart(mRequest, mTimestamp,
+ CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_REQUEST);
+ } else {
+ // Send buffer dropped errors for each pending buffer if the request has
+ // started.
+                    for (Surface targetSurface : mRequest.getRequest().getTargets()) {
+ try {
+ if (mRequest.jpegType(targetSurface)) {
+ if (mFailedJpeg) {
+ CaptureCollector.this.mDeviceState.setCaptureResult(mRequest,
+ /*result*/null,
+ CameraDeviceImpl.CameraDeviceCallbacks.
+ ERROR_CAMERA_BUFFER,
+ targetSurface);
+ }
+ } else {
+ // preview buffer
+ if (mFailedPreview) {
+ CaptureCollector.this.mDeviceState.setCaptureResult(mRequest,
+ /*result*/null,
+ CameraDeviceImpl.CameraDeviceCallbacks.
+ ERROR_CAMERA_BUFFER,
+ targetSurface);
+ }
+ }
+ } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
+ Log.e(TAG, "Unexpected exception when querying Surface: " + e);
+ }
+ }
+ }
+ }
+ CaptureCollector.this.onRequestCompleted(CaptureHolder.this);
+ mCompleted = true;
+ }
+ }
+
+ public void setJpegTimestamp(long timestamp) {
+ if (DEBUG) {
+ Log.d(TAG, "setJpegTimestamp - called for request " + mRequest.getRequestId());
+ }
+ if (!needsJpeg) {
+ throw new IllegalStateException(
+ "setJpegTimestamp called for capture with no jpeg targets.");
+ }
+ if (isCompleted()) {
+ throw new IllegalStateException(
+ "setJpegTimestamp called on already completed request.");
+ }
+
+ mReceivedFlags |= FLAG_RECEIVED_JPEG_TS;
+
+ if (mTimestamp == 0) {
+ mTimestamp = timestamp;
+ }
+
+ if (!mHasStarted) {
+ mHasStarted = true;
+ CaptureCollector.this.mDeviceState.setCaptureStart(mRequest, mTimestamp,
+ CameraDeviceState.NO_CAPTURE_ERROR);
+ }
+
+ tryComplete();
+ }
+
+ public void setJpegProduced() {
+ if (DEBUG) {
+ Log.d(TAG, "setJpegProduced - called for request " + mRequest.getRequestId());
+ }
+ if (!needsJpeg) {
+ throw new IllegalStateException(
+ "setJpegProduced called for capture with no jpeg targets.");
+ }
+ if (isCompleted()) {
+ throw new IllegalStateException(
+ "setJpegProduced called on already completed request.");
+ }
+
+ mReceivedFlags |= FLAG_RECEIVED_JPEG;
+ tryComplete();
+ }
+
+ public void setJpegFailed() {
+ if (DEBUG) {
+ Log.d(TAG, "setJpegFailed - called for request " + mRequest.getRequestId());
+ }
+ if (!needsJpeg || isJpegCompleted()) {
+ return;
+ }
+ mFailedJpeg = true;
+
+ mReceivedFlags |= FLAG_RECEIVED_JPEG;
+ mReceivedFlags |= FLAG_RECEIVED_JPEG_TS;
+ tryComplete();
+ }
+
+ public void setPreviewTimestamp(long timestamp) {
+ if (DEBUG) {
+ Log.d(TAG, "setPreviewTimestamp - called for request " + mRequest.getRequestId());
+ }
+ if (!needsPreview) {
+ throw new IllegalStateException(
+ "setPreviewTimestamp called for capture with no preview targets.");
+ }
+ if (isCompleted()) {
+ throw new IllegalStateException(
+ "setPreviewTimestamp called on already completed request.");
+ }
+
+ mReceivedFlags |= FLAG_RECEIVED_PREVIEW_TS;
+
+ if (mTimestamp == 0) {
+ mTimestamp = timestamp;
+ }
+
+ if (!needsJpeg) {
+ if (!mHasStarted) {
+ mHasStarted = true;
+ CaptureCollector.this.mDeviceState.setCaptureStart(mRequest, mTimestamp,
+ CameraDeviceState.NO_CAPTURE_ERROR);
+ }
+ }
+
+ tryComplete();
+ }
+
+ public void setPreviewProduced() {
+ if (DEBUG) {
+ Log.d(TAG, "setPreviewProduced - called for request " + mRequest.getRequestId());
+ }
+ if (!needsPreview) {
+ throw new IllegalStateException(
+ "setPreviewProduced called for capture with no preview targets.");
+ }
+ if (isCompleted()) {
+ throw new IllegalStateException(
+ "setPreviewProduced called on already completed request.");
+ }
+
+ mReceivedFlags |= FLAG_RECEIVED_PREVIEW;
+ tryComplete();
+ }
+
+ public void setPreviewFailed() {
+ if (DEBUG) {
+ Log.d(TAG, "setPreviewFailed - called for request " + mRequest.getRequestId());
+ }
+ if (!needsPreview || isPreviewCompleted()) {
+ return;
+ }
+ mFailedPreview = true;
+
+ mReceivedFlags |= FLAG_RECEIVED_PREVIEW;
+ mReceivedFlags |= FLAG_RECEIVED_PREVIEW_TS;
+ tryComplete();
+ }
+
+ // Comparison and equals based on frame number.
+ @Override
+ public int compareTo(CaptureHolder captureHolder) {
+            return Long.compare(mRequest.getFrameNumber(),
+                    captureHolder.mRequest.getFrameNumber());
+ }
+
+ // Comparison and equals based on frame number.
+ @Override
+ public boolean equals(Object o) {
+ return o instanceof CaptureHolder && compareTo((CaptureHolder) o) == 0;
+ }
+ }
+
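+    // Each output type is tracked with two queues: one drained when its capture starts
+    // (the timestamp arrives) and one drained when its buffer is produced, since the
+    // two events are reported through separate callbacks.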
+ private final TreeSet<CaptureHolder> mActiveRequests;
+ private final ArrayDeque<CaptureHolder> mJpegCaptureQueue;
+ private final ArrayDeque<CaptureHolder> mJpegProduceQueue;
+ private final ArrayDeque<CaptureHolder> mPreviewCaptureQueue;
+ private final ArrayDeque<CaptureHolder> mPreviewProduceQueue;
+ private final ArrayList<CaptureHolder> mCompletedRequests = new ArrayList<>();
+
+ private final ReentrantLock mLock = new ReentrantLock();
+ private final Condition mIsEmpty;
+ private final Condition mPreviewsEmpty;
+ private final Condition mNotFull;
+ private final CameraDeviceState mDeviceState;
+ private int mInFlight = 0;
+ private int mInFlightPreviews = 0;
+ private final int mMaxInFlight;
+
+ /**
+ * Create a new {@link CaptureCollector} that can modify the given {@link CameraDeviceState}.
+ *
+ * @param maxInFlight max allowed in-flight requests.
+ * @param deviceState the {@link CameraDeviceState} to update as requests are processed.
+ */
+ public CaptureCollector(int maxInFlight, CameraDeviceState deviceState) {
+ mMaxInFlight = maxInFlight;
+ mJpegCaptureQueue = new ArrayDeque<>(MAX_JPEGS_IN_FLIGHT);
+ mJpegProduceQueue = new ArrayDeque<>(MAX_JPEGS_IN_FLIGHT);
+ mPreviewCaptureQueue = new ArrayDeque<>(mMaxInFlight);
+ mPreviewProduceQueue = new ArrayDeque<>(mMaxInFlight);
+ mActiveRequests = new TreeSet<>();
+ mIsEmpty = mLock.newCondition();
+ mNotFull = mLock.newCondition();
+ mPreviewsEmpty = mLock.newCondition();
+ mDeviceState = deviceState;
+ }
+
+ /**
+ * Queue a new request.
+ *
+ * <p>
+ * For requests that use the Camera1 API preview output stream, this will block if there are
+ * already {@code maxInFlight} requests in progress (until at least one prior request has
+ * completed). For requests that use the Camera1 API jpeg callbacks, this will block until
+ * all prior requests have been completed to avoid stopping preview for
+ * {@link android.hardware.Camera#takePicture} before prior preview requests have been
+ * completed.
+ * </p>
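+     *
+     * <p>Illustrative call (a sketch; {@code holder} and {@code legacy} are assumed to
+     * come from the request pipeline):</p>
+     *
+     * <pre>{@code
+     * boolean queued = collector.queueRequest(holder, legacy, 100, TimeUnit.MILLISECONDS);
+     * if (!queued) {
+     *     // Timed out waiting for in-flight requests to drain.
+     * }
+     * }</pre>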
+ * @param holder the {@link RequestHolder} for this request.
+ * @param legacy the {@link LegacyRequest} for this request; this will not be mutated.
+ * @param timeout a timeout to use for this call.
+ * @param unit the units to use for the timeout.
+ * @return {@code false} if this method timed out.
+ * @throws InterruptedException if this thread is interrupted.
+ */
+ public boolean queueRequest(RequestHolder holder, LegacyRequest legacy, long timeout,
+ TimeUnit unit)
+ throws InterruptedException {
+ CaptureHolder h = new CaptureHolder(holder, legacy);
+ long nanos = unit.toNanos(timeout);
+ final ReentrantLock lock = this.mLock;
+ lock.lock();
+ try {
+ if (DEBUG) {
+ Log.d(TAG, "queueRequest for request " + holder.getRequestId() +
+ " - " + mInFlight + " requests remain in flight.");
+ }
+
+ if (!(h.needsJpeg || h.needsPreview)) {
+ throw new IllegalStateException("Request must target at least one output surface!");
+ }
+
+ if (h.needsJpeg) {
+ // Wait for all current requests to finish before queueing jpeg.
+ while (mInFlight > 0) {
+ if (nanos <= 0) {
+ return false;
+ }
+ nanos = mIsEmpty.awaitNanos(nanos);
+ }
+ mJpegCaptureQueue.add(h);
+ mJpegProduceQueue.add(h);
+ }
+ if (h.needsPreview) {
+ while (mInFlight >= mMaxInFlight) {
+ if (nanos <= 0) {
+ return false;
+ }
+ nanos = mNotFull.awaitNanos(nanos);
+ }
+ mPreviewCaptureQueue.add(h);
+ mPreviewProduceQueue.add(h);
+ mInFlightPreviews++;
+ }
+ mActiveRequests.add(h);
+
+ mInFlight++;
+ return true;
+ } finally {
+ lock.unlock();
+ }
+ }
+
+ /**
+     * Wait for all queued requests to complete.
+ *
+ * @param timeout a timeout to use for this call.
+ * @param unit the units to use for the timeout.
+ * @return {@code false} if this method timed out.
+ * @throws InterruptedException if this thread is interrupted.
+ */
+ public boolean waitForEmpty(long timeout, TimeUnit unit) throws InterruptedException {
+ long nanos = unit.toNanos(timeout);
+ final ReentrantLock lock = this.mLock;
+ lock.lock();
+ try {
+ while (mInFlight > 0) {
+ if (nanos <= 0) {
+ return false;
+ }
+ nanos = mIsEmpty.awaitNanos(nanos);
+ }
+ return true;
+ } finally {
+ lock.unlock();
+ }
+ }
+
+ /**
+     * Wait for all queued requests that use the Camera1 API preview output to complete.
+ *
+ * @param timeout a timeout to use for this call.
+ * @param unit the units to use for the timeout.
+ * @return {@code false} if this method timed out.
+ * @throws InterruptedException if this thread is interrupted.
+ */
+ public boolean waitForPreviewsEmpty(long timeout, TimeUnit unit) throws InterruptedException {
+ long nanos = unit.toNanos(timeout);
+ final ReentrantLock lock = this.mLock;
+ lock.lock();
+ try {
+ while (mInFlightPreviews > 0) {
+ if (nanos <= 0) {
+ return false;
+ }
+ nanos = mPreviewsEmpty.awaitNanos(nanos);
+ }
+ return true;
+ } finally {
+ lock.unlock();
+ }
+ }
+
+ /**
+ * Wait for the specified request to be completed (all buffers available).
+ *
+ * <p>May not wait for the same request more than once, since a successful wait
+ * will erase the history of that request.</p>
+ *
+ * @param holder the {@link RequestHolder} for this request.
+ * @param timeout a timeout to use for this call.
+ * @param unit the units to use for the timeout.
+     * @param timestamp receives the start timestamp of the completed request, in nanoseconds.
+ *
+ * @return {@code false} if this method timed out.
+ *
+ * @throws InterruptedException if this thread is interrupted.
+ */
+ public boolean waitForRequestCompleted(RequestHolder holder, long timeout, TimeUnit unit,
+ MutableLong timestamp)
+ throws InterruptedException {
+ long nanos = unit.toNanos(timeout);
+ final ReentrantLock lock = this.mLock;
+ lock.lock();
+ try {
+ while (!removeRequestIfCompleted(holder, /*out*/timestamp)) {
+ if (nanos <= 0) {
+ return false;
+ }
+ nanos = mNotFull.awaitNanos(nanos);
+ }
+ return true;
+ } finally {
+ lock.unlock();
+ }
+ }
+
+ private boolean removeRequestIfCompleted(RequestHolder holder, MutableLong timestamp) {
+ int i = 0;
+ for (CaptureHolder h : mCompletedRequests) {
+ if (h.mRequest.equals(holder)) {
+ timestamp.value = h.mTimestamp;
+ mCompletedRequests.remove(i);
+ return true;
+ }
+ i++;
+ }
+
+ return false;
+ }
+
+ /**
+ * Called to alert the {@link CaptureCollector} that the jpeg capture has begun.
+ *
+ * @param timestamp the time of the jpeg capture.
+ * @return the {@link RequestHolder} for the request associated with this capture.
+ */
+ public RequestHolder jpegCaptured(long timestamp) {
+ final ReentrantLock lock = this.mLock;
+ lock.lock();
+ try {
+ CaptureHolder h = mJpegCaptureQueue.poll();
+ if (h == null) {
+ Log.w(TAG, "jpegCaptured called with no jpeg request on queue!");
+ return null;
+ }
+ h.setJpegTimestamp(timestamp);
+ return h.mRequest;
+ } finally {
+ lock.unlock();
+ }
+ }
+
+ /**
+ * Called to alert the {@link CaptureCollector} that the jpeg capture has completed.
+ *
+ * @return a pair containing the {@link RequestHolder} and the timestamp of the capture.
+ */
+ public Pair<RequestHolder, Long> jpegProduced() {
+ final ReentrantLock lock = this.mLock;
+ lock.lock();
+ try {
+ CaptureHolder h = mJpegProduceQueue.poll();
+ if (h == null) {
+ Log.w(TAG, "jpegProduced called with no jpeg request on queue!");
+ return null;
+ }
+ h.setJpegProduced();
+ return new Pair<>(h.mRequest, h.mTimestamp);
+ } finally {
+ lock.unlock();
+ }
+ }
+
+ /**
+ * Check if there are any pending capture requests that use the Camera1 API preview output.
+ *
+ * @return {@code true} if there are pending preview requests.
+ */
+ public boolean hasPendingPreviewCaptures() {
+ final ReentrantLock lock = this.mLock;
+ lock.lock();
+ try {
+ return !mPreviewCaptureQueue.isEmpty();
+ } finally {
+ lock.unlock();
+ }
+ }
+
+ /**
+ * Called to alert the {@link CaptureCollector} that the preview capture has begun.
+ *
+ * @param timestamp the time of the preview capture.
+ * @return a pair containing the {@link RequestHolder} and the timestamp of the capture.
+ */
+ public Pair<RequestHolder, Long> previewCaptured(long timestamp) {
+ final ReentrantLock lock = this.mLock;
+ lock.lock();
+ try {
+ CaptureHolder h = mPreviewCaptureQueue.poll();
+ if (h == null) {
+ if (DEBUG) {
+ Log.d(TAG, "previewCaptured called with no preview request on queue!");
+ }
+ return null;
+ }
+ h.setPreviewTimestamp(timestamp);
+ return new Pair<>(h.mRequest, h.mTimestamp);
+ } finally {
+ lock.unlock();
+ }
+ }
+
+ /**
+ * Called to alert the {@link CaptureCollector} that the preview capture has completed.
+ *
+ * @return the {@link RequestHolder} for the request associated with this capture.
+ */
+ public RequestHolder previewProduced() {
+ final ReentrantLock lock = this.mLock;
+ lock.lock();
+ try {
+ CaptureHolder h = mPreviewProduceQueue.poll();
+ if (h == null) {
+ Log.w(TAG, "previewProduced called with no preview request on queue!");
+ return null;
+ }
+ h.setPreviewProduced();
+ return h.mRequest;
+ } finally {
+ lock.unlock();
+ }
+ }
+
+ /**
+ * Called to alert the {@link CaptureCollector} that the next pending preview capture has failed.
+ */
+ public void failNextPreview() {
+ final ReentrantLock lock = this.mLock;
+ lock.lock();
+ try {
+ CaptureHolder h1 = mPreviewCaptureQueue.peek();
+ CaptureHolder h2 = mPreviewProduceQueue.peek();
+
+ // Find the request with the lowest frame number.
+ CaptureHolder h = (h1 == null) ? h2 :
+ ((h2 == null) ? h1 :
+ ((h1.compareTo(h2) <= 0) ? h1 :
+ h2));
+
+ if (h != null) {
+ mPreviewCaptureQueue.remove(h);
+ mPreviewProduceQueue.remove(h);
+ mActiveRequests.remove(h);
+ h.setPreviewFailed();
+ }
+ } finally {
+ lock.unlock();
+ }
+ }
+
+ /**
+ * Called to alert the {@link CaptureCollector} that the next pending jpeg capture has failed.
+ */
+ public void failNextJpeg() {
+ final ReentrantLock lock = this.mLock;
+ lock.lock();
+ try {
+ CaptureHolder h1 = mJpegCaptureQueue.peek();
+ CaptureHolder h2 = mJpegProduceQueue.peek();
+
+ // Find the request with the lowest frame number.
+ CaptureHolder h = (h1 == null) ? h2 :
+ ((h2 == null) ? h1 :
+ ((h1.compareTo(h2) <= 0) ? h1 :
+ h2));
+
+ if (h != null) {
+ mJpegCaptureQueue.remove(h);
+ mJpegProduceQueue.remove(h);
+ mActiveRequests.remove(h);
+ h.setJpegFailed();
+ }
+ } finally {
+ lock.unlock();
+ }
+ }
+
+ /**
+     * Called to alert the {@link CaptureCollector} that all pending captures have failed.
+ */
+ public void failAll() {
+ final ReentrantLock lock = this.mLock;
+ lock.lock();
+ try {
+ CaptureHolder h;
+ while ((h = mActiveRequests.pollFirst()) != null) {
+ h.setPreviewFailed();
+ h.setJpegFailed();
+ }
+ mPreviewCaptureQueue.clear();
+ mPreviewProduceQueue.clear();
+ mJpegCaptureQueue.clear();
+ mJpegProduceQueue.clear();
+ } finally {
+ lock.unlock();
+ }
+ }
+
+ private void onPreviewCompleted() {
+ mInFlightPreviews--;
+ if (mInFlightPreviews < 0) {
+ throw new IllegalStateException(
+ "More preview captures completed than requests queued.");
+ }
+ if (mInFlightPreviews == 0) {
+ mPreviewsEmpty.signalAll();
+ }
+ }
+
+ private void onRequestCompleted(CaptureHolder capture) {
+ RequestHolder request = capture.mRequest;
+
+ mInFlight--;
+ if (DEBUG) {
+ Log.d(TAG, "Completed request " + request.getRequestId() +
+ ", " + mInFlight + " requests remain in flight.");
+ }
+ if (mInFlight < 0) {
+ throw new IllegalStateException(
+ "More captures completed than requests queued.");
+ }
+
+ mCompletedRequests.add(capture);
+ mActiveRequests.remove(capture);
+
+ mNotFull.signalAll();
+ if (mInFlight == 0) {
+ mIsEmpty.signalAll();
+ }
+ }
+}
diff --git a/android/hardware/camera2/legacy/GLThreadManager.java b/android/hardware/camera2/legacy/GLThreadManager.java
new file mode 100644
index 00000000..152d82d5
--- /dev/null
+++ b/android/hardware/camera2/legacy/GLThreadManager.java
@@ -0,0 +1,264 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.legacy;
+
+import android.graphics.SurfaceTexture;
+import android.hardware.camera2.impl.CameraDeviceImpl;
+import android.os.ConditionVariable;
+import android.os.Handler;
+import android.os.Message;
+import android.util.Log;
+import android.util.Pair;
+import android.util.Size;
+import android.view.Surface;
+
+import java.util.Collection;
+
+import static com.android.internal.util.Preconditions.*;
+
+/**
+ * GLThreadManager handles the thread used for rendering into the configured output surfaces.
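+ *
+ * <p>Typical lifecycle (a sketch): construct, then {@link #start()} and
+ * {@link #waitUntilStarted()}; configure with {@link #setConfigurationAndWait} before
+ * queueing frames with {@link #queueNewFrame()}; call {@link #quit()} when finished.</p>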
+ */
+public class GLThreadManager {
+ private final String TAG;
+ private static final boolean DEBUG = false;
+
+ private static final int MSG_NEW_CONFIGURATION = 1;
+ private static final int MSG_NEW_FRAME = 2;
+ private static final int MSG_CLEANUP = 3;
+ private static final int MSG_DROP_FRAMES = 4;
+ private static final int MSG_ALLOW_FRAMES = 5;
+
+ private CaptureCollector mCaptureCollector;
+
+ private final CameraDeviceState mDeviceState;
+
+ private final SurfaceTextureRenderer mTextureRenderer;
+
+ private final RequestHandlerThread mGLHandlerThread;
+
+ private final RequestThreadManager.FpsCounter mPrevCounter =
+ new RequestThreadManager.FpsCounter("GL Preview Producer");
+
+ /**
+ * Container object for Configure messages.
+ */
+ private static class ConfigureHolder {
+ public final ConditionVariable condition;
+ public final Collection<Pair<Surface, Size>> surfaces;
+ public final CaptureCollector collector;
+
+ public ConfigureHolder(ConditionVariable condition, Collection<Pair<Surface,
+ Size>> surfaces, CaptureCollector collector) {
+ this.condition = condition;
+ this.surfaces = surfaces;
+ this.collector = collector;
+ }
+ }
+
+ private final Handler.Callback mGLHandlerCb = new Handler.Callback() {
+ private boolean mCleanup = false;
+ private boolean mConfigured = false;
+ private boolean mDroppingFrames = false;
+
+ @SuppressWarnings("unchecked")
+ @Override
+ public boolean handleMessage(Message msg) {
+ if (mCleanup) {
+ return true;
+ }
+ try {
+ switch (msg.what) {
+ case MSG_NEW_CONFIGURATION:
+ ConfigureHolder configure = (ConfigureHolder) msg.obj;
+ mTextureRenderer.cleanupEGLContext();
+ mTextureRenderer.configureSurfaces(configure.surfaces);
+ mCaptureCollector = checkNotNull(configure.collector);
+ configure.condition.open();
+ mConfigured = true;
+ break;
+ case MSG_NEW_FRAME:
+ if (mDroppingFrames) {
+ Log.w(TAG, "Ignoring frame.");
+ break;
+ }
+ if (DEBUG) {
+ mPrevCounter.countAndLog();
+ }
+ if (!mConfigured) {
+ Log.e(TAG, "Dropping frame, EGL context not configured!");
+ }
+ mTextureRenderer.drawIntoSurfaces(mCaptureCollector);
+ break;
+ case MSG_CLEANUP:
+ mTextureRenderer.cleanupEGLContext();
+ mCleanup = true;
+ mConfigured = false;
+ break;
+ case MSG_DROP_FRAMES:
+ mDroppingFrames = true;
+ break;
+ case MSG_ALLOW_FRAMES:
+ mDroppingFrames = false;
+ break;
+ case RequestHandlerThread.MSG_POKE_IDLE_HANDLER:
+ // OK: Ignore message.
+ break;
+ default:
+ Log.e(TAG, "Unhandled message " + msg.what + " on GLThread.");
+ break;
+ }
+ } catch (Exception e) {
+ Log.e(TAG, "Received exception on GL render thread: ", e);
+ mDeviceState.setError(CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
+ }
+ return true;
+ }
+ };
+
+ /**
+ * Create a new GL thread and renderer.
+ *
+ * @param cameraId the camera id for this thread.
+ * @param facing direction the camera is facing.
+ * @param state {@link CameraDeviceState} to use for error handling.
+ */
+ public GLThreadManager(int cameraId, int facing, CameraDeviceState state) {
+ mTextureRenderer = new SurfaceTextureRenderer(facing);
+ TAG = String.format("CameraDeviceGLThread-%d", cameraId);
+ mGLHandlerThread = new RequestHandlerThread(TAG, mGLHandlerCb);
+ mDeviceState = state;
+ }
+
+ /**
+ * Start the thread.
+ *
+ * <p>
+ * This must be called before queueing new frames.
+ * </p>
+ */
+ public void start() {
+ mGLHandlerThread.start();
+ }
+
+ /**
+ * Wait until the thread has started.
+ */
+ public void waitUntilStarted() {
+ mGLHandlerThread.waitUntilStarted();
+ }
+
+ /**
+ * Quit the thread.
+ *
+ * <p>
+ * No further methods can be called after this.
+ * </p>
+ */
+ public void quit() {
+ Handler handler = mGLHandlerThread.getHandler();
+ handler.sendMessageAtFrontOfQueue(handler.obtainMessage(MSG_CLEANUP));
+ mGLHandlerThread.quitSafely();
+ try {
+ mGLHandlerThread.join();
+ } catch (InterruptedException e) {
+ Log.e(TAG, String.format("Thread %s (%d) interrupted while quitting.",
+ mGLHandlerThread.getName(), mGLHandlerThread.getId()));
+ }
+ }
+
+ /**
+ * Queue a new call to draw into the surfaces specified in the next available preview
+ * request from the {@link CaptureCollector} passed to
+     * {@link #setConfigurationAndWait(java.util.Collection, CaptureCollector)}.
+ */
+ public void queueNewFrame() {
+ Handler handler = mGLHandlerThread.getHandler();
+
+        /*
+ * Avoid queuing more than one new frame. If we are not consuming faster than frames
+ * are produced, drop frames rather than allowing the queue to back up.
+ */
+ if (!handler.hasMessages(MSG_NEW_FRAME)) {
+ handler.sendMessage(handler.obtainMessage(MSG_NEW_FRAME));
+ } else {
+ Log.e(TAG, "GLThread dropping frame. Not consuming frames quickly enough!");
+ }
+ }
+
+ /**
+ * Configure the GL renderer for the given set of output surfaces, and block until
+ * this configuration has been applied.
+ *
+ * @param surfaces a collection of pairs of {@link android.view.Surface}s and their
+ * corresponding sizes to configure.
+ * @param collector a {@link CaptureCollector} to retrieve requests from.
+ */
+ public void setConfigurationAndWait(Collection<Pair<Surface, Size>> surfaces,
+ CaptureCollector collector) {
+ checkNotNull(collector, "collector must not be null");
+ Handler handler = mGLHandlerThread.getHandler();
+
+ final ConditionVariable condition = new ConditionVariable(/*closed*/false);
+ ConfigureHolder configure = new ConfigureHolder(condition, surfaces, collector);
+
+ Message m = handler.obtainMessage(MSG_NEW_CONFIGURATION, /*arg1*/0, /*arg2*/0, configure);
+ handler.sendMessage(m);
+
+ // Block until configuration applied.
+ condition.block();
+ }
+
+ /**
+ * Get the underlying surface to produce frames from.
+ *
+ * <p>
+     * This returns the texture whose frames are drawn into the set of surfaces passed in for
+     * each frame. This method should only be called after a call to
+     * {@link #setConfigurationAndWait(java.util.Collection, CaptureCollector)}. Calling this
+     * before the first call to
+     * {@link #setConfigurationAndWait(java.util.Collection, CaptureCollector)}, after
+     * {@link #quit()}, or concurrently to one of these calls may result in an invalid
+     * {@link android.graphics.SurfaceTexture} being returned.
+ * </p>
+ *
+ * @return an {@link android.graphics.SurfaceTexture} to draw to.
+ */
+ public SurfaceTexture getCurrentSurfaceTexture() {
+ return mTextureRenderer.getSurfaceTexture();
+ }
+
+ /**
+     * Ignore any subsequent calls to {@link #queueNewFrame()}.
+ */
+ public void ignoreNewFrames() {
+ mGLHandlerThread.getHandler().sendEmptyMessage(MSG_DROP_FRAMES);
+ }
+
+ /**
+ * Wait until no messages are queued.
+ */
+ public void waitUntilIdle() {
+ mGLHandlerThread.waitUntilIdle();
+ }
+
+ /**
+ * Re-enable drawing new frames after a call to {@link #ignoreNewFrames()}.
+ */
+ public void allowNewFrames() {
+ mGLHandlerThread.getHandler().sendEmptyMessage(MSG_ALLOW_FRAMES);
+ }
+}
diff --git a/android/hardware/camera2/legacy/LegacyCameraDevice.java b/android/hardware/camera2/legacy/LegacyCameraDevice.java
new file mode 100644
index 00000000..cb59fd14
--- /dev/null
+++ b/android/hardware/camera2/legacy/LegacyCameraDevice.java
@@ -0,0 +1,850 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.legacy;
+
+import android.graphics.ImageFormat;
+import android.graphics.SurfaceTexture;
+import android.hardware.Camera;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.impl.CameraDeviceImpl;
+import android.hardware.camera2.impl.CaptureResultExtras;
+import android.hardware.camera2.ICameraDeviceCallbacks;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.hardware.camera2.utils.ArrayUtils;
+import android.hardware.camera2.utils.SubmitInfo;
+import android.hardware.camera2.impl.CameraMetadataNative;
+import android.os.ConditionVariable;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.RemoteException;
+import android.os.ServiceSpecificException;
+import android.util.Log;
+import android.util.Pair;
+import android.util.Size;
+import android.util.SparseArray;
+import android.view.Surface;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+
+import static android.hardware.camera2.legacy.LegacyExceptionUtils.*;
+import static com.android.internal.util.Preconditions.*;
+
+/**
+ * This class emulates the functionality of a Camera2 device using the old Camera class.
+ *
+ * <p>
+ * There are two main components that are used to implement this:
+ * - A state machine containing valid Camera2 device states ({@link CameraDeviceState}).
+ * - A message-queue based pipeline that manages an old Camera class, and executes capture and
+ * configuration requests.
+ * </p>
+ */
+public class LegacyCameraDevice implements AutoCloseable {
+ private final String TAG;
+
+ private static final boolean DEBUG = false;
+ private final int mCameraId;
+ private final CameraCharacteristics mStaticCharacteristics;
+ private final ICameraDeviceCallbacks mDeviceCallbacks;
+ private final CameraDeviceState mDeviceState = new CameraDeviceState();
+ private SparseArray<Surface> mConfiguredSurfaces;
+ private boolean mClosed = false;
+
+ private final ConditionVariable mIdle = new ConditionVariable(/*open*/true);
+
+ private final HandlerThread mResultThread = new HandlerThread("ResultThread");
+ private final HandlerThread mCallbackHandlerThread = new HandlerThread("CallbackThread");
+ private final Handler mCallbackHandler;
+ private final Handler mResultHandler;
+ private static final int ILLEGAL_VALUE = -1;
+
+ // Keep up to date with values in hardware/libhardware/include/hardware/gralloc.h
+ private static final int GRALLOC_USAGE_RENDERSCRIPT = 0x00100000;
+ private static final int GRALLOC_USAGE_SW_READ_OFTEN = 0x00000003;
+ private static final int GRALLOC_USAGE_HW_TEXTURE = 0x00000100;
+ private static final int GRALLOC_USAGE_HW_COMPOSER = 0x00000800;
+ private static final int GRALLOC_USAGE_HW_RENDER = 0x00000200;
+ private static final int GRALLOC_USAGE_HW_VIDEO_ENCODER = 0x00010000;
+
+ public static final int MAX_DIMEN_FOR_ROUNDING = 1920; // maximum allowed width for rounding
+
+ // Keep up to date with values in system/core/include/system/window.h
+ public static final int NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW = 1;
+
+ private CaptureResultExtras getExtrasFromRequest(RequestHolder holder) {
+ return getExtrasFromRequest(holder,
+ /*errorCode*/CameraDeviceState.NO_CAPTURE_ERROR, /*errorArg*/null);
+ }
+
+ private CaptureResultExtras getExtrasFromRequest(RequestHolder holder,
+ int errorCode, Object errorArg) {
+ int errorStreamId = -1;
+ if (errorCode == CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_BUFFER) {
+ Surface errorTarget = (Surface) errorArg;
+ int indexOfTarget = mConfiguredSurfaces.indexOfValue(errorTarget);
+ if (indexOfTarget < 0) {
+ Log.e(TAG, "Buffer drop error reported for unknown Surface");
+ } else {
+ errorStreamId = mConfiguredSurfaces.keyAt(indexOfTarget);
+ }
+ }
+ if (holder == null) {
+ return new CaptureResultExtras(ILLEGAL_VALUE, ILLEGAL_VALUE, ILLEGAL_VALUE,
+ ILLEGAL_VALUE, ILLEGAL_VALUE, ILLEGAL_VALUE, ILLEGAL_VALUE);
+ }
+ return new CaptureResultExtras(holder.getRequestId(), holder.getSubsequeceId(),
+ /*afTriggerId*/0, /*precaptureTriggerId*/0, holder.getFrameNumber(),
+ /*partialResultCount*/1, errorStreamId);
+ }
+
+ /**
+ * Listener for the camera device state machine. Calls the appropriate
+ * {@link ICameraDeviceCallbacks} for each state transition.
+ */
+ private final CameraDeviceState.CameraDeviceStateListener mStateListener =
+ new CameraDeviceState.CameraDeviceStateListener() {
+ @Override
+ public void onError(final int errorCode, final Object errorArg, final RequestHolder holder) {
+ if (DEBUG) {
+ Log.d(TAG, "onError called, errorCode = " + errorCode + ", errorArg = " + errorArg);
+ }
+ switch (errorCode) {
+ /*
+ * Only be considered idle if we hit a fatal error
+ * and no further requests can be processed.
+ */
+ case CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DISCONNECTED:
+ case CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_SERVICE:
+ case CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE: {
+ mIdle.open();
+
+ if (DEBUG) {
+ Log.d(TAG, "onError - opening idle");
+ }
+ }
+ }
+
+ final CaptureResultExtras extras = getExtrasFromRequest(holder, errorCode, errorArg);
+ mResultHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ if (DEBUG) {
+ Log.d(TAG, "doing onError callback for request " + holder.getRequestId() +
+ ", with error code " + errorCode);
+ }
+ try {
+ mDeviceCallbacks.onDeviceError(errorCode, extras);
+ } catch (RemoteException e) {
+ throw new IllegalStateException(
+ "Received remote exception during onCameraError callback: ", e);
+ }
+ }
+ });
+ }
+
+ @Override
+ public void onConfiguring() {
+ // Do nothing
+ if (DEBUG) {
+ Log.d(TAG, "doing onConfiguring callback.");
+ }
+ }
+
+ @Override
+ public void onIdle() {
+ if (DEBUG) {
+ Log.d(TAG, "onIdle called");
+ }
+
+ mIdle.open();
+
+ mResultHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ if (DEBUG) {
+ Log.d(TAG, "doing onIdle callback.");
+ }
+ try {
+ mDeviceCallbacks.onDeviceIdle();
+ } catch (RemoteException e) {
+ throw new IllegalStateException(
+ "Received remote exception during onCameraIdle callback: ", e);
+ }
+ }
+ });
+ }
+
+ @Override
+ public void onBusy() {
+ mIdle.close();
+
+ if (DEBUG) {
+ Log.d(TAG, "onBusy called");
+ }
+ }
+
+ @Override
+ public void onCaptureStarted(final RequestHolder holder, final long timestamp) {
+ final CaptureResultExtras extras = getExtrasFromRequest(holder);
+
+ mResultHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ if (DEBUG) {
+ Log.d(TAG, "doing onCaptureStarted callback for request " +
+ holder.getRequestId());
+ }
+ try {
+ mDeviceCallbacks.onCaptureStarted(extras, timestamp);
+ } catch (RemoteException e) {
+                        throw new IllegalStateException(
+                                "Received remote exception during onCaptureStarted callback: ", e);
+ }
+ }
+ });
+ }
+
+ @Override
+ public void onRequestQueueEmpty() {
+ mResultHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ if (DEBUG) {
+ Log.d(TAG, "doing onRequestQueueEmpty callback");
+ }
+ try {
+ mDeviceCallbacks.onRequestQueueEmpty();
+ } catch (RemoteException e) {
+ throw new IllegalStateException(
+ "Received remote exception during onRequestQueueEmpty callback: ",
+ e);
+ }
+ }
+ });
+ }
+
+ @Override
+ public void onCaptureResult(final CameraMetadataNative result, final RequestHolder holder) {
+ final CaptureResultExtras extras = getExtrasFromRequest(holder);
+
+ mResultHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ if (DEBUG) {
+ Log.d(TAG, "doing onCaptureResult callback for request " +
+ holder.getRequestId());
+ }
+ try {
+ mDeviceCallbacks.onResultReceived(result, extras);
+ } catch (RemoteException e) {
+                        throw new IllegalStateException(
+                                "Received remote exception during onCaptureResult callback: ", e);
+ }
+ }
+ });
+ }
+
+ @Override
+ public void onRepeatingRequestError(final long lastFrameNumber,
+ final int repeatingRequestId) {
+ mResultHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ if (DEBUG) {
+ Log.d(TAG, "doing onRepeatingRequestError callback.");
+ }
+ try {
+ mDeviceCallbacks.onRepeatingRequestError(lastFrameNumber,
+ repeatingRequestId);
+ } catch (RemoteException e) {
+ throw new IllegalStateException(
+ "Received remote exception during onRepeatingRequestError " +
+ "callback: ", e);
+ }
+ }
+ });
+ }
+ };
+
+ private final RequestThreadManager mRequestThreadManager;
+
+ /**
+     * Check if a given surface uses {@link ImageFormat#YUV_420_888} or a format that can be
+     * readily converted to it; YV12 and NV21 are the two currently supported formats.
+     *
+     * @param s the surface to check.
+     * @return {@code true} if the surface uses {@link ImageFormat#YUV_420_888} or a compatible
+     * format.
+ */
+ static boolean needsConversion(Surface s) throws BufferQueueAbandonedException {
+ int nativeType = detectSurfaceType(s);
+ return nativeType == ImageFormat.YUV_420_888 || nativeType == ImageFormat.YV12 ||
+ nativeType == ImageFormat.NV21;
+ }
+
+ /**
+ * Create a new emulated camera device from a given Camera 1 API camera.
+ *
+ * <p>
+ * The {@link Camera} provided to this constructor must already have been successfully opened,
+ * and ownership of the provided camera is passed to this object. No further calls to the
+ * camera methods should be made following this constructor.
+ * </p>
+ *
+ * @param cameraId the id of the camera.
+ * @param camera an open {@link Camera} device.
+ * @param characteristics the static camera characteristics for this camera device
+ * @param callbacks {@link ICameraDeviceCallbacks} callbacks to call for Camera2 API operations.
+ */
+ public LegacyCameraDevice(int cameraId, Camera camera, CameraCharacteristics characteristics,
+ ICameraDeviceCallbacks callbacks) {
+ mCameraId = cameraId;
+ mDeviceCallbacks = callbacks;
+ TAG = String.format("CameraDevice-%d-LE", mCameraId);
+
+ mResultThread.start();
+ mResultHandler = new Handler(mResultThread.getLooper());
+ mCallbackHandlerThread.start();
+ mCallbackHandler = new Handler(mCallbackHandlerThread.getLooper());
+ mDeviceState.setCameraDeviceCallbacks(mCallbackHandler, mStateListener);
+ mStaticCharacteristics = characteristics;
+ mRequestThreadManager =
+ new RequestThreadManager(cameraId, camera, characteristics, mDeviceState);
+ mRequestThreadManager.start();
+ }
+
+ /**
+ * Configure the device with a set of output surfaces.
+ *
+ * <p>Using empty or {@code null} {@code outputs} is the same as unconfiguring.</p>
+ *
+ * <p>Every surface in {@code outputs} must be non-{@code null}.</p>
+ *
+ * @param outputs a list of surfaces to set. LegacyCameraDevice will take ownership of this
+ * list; it must not be modified by the caller once it's passed in.
+ * @return an error code for this binder operation, or {@link NO_ERROR}
+ * on success.
+ */
+ public int configureOutputs(SparseArray<Surface> outputs) {
+ List<Pair<Surface, Size>> sizedSurfaces = new ArrayList<>();
+ if (outputs != null) {
+ int count = outputs.size();
+ for (int i = 0; i < count; i++) {
+ Surface output = outputs.valueAt(i);
+ if (output == null) {
+ Log.e(TAG, "configureOutputs - null outputs are not allowed");
+ return BAD_VALUE;
+ }
+ if (!output.isValid()) {
+ Log.e(TAG, "configureOutputs - invalid output surfaces are not allowed");
+ return BAD_VALUE;
+ }
+ StreamConfigurationMap streamConfigurations = mStaticCharacteristics.
+ get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+
+ // Validate surface size and format.
+ try {
+ Size s = getSurfaceSize(output);
+ int surfaceType = detectSurfaceType(output);
+
+ boolean flexibleConsumer = isFlexibleConsumer(output);
+
+ Size[] sizes = streamConfigurations.getOutputSizes(surfaceType);
+ if (sizes == null) {
+ if (surfaceType == ImageFormat.PRIVATE) {
+
+ // YUV_420_888 is always present in LEGACY for all
+ // IMPLEMENTATION_DEFINED output sizes, and is publicly visible in the
+ // API (i.e. {@code #getOutputSizes} works here).
+ sizes = streamConfigurations.getOutputSizes(ImageFormat.YUV_420_888);
+ } else if (surfaceType == LegacyMetadataMapper.HAL_PIXEL_FORMAT_BLOB) {
+ sizes = streamConfigurations.getOutputSizes(ImageFormat.JPEG);
+ }
+ }
+
+ if (!ArrayUtils.contains(sizes, s)) {
+ if (flexibleConsumer && (s = findClosestSize(s, sizes)) != null) {
+ sizedSurfaces.add(new Pair<>(output, s));
+ } else {
+ String reason = (sizes == null) ? "format is invalid." :
+ ("size not in valid set: " + Arrays.toString(sizes));
+ Log.e(TAG, String.format("Surface with size (w=%d, h=%d) and format " +
+ "0x%x is not valid, %s", s.getWidth(), s.getHeight(),
+ surfaceType, reason));
+ return BAD_VALUE;
+ }
+ } else {
+ sizedSurfaces.add(new Pair<>(output, s));
+ }
+ // Lock down the size before configuration
+ setSurfaceDimens(output, s.getWidth(), s.getHeight());
+ } catch (BufferQueueAbandonedException e) {
+ Log.e(TAG, "Surface bufferqueue is abandoned, cannot configure as output: ", e);
+ return BAD_VALUE;
+ }
+
+ }
+ }
+
+ boolean success = false;
+ if (mDeviceState.setConfiguring()) {
+ mRequestThreadManager.configure(sizedSurfaces);
+ success = mDeviceState.setIdle();
+ }
+
+ if (success) {
+ mConfiguredSurfaces = outputs;
+ } else {
+ return LegacyExceptionUtils.INVALID_OPERATION;
+ }
+ return LegacyExceptionUtils.NO_ERROR;
+ }
+
+ /**
+ * Submit a burst of capture requests.
+ *
+ * @param requestList a list of capture requests to execute.
+ * @param repeating {@code true} if this burst is repeating.
+ * @return the submission info, including the new request id, and the last frame number, which
+ * contains either the frame number of the last frame that will be returned for this request,
+ * or the frame number of the last frame that will be returned for the current repeating
+ * request if this burst is set to be repeating.
+ */
+ public SubmitInfo submitRequestList(CaptureRequest[] requestList, boolean repeating) {
+ if (requestList == null || requestList.length == 0) {
+ Log.e(TAG, "submitRequestList - Empty/null requests are not allowed");
+ throw new ServiceSpecificException(BAD_VALUE,
+ "submitRequestList - Empty/null requests are not allowed");
+ }
+
+ List<Long> surfaceIds;
+
+ try {
+ surfaceIds = (mConfiguredSurfaces == null) ? new ArrayList<Long>() :
+ getSurfaceIds(mConfiguredSurfaces);
+ } catch (BufferQueueAbandonedException e) {
+ throw new ServiceSpecificException(BAD_VALUE,
+ "submitRequestList - configured surface is abandoned.");
+ }
+
+        // Make sure that all requests have at least one surface, and that all surfaces are non-null
+ for (CaptureRequest request : requestList) {
+ if (request.getTargets().isEmpty()) {
+ Log.e(TAG, "submitRequestList - "
+ + "Each request must have at least one Surface target");
+ throw new ServiceSpecificException(BAD_VALUE,
+ "submitRequestList - "
+ + "Each request must have at least one Surface target");
+ }
+
+ for (Surface surface : request.getTargets()) {
+ if (surface == null) {
+ Log.e(TAG, "submitRequestList - Null Surface targets are not allowed");
+ throw new ServiceSpecificException(BAD_VALUE,
+ "submitRequestList - Null Surface targets are not allowed");
+ } else if (mConfiguredSurfaces == null) {
+ Log.e(TAG, "submitRequestList - must configure " +
+ " device with valid surfaces before submitting requests");
+ throw new ServiceSpecificException(INVALID_OPERATION,
+ "submitRequestList - must configure " +
+ " device with valid surfaces before submitting requests");
+ } else if (!containsSurfaceId(surface, surfaceIds)) {
+ Log.e(TAG, "submitRequestList - cannot use a surface that wasn't configured");
+ throw new ServiceSpecificException(BAD_VALUE,
+ "submitRequestList - cannot use a surface that wasn't configured");
+ }
+ }
+ }
+
+ // TODO: further validation of request here
+ mIdle.close();
+ return mRequestThreadManager.submitCaptureRequests(requestList, repeating);
+ }
+
+ /**
+ * Submit a single capture request.
+ *
+ * @param request the capture request to execute.
+ * @param repeating {@code true} if this request is repeating.
+ * @return the submission info, including the new request id, and the last frame number, which
+ * contains either the frame number of the last frame that will be returned for this request,
+ * or the frame number of the last frame that will be returned for the current repeating
+ * request if this burst is set to be repeating.
+ */
+ public SubmitInfo submitRequest(CaptureRequest request, boolean repeating) {
+ CaptureRequest[] requestList = { request };
+ return submitRequestList(requestList, repeating);
+ }
+
+ /**
+ * Cancel the repeating request with the given request id.
+ *
+ * @param requestId the request id of the request to cancel.
+ * @return the last frame number to be returned from the HAL for the given repeating request, or
+ * {@code INVALID_FRAME} if none exists.
+ */
+ public long cancelRequest(int requestId) {
+ return mRequestThreadManager.cancelRepeating(requestId);
+ }
+
+ /**
+     * Block until the {@link ICameraDeviceCallbacks#onDeviceIdle()} callback is received.
+ */
+ public void waitUntilIdle() {
+ mIdle.block();
+ }
+
+ /**
+ * Flush any pending requests.
+ *
+ * @return the last frame number.
+ */
+ public long flush() {
+ long lastFrame = mRequestThreadManager.flush();
+ waitUntilIdle();
+ return lastFrame;
+ }
+
+ /**
+ * Return {@code true} if the device has been closed.
+ */
+ public boolean isClosed() {
+ return mClosed;
+ }
+
+ @Override
+ public void close() {
+ mRequestThreadManager.quit();
+ mCallbackHandlerThread.quitSafely();
+ mResultThread.quitSafely();
+
+ try {
+ mCallbackHandlerThread.join();
+ } catch (InterruptedException e) {
+ Log.e(TAG, String.format("Thread %s (%d) interrupted while quitting.",
+ mCallbackHandlerThread.getName(), mCallbackHandlerThread.getId()));
+ }
+
+ try {
+ mResultThread.join();
+ } catch (InterruptedException e) {
+ Log.e(TAG, String.format("Thread %s (%d) interrupted while quitting.",
+ mResultThread.getName(), mResultThread.getId()));
+ }
+
+ mClosed = true;
+ }
+
+ @Override
+ protected void finalize() throws Throwable {
+ try {
+ close();
+ } catch (ServiceSpecificException e) {
+ Log.e(TAG, "Got error while trying to finalize, ignoring: " + e.getMessage());
+ } finally {
+ super.finalize();
+ }
+ }
+
+ static long findEuclidDistSquare(Size a, Size b) {
+ long d0 = a.getWidth() - b.getWidth();
+ long d1 = a.getHeight() - b.getHeight();
+ return d0 * d0 + d1 * d1;
+ }
+
+ // Keep up to date with rounding behavior in
+ // frameworks/av/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+ static Size findClosestSize(Size size, Size[] supportedSizes) {
+ if (size == null || supportedSizes == null) {
+ return null;
+ }
+ Size bestSize = null;
+ for (Size s : supportedSizes) {
+ if (s.equals(size)) {
+ return size;
+            } else if (s.getWidth() <= MAX_DIMEN_FOR_ROUNDING && (bestSize == null ||
+                    LegacyCameraDevice.findEuclidDistSquare(size, s) <
+                    LegacyCameraDevice.findEuclidDistSquare(size, bestSize))) {
+ bestSize = s;
+ }
+ }
+ return bestSize;
+ }
+
+ /**
+ * Query the surface for its currently configured default buffer size.
+ * @param surface a non-{@code null} {@code Surface}
+ * @return the width and height of the surface
+ *
+ * @throws NullPointerException if the {@code surface} was {@code null}
+ * @throws BufferQueueAbandonedException if the {@code surface} was invalid
+ */
+ public static Size getSurfaceSize(Surface surface) throws BufferQueueAbandonedException {
+ checkNotNull(surface);
+
+ int[] dimens = new int[2];
+ LegacyExceptionUtils.throwOnError(nativeDetectSurfaceDimens(surface, /*out*/dimens));
+
+ return new Size(dimens[0], dimens[1]);
+ }
+
+ public static boolean isFlexibleConsumer(Surface output) {
+ int usageFlags = detectSurfaceUsageFlags(output);
+
+ // Keep up to date with allowed consumer types in
+ // frameworks/av/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+ int disallowedFlags = GRALLOC_USAGE_HW_VIDEO_ENCODER | GRALLOC_USAGE_RENDERSCRIPT;
+ int allowedFlags = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_SW_READ_OFTEN |
+ GRALLOC_USAGE_HW_COMPOSER;
+ boolean flexibleConsumer = ((usageFlags & disallowedFlags) == 0 &&
+ (usageFlags & allowedFlags) != 0);
+ return flexibleConsumer;
+ }
+
+ public static boolean isPreviewConsumer(Surface output) {
+ int usageFlags = detectSurfaceUsageFlags(output);
+ int disallowedFlags = GRALLOC_USAGE_HW_VIDEO_ENCODER | GRALLOC_USAGE_RENDERSCRIPT |
+ GRALLOC_USAGE_SW_READ_OFTEN;
+ int allowedFlags = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_COMPOSER |
+ GRALLOC_USAGE_HW_RENDER;
+ boolean previewConsumer = ((usageFlags & disallowedFlags) == 0 &&
+ (usageFlags & allowedFlags) != 0);
+ int surfaceFormat = ImageFormat.UNKNOWN;
+ try {
+ surfaceFormat = detectSurfaceType(output);
+ } catch(BufferQueueAbandonedException e) {
+ throw new IllegalArgumentException("Surface was abandoned", e);
+ }
+
+ return previewConsumer;
+ }
+
+ public static boolean isVideoEncoderConsumer(Surface output) {
+ int usageFlags = detectSurfaceUsageFlags(output);
+ int disallowedFlags = GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_COMPOSER |
+ GRALLOC_USAGE_RENDERSCRIPT | GRALLOC_USAGE_SW_READ_OFTEN;
+ int allowedFlags = GRALLOC_USAGE_HW_VIDEO_ENCODER;
+ boolean videoEncoderConsumer = ((usageFlags & disallowedFlags) == 0 &&
+ (usageFlags & allowedFlags) != 0);
+
+ int surfaceFormat = ImageFormat.UNKNOWN;
+ try {
+ surfaceFormat = detectSurfaceType(output);
+ } catch(BufferQueueAbandonedException e) {
+ throw new IllegalArgumentException("Surface was abandoned", e);
+ }
+
+ return videoEncoderConsumer;
+ }
+
+ /**
+ * Query the surface for its currently configured usage flags
+ */
+ static int detectSurfaceUsageFlags(Surface surface) {
+ checkNotNull(surface);
+ return nativeDetectSurfaceUsageFlags(surface);
+ }
+
+ /**
+ * Query the surface for its currently configured format
+ */
+ public static int detectSurfaceType(Surface surface) throws BufferQueueAbandonedException {
+ checkNotNull(surface);
+ int surfaceType = nativeDetectSurfaceType(surface);
+
+ // TODO: remove this override since the default format should be
+ // ImageFormat.PRIVATE. b/9487482
+ if ((surfaceType >= LegacyMetadataMapper.HAL_PIXEL_FORMAT_RGBA_8888 &&
+ surfaceType <= LegacyMetadataMapper.HAL_PIXEL_FORMAT_BGRA_8888)) {
+ surfaceType = ImageFormat.PRIVATE;
+ }
+
+ return LegacyExceptionUtils.throwOnError(surfaceType);
+ }
+
+ /**
+ * Query the surface for its currently configured dataspace
+ */
+ public static int detectSurfaceDataspace(Surface surface) throws BufferQueueAbandonedException {
+ checkNotNull(surface);
+ return LegacyExceptionUtils.throwOnError(nativeDetectSurfaceDataspace(surface));
+ }
+
+ static void connectSurface(Surface surface) throws BufferQueueAbandonedException {
+ checkNotNull(surface);
+
+ LegacyExceptionUtils.throwOnError(nativeConnectSurface(surface));
+ }
+
+ static void disconnectSurface(Surface surface) throws BufferQueueAbandonedException {
+ if (surface == null) return;
+
+ LegacyExceptionUtils.throwOnError(nativeDisconnectSurface(surface));
+ }
+
+ static void produceFrame(Surface surface, byte[] pixelBuffer, int width,
+ int height, int pixelFormat)
+ throws BufferQueueAbandonedException {
+ checkNotNull(surface);
+ checkNotNull(pixelBuffer);
+ checkArgumentPositive(width, "width must be positive.");
+ checkArgumentPositive(height, "height must be positive.");
+
+ LegacyExceptionUtils.throwOnError(nativeProduceFrame(surface, pixelBuffer, width, height,
+ pixelFormat));
+ }
+
+ static void setSurfaceFormat(Surface surface, int pixelFormat)
+ throws BufferQueueAbandonedException {
+ checkNotNull(surface);
+
+ LegacyExceptionUtils.throwOnError(nativeSetSurfaceFormat(surface, pixelFormat));
+ }
+
+ static void setSurfaceDimens(Surface surface, int width, int height)
+ throws BufferQueueAbandonedException {
+ checkNotNull(surface);
+ checkArgumentPositive(width, "width must be positive.");
+ checkArgumentPositive(height, "height must be positive.");
+
+ LegacyExceptionUtils.throwOnError(nativeSetSurfaceDimens(surface, width, height));
+ }
+
+ static long getSurfaceId(Surface surface) throws BufferQueueAbandonedException {
+ checkNotNull(surface);
+ try {
+ return nativeGetSurfaceId(surface);
+ } catch (IllegalArgumentException e) {
+ throw new BufferQueueAbandonedException();
+ }
+ }
+
+ static List<Long> getSurfaceIds(SparseArray<Surface> surfaces)
+ throws BufferQueueAbandonedException {
+ if (surfaces == null) {
+ throw new NullPointerException("Null argument surfaces");
+ }
+ List<Long> surfaceIds = new ArrayList<>();
+ int count = surfaces.size();
+ for (int i = 0; i < count; i++) {
+ long id = getSurfaceId(surfaces.valueAt(i));
+ if (id == 0) {
+ throw new IllegalStateException(
+ "Configured surface had null native GraphicBufferProducer pointer!");
+ }
+ surfaceIds.add(id);
+ }
+ return surfaceIds;
+ }
+
+ static List<Long> getSurfaceIds(Collection<Surface> surfaces)
+ throws BufferQueueAbandonedException {
+ if (surfaces == null) {
+ throw new NullPointerException("Null argument surfaces");
+ }
+ List<Long> surfaceIds = new ArrayList<>();
+ for (Surface s : surfaces) {
+ long id = getSurfaceId(s);
+ if (id == 0) {
+ throw new IllegalStateException(
+ "Configured surface had null native GraphicBufferProducer pointer!");
+ }
+ surfaceIds.add(id);
+ }
+ return surfaceIds;
+ }
+
+ static boolean containsSurfaceId(Surface s, Collection<Long> ids) {
+ long id = 0;
+ try {
+ id = getSurfaceId(s);
+ } catch (BufferQueueAbandonedException e) {
+ // If surface is abandoned, return false.
+ return false;
+ }
+ return ids.contains(id);
+ }
+
+ static void setSurfaceOrientation(Surface surface, int facing, int sensorOrientation)
+ throws BufferQueueAbandonedException {
+ checkNotNull(surface);
+ LegacyExceptionUtils.throwOnError(nativeSetSurfaceOrientation(surface, facing,
+ sensorOrientation));
+ }
+
+ static Size getTextureSize(SurfaceTexture surfaceTexture)
+ throws BufferQueueAbandonedException {
+ checkNotNull(surfaceTexture);
+
+ int[] dimens = new int[2];
+ LegacyExceptionUtils.throwOnError(nativeDetectTextureDimens(surfaceTexture,
+ /*out*/dimens));
+
+ return new Size(dimens[0], dimens[1]);
+ }
+
+ static void setNextTimestamp(Surface surface, long timestamp)
+ throws BufferQueueAbandonedException {
+ checkNotNull(surface);
+ LegacyExceptionUtils.throwOnError(nativeSetNextTimestamp(surface, timestamp));
+ }
+
+ static void setScalingMode(Surface surface, int mode)
+ throws BufferQueueAbandonedException {
+ checkNotNull(surface);
+ LegacyExceptionUtils.throwOnError(nativeSetScalingMode(surface, mode));
+ }
+
+
+ private static native int nativeDetectSurfaceType(Surface surface);
+
+ private static native int nativeDetectSurfaceDataspace(Surface surface);
+
+ private static native int nativeDetectSurfaceDimens(Surface surface,
+ /*out*/int[/*2*/] dimens);
+
+ private static native int nativeConnectSurface(Surface surface);
+
+ private static native int nativeProduceFrame(Surface surface, byte[] pixelBuffer, int width,
+ int height, int pixelFormat);
+
+ private static native int nativeSetSurfaceFormat(Surface surface, int pixelFormat);
+
+ private static native int nativeSetSurfaceDimens(Surface surface, int width, int height);
+
+ private static native long nativeGetSurfaceId(Surface surface);
+
+ private static native int nativeSetSurfaceOrientation(Surface surface, int facing,
+ int sensorOrientation);
+
+ private static native int nativeDetectTextureDimens(SurfaceTexture surfaceTexture,
+ /*out*/int[/*2*/] dimens);
+
+ private static native int nativeSetNextTimestamp(Surface surface, long timestamp);
+
+ private static native int nativeDetectSurfaceUsageFlags(Surface surface);
+
+ private static native int nativeSetScalingMode(Surface surface, int scalingMode);
+
+ private static native int nativeDisconnectSurface(Surface surface);
+
+ static native int nativeGetJpegFooterSize();
+}
diff --git a/android/hardware/camera2/legacy/LegacyExceptionUtils.java b/android/hardware/camera2/legacy/LegacyExceptionUtils.java
new file mode 100644
index 00000000..93d6001c
--- /dev/null
+++ b/android/hardware/camera2/legacy/LegacyExceptionUtils.java
@@ -0,0 +1,138 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.legacy;
+
+import android.hardware.ICameraService;
+import android.os.ServiceSpecificException;
+import android.util.AndroidException;
+
+import static android.system.OsConstants.*;
+
+/**
+ * Utility class containing exception handling used solely by the compatibility mode shim.
+ */
+public class LegacyExceptionUtils {
+ private static final String TAG = "LegacyExceptionUtils";
+
+ public static final int NO_ERROR = 0;
+ public static final int PERMISSION_DENIED = -EPERM;
+ public static final int ALREADY_EXISTS = -EEXIST;
+ public static final int BAD_VALUE = -EINVAL;
+ public static final int DEAD_OBJECT = -ENOSYS;
+ public static final int INVALID_OPERATION = -EPIPE;
+ public static final int TIMED_OUT = -ETIMEDOUT;
+
+ /**
+ * Checked exception thrown when a BufferQueue has been abandoned by its consumer.
+ */
+ public static class BufferQueueAbandonedException extends AndroidException {
+        public BufferQueueAbandonedException() {}
+
+ public BufferQueueAbandonedException(String name) {
+ super(name);
+ }
+
+ public BufferQueueAbandonedException(String name, Throwable cause) {
+ super(name, cause);
+ }
+
+ public BufferQueueAbandonedException(Exception cause) {
+ super(cause);
+ }
+ }
+
+ /**
+ * Throw error codes used by legacy device methods as exceptions.
+ *
+ * <p>Non-negative return values are passed through, negative return values are thrown as
+ * exceptions.</p>
+ *
+ * @param errorFlag error to throw as an exception.
+ * @throws {@link BufferQueueAbandonedException} for -ENODEV.
+ * @throws {@link UnsupportedOperationException} for an unknown negative error code.
+ * @return {@code errorFlag} if the value was non-negative, throws otherwise.
+ */
+ public static int throwOnError(int errorFlag) throws BufferQueueAbandonedException {
+ if (errorFlag == NO_ERROR) {
+ return NO_ERROR;
+ } else if (errorFlag == -ENODEV) {
+ throw new BufferQueueAbandonedException();
+ }
+
+ if (errorFlag < 0) {
+ throw new UnsupportedOperationException("Unknown error " + errorFlag);
+ }
+ return errorFlag;
+ }
+
+ /**
+ * Throw error codes returned by the camera service as exceptions.
+ *
+ * @param errorFlag error to throw as an exception.
+ */
+ public static void throwOnServiceError(int errorFlag) {
+ int errorCode = ICameraService.ERROR_INVALID_OPERATION;
+ String errorMsg;
+
+ if (errorFlag >= NO_ERROR) {
+ return;
+ } else if (errorFlag == PERMISSION_DENIED) {
+ errorCode = ICameraService.ERROR_PERMISSION_DENIED;
+ errorMsg = "Lacking privileges to access camera service";
+ } else if (errorFlag == ALREADY_EXISTS) {
+ // This should be handled at the call site. Typically this isn't bad,
+ // just means we tried to do an operation that already completed.
+ return;
+ } else if (errorFlag == BAD_VALUE) {
+ errorCode = ICameraService.ERROR_ILLEGAL_ARGUMENT;
+ errorMsg = "Bad argument passed to camera service";
+ } else if (errorFlag == DEAD_OBJECT) {
+ errorCode = ICameraService.ERROR_DISCONNECTED;
+ errorMsg = "Camera service not available";
+ } else if (errorFlag == TIMED_OUT) {
+ errorCode = ICameraService.ERROR_INVALID_OPERATION;
+ errorMsg = "Operation timed out in camera service";
+ } else if (errorFlag == -EACCES) {
+ errorCode = ICameraService.ERROR_DISABLED;
+ errorMsg = "Camera disabled by policy";
+ } else if (errorFlag == -EBUSY) {
+ errorCode = ICameraService.ERROR_CAMERA_IN_USE;
+ errorMsg = "Camera already in use";
+ } else if (errorFlag == -EUSERS) {
+ errorCode = ICameraService.ERROR_MAX_CAMERAS_IN_USE;
+ errorMsg = "Maximum number of cameras in use";
+ } else if (errorFlag == -ENODEV) {
+ errorCode = ICameraService.ERROR_DISCONNECTED;
+ errorMsg = "Camera device not available";
+ } else if (errorFlag == -EOPNOTSUPP) {
+ errorCode = ICameraService.ERROR_DEPRECATED_HAL;
+ errorMsg = "Deprecated camera HAL does not support this";
+ } else if (errorFlag == INVALID_OPERATION) {
+ errorCode = ICameraService.ERROR_INVALID_OPERATION;
+ errorMsg = "Illegal state encountered in camera service.";
+ } else {
+ errorCode = ICameraService.ERROR_INVALID_OPERATION;
+ errorMsg = "Unknown camera device error " + errorFlag;
+ }
+
+ throw new ServiceSpecificException(errorCode, errorMsg);
+ }
+
+ private LegacyExceptionUtils() {
+ throw new AssertionError();
+ }
+}
diff --git a/android/hardware/camera2/legacy/LegacyFaceDetectMapper.java b/android/hardware/camera2/legacy/LegacyFaceDetectMapper.java
new file mode 100644
index 00000000..882a7f4a
--- /dev/null
+++ b/android/hardware/camera2/legacy/LegacyFaceDetectMapper.java
@@ -0,0 +1,263 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.legacy;
+
+import android.graphics.Rect;
+import android.hardware.Camera;
+import android.hardware.Camera.FaceDetectionListener;
+import android.hardware.camera2.impl.CameraMetadataNative;
+import android.hardware.camera2.legacy.ParameterUtils.ZoomData;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.params.Face;
+import android.hardware.camera2.utils.ListUtils;
+import android.hardware.camera2.utils.ParamsUtils;
+import android.util.Log;
+import android.util.Size;
+
+import com.android.internal.util.ArrayUtils;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import static android.hardware.camera2.CaptureRequest.*;
+import static com.android.internal.util.Preconditions.*;
+
+/**
+ * Map legacy face detect callbacks into face detection results.
+ */
+@SuppressWarnings("deprecation")
+public class LegacyFaceDetectMapper {
+ private static String TAG = "LegacyFaceDetectMapper";
+ private static final boolean DEBUG = false;
+
+ private final Camera mCamera;
+ /** Is the camera capable of face detection? */
+ private final boolean mFaceDetectSupported;
+    /** Is the camera currently running face detection? */
+ private boolean mFaceDetectEnabled = false;
+ /** Did the last request say to use SCENE_MODE = FACE_PRIORITY? */
+ private boolean mFaceDetectScenePriority = false;
+    /** Did the last request set the face detect mode to ON? */
+ private boolean mFaceDetectReporting = false;
+
+ /** Synchronize access to all fields */
+ private final Object mLock = new Object();
+ private Camera.Face[] mFaces;
+ private Camera.Face[] mFacesPrev;
+ /**
+ * Instantiate a new face detect mapper.
+ *
+ * @param camera a non-{@code null} camera1 device
+ * @param characteristics a non-{@code null} camera characteristics for that camera1
+ *
+ * @throws NullPointerException if any of the args were {@code null}
+ */
+ public LegacyFaceDetectMapper(Camera camera, CameraCharacteristics characteristics) {
+ mCamera = checkNotNull(camera, "camera must not be null");
+ checkNotNull(characteristics, "characteristics must not be null");
+
+ mFaceDetectSupported = ArrayUtils.contains(
+ characteristics.get(
+ CameraCharacteristics.STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES),
+ STATISTICS_FACE_DETECT_MODE_SIMPLE);
+
+ if (!mFaceDetectSupported) {
+ return;
+ }
+
+ mCamera.setFaceDetectionListener(new FaceDetectionListener() {
+
+ @Override
+ public void onFaceDetection(Camera.Face[] faces, Camera camera) {
+ int lengthFaces = faces == null ? 0 : faces.length;
+ synchronized (mLock) {
+ if (mFaceDetectEnabled) {
+ mFaces = faces;
+ } else if (lengthFaces > 0) {
+ // stopFaceDetectMode could race against the requests, print a debug log
+                        Log.d(TAG,
+                                "onFaceDetection - Ignored some incoming faces since " +
+                                "face detection was disabled");
+ }
+ }
+
+ if (DEBUG) {
+ Log.v(TAG, "onFaceDetection - read " + lengthFaces + " faces");
+ }
+ }
+ });
+ }
+
+ /**
+ * Process the face detect mode from the capture request into an api1 face detect toggle.
+ *
+ * <p>This method should be called after the parameters are {@link LegacyRequestMapper mapped}
+ * with the request.</p>
+ *
+ * <p>Callbacks are processed in the background, and the next call to {@link #mapResultTriggers}
+ * will have the latest faces detected as reflected by the camera1 callbacks.</p>
+ *
+ * <p>None of the arguments will be mutated.</p>
+ *
+ * @param captureRequest a non-{@code null} request
+ * @param parameters a non-{@code null} parameters corresponding to this request (read-only)
+ */
+ public void processFaceDetectMode(CaptureRequest captureRequest,
+ Camera.Parameters parameters) {
+ checkNotNull(captureRequest, "captureRequest must not be null");
+
+ /*
+ * statistics.faceDetectMode
+ */
+ int fdMode = ParamsUtils.getOrDefault(captureRequest, STATISTICS_FACE_DETECT_MODE,
+ STATISTICS_FACE_DETECT_MODE_OFF);
+
+ if (fdMode != STATISTICS_FACE_DETECT_MODE_OFF && !mFaceDetectSupported) {
+ Log.w(TAG,
+ "processFaceDetectMode - Ignoring statistics.faceDetectMode; " +
+ "face detection is not available");
+ return;
+ }
+
+ /*
+ * control.sceneMode
+ */
+ int sceneMode = ParamsUtils.getOrDefault(captureRequest, CONTROL_SCENE_MODE,
+ CONTROL_SCENE_MODE_DISABLED);
+ if (sceneMode == CONTROL_SCENE_MODE_FACE_PRIORITY && !mFaceDetectSupported) {
+ Log.w(TAG, "processFaceDetectMode - ignoring control.sceneMode == FACE_PRIORITY; " +
+ "face detection is not available");
+ return;
+ }
+
+ // Print some warnings out in case the values were wrong
+ switch (fdMode) {
+ case STATISTICS_FACE_DETECT_MODE_OFF:
+ case STATISTICS_FACE_DETECT_MODE_SIMPLE:
+ break;
+ case STATISTICS_FACE_DETECT_MODE_FULL:
+ Log.w(TAG,
+ "processFaceDetectMode - statistics.faceDetectMode == FULL unsupported, " +
+ "downgrading to SIMPLE");
+ break;
+ default:
+ Log.w(TAG, "processFaceDetectMode - ignoring unknown statistics.faceDetectMode = "
+ + fdMode);
+ return;
+ }
+
+ boolean enableFaceDetect = (fdMode != STATISTICS_FACE_DETECT_MODE_OFF)
+ || (sceneMode == CONTROL_SCENE_MODE_FACE_PRIORITY);
+ synchronized (mLock) {
+ // Enable/disable face detection if it's changed since last time
+ if (enableFaceDetect != mFaceDetectEnabled) {
+ if (enableFaceDetect) {
+ mCamera.startFaceDetection();
+
+ if (DEBUG) {
+ Log.v(TAG, "processFaceDetectMode - start face detection");
+ }
+ } else {
+ mCamera.stopFaceDetection();
+
+ if (DEBUG) {
+ Log.v(TAG, "processFaceDetectMode - stop face detection");
+ }
+
+ mFaces = null;
+ }
+
+ mFaceDetectEnabled = enableFaceDetect;
+ mFaceDetectScenePriority = sceneMode == CONTROL_SCENE_MODE_FACE_PRIORITY;
+ mFaceDetectReporting = fdMode != STATISTICS_FACE_DETECT_MODE_OFF;
+ }
+ }
+ }
+
+ /**
+ * Update the {@code result} camera metadata map with the new value for the
+ * {@code statistics.faces} and {@code statistics.faceDetectMode}.
+ *
+ * <p>Face detect callbacks are processed in the background, and each call to
+ * {@link #mapResultFaces} will have the latest faces as reflected by the camera1 callbacks.</p>
+ *
+ * <p>If the scene mode was set to {@code FACE_PRIORITY} but face detection is disabled,
+ * the camera will still run face detection in the background, but no faces will be reported
+ * in the capture result.</p>
+ *
+ * @param result a non-{@code null} result
+ * @param legacyRequest a non-{@code null} request (read-only)
+ */
+ public void mapResultFaces(CameraMetadataNative result, LegacyRequest legacyRequest) {
+ checkNotNull(result, "result must not be null");
+ checkNotNull(legacyRequest, "legacyRequest must not be null");
+
+ Camera.Face[] faces, previousFaces;
+ int fdMode;
+ boolean fdScenePriority;
+ synchronized (mLock) {
+ fdMode = mFaceDetectReporting ?
+ STATISTICS_FACE_DETECT_MODE_SIMPLE : STATISTICS_FACE_DETECT_MODE_OFF;
+
+ if (mFaceDetectReporting) {
+ faces = mFaces;
+ } else {
+ faces = null;
+ }
+
+ fdScenePriority = mFaceDetectScenePriority;
+
+ previousFaces = mFacesPrev;
+ mFacesPrev = faces;
+ }
+
+ CameraCharacteristics characteristics = legacyRequest.characteristics;
+ CaptureRequest request = legacyRequest.captureRequest;
+ Size previewSize = legacyRequest.previewSize;
+ Camera.Parameters params = legacyRequest.parameters;
+
+ Rect activeArray = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+ ZoomData zoomData = ParameterUtils.convertScalerCropRegion(activeArray,
+ request.get(CaptureRequest.SCALER_CROP_REGION), previewSize, params);
+
+ List<Face> convertedFaces = new ArrayList<>();
+ if (faces != null) {
+ for (Camera.Face face : faces) {
+ if (face != null) {
+ convertedFaces.add(
+ ParameterUtils.convertFaceFromLegacy(face, activeArray, zoomData));
+ } else {
+ Log.w(TAG, "mapResultFaces - read NULL face from camera1 device");
+ }
+ }
+ }
+
+ if (DEBUG && previousFaces != faces) { // Log only in verbose and IF the faces changed
+ Log.v(TAG, "mapResultFaces - changed to " + ListUtils.listToString(convertedFaces));
+ }
+
+ result.set(CaptureResult.STATISTICS_FACES, convertedFaces.toArray(new Face[0]));
+ result.set(CaptureResult.STATISTICS_FACE_DETECT_MODE, fdMode);
+
+ // Override scene mode with FACE_PRIORITY if the request was using FACE_PRIORITY
+ if (fdScenePriority) {
+ result.set(CaptureResult.CONTROL_SCENE_MODE, CONTROL_SCENE_MODE_FACE_PRIORITY);
+ }
+ }
+}
diff --git a/android/hardware/camera2/legacy/LegacyFocusStateMapper.java b/android/hardware/camera2/legacy/LegacyFocusStateMapper.java
new file mode 100644
index 00000000..d33c09ea
--- /dev/null
+++ b/android/hardware/camera2/legacy/LegacyFocusStateMapper.java
@@ -0,0 +1,321 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.legacy;
+
+import android.hardware.Camera;
+import android.hardware.Camera.Parameters;
+import android.hardware.camera2.impl.CameraMetadataNative;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.utils.ParamsUtils;
+import android.util.Log;
+
+import java.util.Objects;
+
+import static android.hardware.camera2.CaptureRequest.*;
+import static com.android.internal.util.Preconditions.*;
+
+/**
+ * Map capture request data into legacy focus state transitions.
+ *
+ * <p>This object will asynchronously process auto-focus changes, so no interaction
+ * with it is necessary beyond reading the current state and updating with the latest trigger.</p>
+ */
+@SuppressWarnings("deprecation")
+public class LegacyFocusStateMapper {
+ private static String TAG = "LegacyFocusStateMapper";
+ private static final boolean DEBUG = false;
+
+ private final Camera mCamera;
+
+ private int mAfStatePrevious = CONTROL_AF_STATE_INACTIVE;
+ private String mAfModePrevious = null;
+
+ /** Guard mAfRun and mAfState */
+ private final Object mLock = new Object();
+ /** Guard access with mLock */
+ private int mAfRun = 0;
+ /** Guard access with mLock */
+ private int mAfState = CONTROL_AF_STATE_INACTIVE;
+
+ /**
+ * Instantiate a new focus state mapper.
+ *
+ * @param camera a non-{@code null} camera1 device
+ *
+ * @throws NullPointerException if any of the args were {@code null}
+ */
+ public LegacyFocusStateMapper(Camera camera) {
+ mCamera = checkNotNull(camera, "camera must not be null");
+ }
+
+ /**
+ * Process the AF triggers from the request as a camera1 autofocus routine.
+ *
+ * <p>This method should be called after the parameters are {@link LegacyRequestMapper mapped}
+ * with the request.</p>
+ *
+ * <p>Callbacks are processed in the background, and the next call to {@link #mapResultTriggers}
+ * will have the latest AF state as reflected by the camera1 callbacks.</p>
+ *
+ * <p>None of the arguments will be mutated.</p>
+ *
+ * @param captureRequest a non-{@code null} request
+ * @param parameters a non-{@code null} parameters corresponding to this request (read-only)
+ */
+ public void processRequestTriggers(CaptureRequest captureRequest,
+ Camera.Parameters parameters) {
+ checkNotNull(captureRequest, "captureRequest must not be null");
+
+ /*
+ * control.afTrigger
+ */
+ int afTrigger = ParamsUtils.getOrDefault(captureRequest, CONTROL_AF_TRIGGER,
+ CONTROL_AF_TRIGGER_IDLE);
+
+ final String afMode = parameters.getFocusMode();
+
+ if (!Objects.equals(mAfModePrevious, afMode)) {
+ if (DEBUG) {
+ Log.v(TAG, "processRequestTriggers - AF mode switched from " + mAfModePrevious +
+ " to " + afMode);
+ }
+
+ // Switching modes always goes back to INACTIVE; ignore callbacks from previous modes
+
+ synchronized (mLock) {
+ ++mAfRun;
+ mAfState = CONTROL_AF_STATE_INACTIVE;
+ }
+ mCamera.cancelAutoFocus();
+ }
+
+ mAfModePrevious = afMode;
+
+ // Passive AF Scanning
+ {
+ final int currentAfRun;
+
+ synchronized (mLock) {
+ currentAfRun = mAfRun;
+ }
+
+ Camera.AutoFocusMoveCallback afMoveCallback = new Camera.AutoFocusMoveCallback() {
+ @Override
+ public void onAutoFocusMoving(boolean start, Camera camera) {
+ synchronized (mLock) {
+ int latestAfRun = mAfRun;
+
+ if (DEBUG) {
+ Log.v(TAG,
+ "onAutoFocusMoving - start " + start + " latest AF run " +
+ latestAfRun + ", last AF run " + currentAfRun
+ );
+ }
+
+ if (currentAfRun != latestAfRun) {
+ Log.d(TAG,
+ "onAutoFocusMoving - ignoring move callbacks from old af run"
+ + currentAfRun
+ );
+ return;
+ }
+
+ int newAfState = start ?
+ CONTROL_AF_STATE_PASSIVE_SCAN :
+ CONTROL_AF_STATE_PASSIVE_FOCUSED;
+ // We never send CONTROL_AF_STATE_PASSIVE_UNFOCUSED
+
+ switch (afMode) {
+ case Parameters.FOCUS_MODE_CONTINUOUS_PICTURE:
+ case Parameters.FOCUS_MODE_CONTINUOUS_VIDEO:
+ break;
+ // This callback should never be sent in any other AF mode
+ default:
+ Log.w(TAG, "onAutoFocus - got unexpected onAutoFocus in mode "
+ + afMode);
+
+ }
+
+ mAfState = newAfState;
+ }
+ }
+ };
+
+ // Only set move callback if we can call autofocus.
+ switch (afMode) {
+ case Parameters.FOCUS_MODE_AUTO:
+ case Parameters.FOCUS_MODE_MACRO:
+ case Parameters.FOCUS_MODE_CONTINUOUS_PICTURE:
+ case Parameters.FOCUS_MODE_CONTINUOUS_VIDEO:
+ mCamera.setAutoFocusMoveCallback(afMoveCallback);
+ }
+ }
+
+
+ // AF Locking
+ switch (afTrigger) {
+ case CONTROL_AF_TRIGGER_START:
+
+ int afStateAfterStart;
+ switch (afMode) {
+ case Parameters.FOCUS_MODE_AUTO:
+ case Parameters.FOCUS_MODE_MACRO:
+ afStateAfterStart = CONTROL_AF_STATE_ACTIVE_SCAN;
+ break;
+ case Parameters.FOCUS_MODE_CONTINUOUS_PICTURE:
+ case Parameters.FOCUS_MODE_CONTINUOUS_VIDEO:
+ afStateAfterStart = CONTROL_AF_STATE_PASSIVE_SCAN;
+ break;
+ default:
+ // EDOF, INFINITY
+ afStateAfterStart = CONTROL_AF_STATE_INACTIVE;
+ }
+
+ final int currentAfRun;
+ synchronized (mLock) {
+ currentAfRun = ++mAfRun;
+ mAfState = afStateAfterStart;
+ }
+
+ if (DEBUG) {
+ Log.v(TAG, "processRequestTriggers - got AF_TRIGGER_START, " +
+ "new AF run is " + currentAfRun);
+ }
+
+ // Avoid calling autofocus unless we are in a state that supports calling this.
+ if (afStateAfterStart == CONTROL_AF_STATE_INACTIVE) {
+ break;
+ }
+
+ mCamera.autoFocus(new Camera.AutoFocusCallback() {
+ @Override
+ public void onAutoFocus(boolean success, Camera camera) {
+ synchronized (mLock) {
+ int latestAfRun = mAfRun;
+
+ if (DEBUG) {
+ Log.v(TAG, "onAutoFocus - success " + success + " latest AF run " +
+ latestAfRun + ", last AF run " + currentAfRun);
+ }
+
+ // Ignore old auto-focus results, since another trigger was requested
+ if (latestAfRun != currentAfRun) {
+ Log.d(TAG, String.format("onAutoFocus - ignoring AF callback " +
+ "(old run %d, new run %d)", currentAfRun, latestAfRun));
+
+ return;
+ }
+
+ int newAfState = success ?
+ CONTROL_AF_STATE_FOCUSED_LOCKED :
+ CONTROL_AF_STATE_NOT_FOCUSED_LOCKED;
+
+ switch (afMode) {
+ case Parameters.FOCUS_MODE_AUTO:
+ case Parameters.FOCUS_MODE_CONTINUOUS_PICTURE:
+ case Parameters.FOCUS_MODE_CONTINUOUS_VIDEO:
+ case Parameters.FOCUS_MODE_MACRO:
+ break;
+ // This callback should never be sent in any other AF mode
+ default:
+ Log.w(TAG, "onAutoFocus - got unexpected onAutoFocus in mode "
+ + afMode);
+
+ }
+
+ mAfState = newAfState;
+ }
+ }
+ });
+
+ break;
+            case CONTROL_AF_TRIGGER_CANCEL:
+                synchronized (mLock) {
+                    int updatedAfRun = ++mAfRun;
+                    mAfState = CONTROL_AF_STATE_INACTIVE;
+
+                    mCamera.cancelAutoFocus();
+
+                    if (DEBUG) {
+                        Log.v(TAG, "processRequestTriggers - got AF_TRIGGER_CANCEL, " +
+                                "new AF run is " + updatedAfRun);
+                    }
+                }
+
+                break;
+ case CONTROL_AF_TRIGGER_IDLE:
+ // No action necessary. The callbacks will handle transitions.
+ break;
+ default:
+ Log.w(TAG, "processRequestTriggers - ignoring unknown control.afTrigger = "
+ + afTrigger);
+ }
+ }
+
+ /**
+ * Update the {@code result} camera metadata map with the new value for the
+ * {@code control.afState}.
+ *
+ * <p>AF callbacks are processed in the background, and each call to {@link #mapResultTriggers}
+ * will have the latest AF state as reflected by the camera1 callbacks.</p>
+ *
+ * @param result a non-{@code null} result
+ */
+ public void mapResultTriggers(CameraMetadataNative result) {
+ checkNotNull(result, "result must not be null");
+
+ int newAfState;
+ synchronized (mLock) {
+ newAfState = mAfState;
+ }
+
+ if (DEBUG && newAfState != mAfStatePrevious) {
+ Log.v(TAG, String.format("mapResultTriggers - afState changed from %s to %s",
+ afStateToString(mAfStatePrevious), afStateToString(newAfState)));
+ }
+
+ result.set(CaptureResult.CONTROL_AF_STATE, newAfState);
+
+ mAfStatePrevious = newAfState;
+ }
+
+ private static String afStateToString(int afState) {
+ switch (afState) {
+ case CONTROL_AF_STATE_ACTIVE_SCAN:
+ return "ACTIVE_SCAN";
+ case CONTROL_AF_STATE_FOCUSED_LOCKED:
+ return "FOCUSED_LOCKED";
+ case CONTROL_AF_STATE_INACTIVE:
+ return "INACTIVE";
+ case CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
+ return "NOT_FOCUSED_LOCKED";
+ case CONTROL_AF_STATE_PASSIVE_FOCUSED:
+ return "PASSIVE_FOCUSED";
+ case CONTROL_AF_STATE_PASSIVE_SCAN:
+ return "PASSIVE_SCAN";
+ case CONTROL_AF_STATE_PASSIVE_UNFOCUSED:
+ return "PASSIVE_UNFOCUSED";
+            default:
+ return "UNKNOWN(" + afState + ")";
+ }
+ }
+}
diff --git a/android/hardware/camera2/legacy/LegacyMetadataMapper.java b/android/hardware/camera2/legacy/LegacyMetadataMapper.java
new file mode 100644
index 00000000..5423ca97
--- /dev/null
+++ b/android/hardware/camera2/legacy/LegacyMetadataMapper.java
@@ -0,0 +1,1511 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.legacy;
+
+import android.graphics.ImageFormat;
+import android.graphics.PixelFormat;
+import android.graphics.Rect;
+import android.hardware.Camera;
+import android.hardware.Camera.CameraInfo;
+import android.hardware.Camera.Parameters;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.impl.CameraMetadataNative;
+import android.hardware.camera2.params.MeteringRectangle;
+import android.hardware.camera2.params.StreamConfiguration;
+import android.hardware.camera2.params.StreamConfigurationDuration;
+import android.hardware.camera2.utils.ArrayUtils;
+import android.hardware.camera2.utils.ListUtils;
+import android.hardware.camera2.utils.ParamsUtils;
+import android.util.Log;
+import android.util.Range;
+import android.util.Size;
+import android.util.SizeF;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+
+import static com.android.internal.util.Preconditions.*;
+import static android.hardware.camera2.CameraCharacteristics.*;
+import static android.hardware.camera2.legacy.ParameterUtils.*;
+
+/**
+ * Provide legacy-specific implementations of camera2 metadata for legacy devices, such as the
+ * camera characteristics.
+ */
+@SuppressWarnings("deprecation")
+public class LegacyMetadataMapper {
+ private static final String TAG = "LegacyMetadataMapper";
+ private static final boolean DEBUG = false;
+
+ private static final long NS_PER_MS = 1000000;
+
+ // from graphics.h
+ public static final int HAL_PIXEL_FORMAT_RGBA_8888 = PixelFormat.RGBA_8888;
+ public static final int HAL_PIXEL_FORMAT_BGRA_8888 = 0x5;
+ public static final int HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED = 0x22;
+ public static final int HAL_PIXEL_FORMAT_BLOB = 0x21;
+
+ // for metadata
+ private static final float LENS_INFO_MINIMUM_FOCUS_DISTANCE_FIXED_FOCUS = 0.0f;
+
+ private static final int REQUEST_MAX_NUM_OUTPUT_STREAMS_COUNT_RAW = 0; // no raw support
+ private static final int REQUEST_MAX_NUM_OUTPUT_STREAMS_COUNT_PROC = 3; // preview, video, cb
+ private static final int REQUEST_MAX_NUM_OUTPUT_STREAMS_COUNT_PROC_STALL = 1; // 1 jpeg only
+ private static final int REQUEST_MAX_NUM_INPUT_STREAMS_COUNT = 0; // no reprocessing
+
+ /** Assume 3 HAL1 stages: Exposure, Read-out, Post-Processing */
+ private static final int REQUEST_PIPELINE_MAX_DEPTH_HAL1 = 3;
+ /** Assume 3 shim stages: Preview input, Split output, Format conversion for output */
+ private static final int REQUEST_PIPELINE_MAX_DEPTH_OURS = 3;
+ /* TODO: Update above maxDepth values once we do more performance measurements */
+
+ // For approximating JPEG stall durations
+ private static final long APPROXIMATE_CAPTURE_DELAY_MS = 200; // 200 milliseconds
+ private static final long APPROXIMATE_SENSOR_AREA_PX = (1 << 23); // 8 megapixels
+ private static final long APPROXIMATE_JPEG_ENCODE_TIME_MS = 600; // 600 milliseconds
+
+ static final int UNKNOWN_MODE = -1;
+
+ // Maximum difference between a preview size aspect ratio and a jpeg size aspect ratio
+ private static final float PREVIEW_ASPECT_RATIO_TOLERANCE = 0.01f;
+
+ /*
+ * Development hijinks: Lie about not supporting certain capabilities
+ *
+ * - Unblock some CTS tests from running whose main intent is not the metadata itself
+ *
+ * TODO: Remove these constants and strip out any code that previously relied on them
+ * being set to true.
+ */
+ static final boolean LIE_ABOUT_AE_STATE = false;
+ static final boolean LIE_ABOUT_AE_MAX_REGIONS = false;
+ static final boolean LIE_ABOUT_AF = false;
+ static final boolean LIE_ABOUT_AF_MAX_REGIONS = false;
+ static final boolean LIE_ABOUT_AWB_STATE = false;
+ static final boolean LIE_ABOUT_AWB = false;
+
+
+ /**
+ * Create characteristics for a legacy device by mapping the {@code parameters}
+ * and {@code info}
+ *
+ * @param parameters A non-{@code null} parameters set
+ * @param info Camera info with camera facing direction and angle of orientation
+ *
+ * @return static camera characteristics for a camera device
+ *
+ * @throws NullPointerException if any of the args were {@code null}
+ */
+ public static CameraCharacteristics createCharacteristics(Camera.Parameters parameters,
+ CameraInfo info) {
+ checkNotNull(parameters, "parameters must not be null");
+ checkNotNull(info, "info must not be null");
+
+ String paramStr = parameters.flatten();
+ android.hardware.CameraInfo outerInfo = new android.hardware.CameraInfo();
+ outerInfo.info = info;
+
+ return createCharacteristics(paramStr, outerInfo);
+ }
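+
+    /*
+     * Example usage (hypothetical; assumes camera id 0 is a valid camera1 device):
+     *
+     * <pre>
+     * Camera camera = Camera.open(0);
+     * CameraInfo info = new CameraInfo();
+     * Camera.getCameraInfo(0, info);
+     * CameraCharacteristics chars =
+     *         LegacyMetadataMapper.createCharacteristics(camera.getParameters(), info);
+     * </pre>
+     */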
+
+ /**
+ * Create characteristics for a legacy device by mapping the {@code parameters}
+ * and {@code info}
+ *
+ * @param parameters A string parseable by {@link Camera.Parameters#unflatten}
+ * @param info Camera info with camera facing direction and angle of orientation
+ * @return static camera characteristics for a camera device
+ *
+ * @throws NullPointerException if any of the args were {@code null}
+ */
+ public static CameraCharacteristics createCharacteristics(String parameters,
+ android.hardware.CameraInfo info) {
+ checkNotNull(parameters, "parameters must not be null");
+ checkNotNull(info, "info must not be null");
+ checkNotNull(info.info, "info.info must not be null");
+
+ CameraMetadataNative m = new CameraMetadataNative();
+
+ mapCharacteristicsFromInfo(m, info.info);
+
+ Camera.Parameters params = Camera.getEmptyParameters();
+ params.unflatten(parameters);
+ mapCharacteristicsFromParameters(m, params);
+
+ if (DEBUG) {
+ Log.v(TAG, "createCharacteristics metadata:");
+ Log.v(TAG, "--------------------------------------------------- (start)");
+ m.dumpToLog();
+ Log.v(TAG, "--------------------------------------------------- (end)");
+ }
+
+ return new CameraCharacteristics(m);
+ }
+
+ private static void mapCharacteristicsFromInfo(CameraMetadataNative m, CameraInfo i) {
+ m.set(LENS_FACING, i.facing == CameraInfo.CAMERA_FACING_BACK ?
+ LENS_FACING_BACK : LENS_FACING_FRONT);
+ m.set(SENSOR_ORIENTATION, i.orientation);
+ }
+
+ private static void mapCharacteristicsFromParameters(CameraMetadataNative m,
+ Camera.Parameters p) {
+
+ /*
+ * colorCorrection.*
+ */
+ m.set(COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
+ new int[] { COLOR_CORRECTION_ABERRATION_MODE_FAST,
+ COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY });
+ /*
+ * control.ae*
+ */
+ mapControlAe(m, p);
+ /*
+ * control.af*
+ */
+ mapControlAf(m, p);
+ /*
+ * control.awb*
+ */
+ mapControlAwb(m, p);
+ /*
+ * control.*
+ * - Anything that doesn't have a set of related fields
+ */
+ mapControlOther(m, p);
+ /*
+ * lens.*
+ */
+ mapLens(m, p);
+ /*
+ * flash.*
+ */
+ mapFlash(m, p);
+ /*
+ * jpeg.*
+ */
+ mapJpeg(m, p);
+
+ /*
+ * noiseReduction.*
+ */
+ m.set(NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
+ new int[] { NOISE_REDUCTION_MODE_FAST,
+ NOISE_REDUCTION_MODE_HIGH_QUALITY});
+
+ /*
+ * scaler.*
+ */
+ mapScaler(m, p);
+
+ /*
+ * sensor.*
+ */
+ mapSensor(m, p);
+
+ /*
+ * statistics.*
+ */
+ mapStatistics(m, p);
+
+ /*
+ * sync.*
+ */
+ mapSync(m, p);
+
+ /*
+ * info.supportedHardwareLevel
+ */
+ m.set(INFO_SUPPORTED_HARDWARE_LEVEL, INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY);
+
+ /*
+ * scaler.availableStream*, scaler.available*Durations, sensor.info.maxFrameDuration
+ */
+ mapScalerStreamConfigs(m, p);
+
+ // Order matters below: Put this last so that we can read the metadata set previously
+
+ /*
+ * request.*
+ */
+ mapRequest(m, p);
+
+ }
+
+ private static void mapScalerStreamConfigs(CameraMetadataNative m, Camera.Parameters p) {
+
+ ArrayList<StreamConfiguration> availableStreamConfigs = new ArrayList<>();
+ /*
+ * Implementation-defined (preview, recording, etc) -> use camera1 preview sizes
+ * YUV_420_888 cpu callbacks -> use camera1 preview sizes
+ * Other preview callbacks (CPU) -> use camera1 preview sizes
+ * JPEG still capture -> use camera1 still capture sizes
+ *
+ * Use platform-internal format constants here, since StreamConfigurationMap does the
+ * remapping to public format constants.
+ */
+ List<Camera.Size> previewSizes = p.getSupportedPreviewSizes();
+ List<Camera.Size> jpegSizes = p.getSupportedPictureSizes();
+ /*
+         * Work-around for b/17589233:
+         *  - On some HALs, the largest preview size's aspect ratio does not match the
+         *    largest JPEG size's aspect ratio
+         *  - This causes many problems with focus/metering, which are specified relative
+         *    to the preview, making the difference between the JPEG and preview viewports
+         *    inaccessible to applications
+         *  - This boils down to metering or focusing areas being "arbitrarily" cropped
+         *    in the capture result.
+         *  - Work around the HAL limitation by removing the largest preview sizes
+         *    until we find one with the same aspect ratio as the largest JPEG size.
+ */
+ {
+ SizeAreaComparator areaComparator = new SizeAreaComparator();
+
+ // Sort preview to min->max
+ Collections.sort(previewSizes, areaComparator);
+
+ Camera.Size maxJpegSize = SizeAreaComparator.findLargestByArea(jpegSizes);
+ float jpegAspectRatio = maxJpegSize.width * 1.0f / maxJpegSize.height;
+
+ if (DEBUG) {
+ Log.v(TAG, String.format("mapScalerStreamConfigs - largest JPEG area %dx%d, AR=%f",
+ maxJpegSize.width, maxJpegSize.height, jpegAspectRatio));
+ }
+
+ // Now remove preview sizes from the end (largest->smallest) until aspect ratio matches
+ while (!previewSizes.isEmpty()) {
+ int index = previewSizes.size() - 1; // max is always at the end
+ Camera.Size size = previewSizes.get(index);
+
+ float previewAspectRatio = size.width * 1.0f / size.height;
+
+ if (Math.abs(jpegAspectRatio - previewAspectRatio) >=
+ PREVIEW_ASPECT_RATIO_TOLERANCE) {
+ previewSizes.remove(index); // Assume removing from end is O(1)
+
+ if (DEBUG) {
+                    Log.v(TAG, String.format(
+                            "mapScalerStreamConfigs - removed preview size %dx%d, AR=%f "
+                                    + "did not match the JPEG aspect ratio",
+                            size.width, size.height, previewAspectRatio));
+ }
+ } else {
+ break;
+ }
+ }
+
+ if (previewSizes.isEmpty()) {
+ // Fall-back to the original faulty behavior, but at least work
+ Log.w(TAG, "mapScalerStreamConfigs - failed to find any preview size matching " +
+ "JPEG aspect ratio " + jpegAspectRatio);
+ previewSizes = p.getSupportedPreviewSizes();
+ }
+
+ // Sort again, this time in descending order max->min
+ Collections.sort(previewSizes, Collections.reverseOrder(areaComparator));
+ }
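+
+        /*
+         * Illustration with hypothetical sizes: given a largest JPEG of 4000x3000 (4:3)
+         * and preview sizes {640x480, 1440x1080, 1920x1080}, the 16:9 1920x1080 entry
+         * is removed and 1440x1080 becomes the largest advertised preview size.
+         */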
+
+ appendStreamConfig(availableStreamConfigs,
+ HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, previewSizes);
+ appendStreamConfig(availableStreamConfigs,
+ ImageFormat.YUV_420_888, previewSizes);
+ for (int format : p.getSupportedPreviewFormats()) {
+ if (ImageFormat.isPublicFormat(format) && format != ImageFormat.NV21) {
+ appendStreamConfig(availableStreamConfigs, format, previewSizes);
+ } else if (DEBUG) {
+ /*
+ * Do not add any formats unknown to us
+ * (since it would fail runtime checks in StreamConfigurationMap)
+ */
+                Log.v(TAG,
+                        String.format("mapScalerStreamConfigs - Skipping format %x", format));
+ }
+ }
+
+ appendStreamConfig(availableStreamConfigs,
+ HAL_PIXEL_FORMAT_BLOB, p.getSupportedPictureSizes());
+ /*
+ * scaler.availableStreamConfigurations
+ */
+ m.set(SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
+ availableStreamConfigs.toArray(new StreamConfiguration[0]));
+
+ /*
+ * scaler.availableMinFrameDurations
+ */
+ // No frame durations available
+ m.set(SCALER_AVAILABLE_MIN_FRAME_DURATIONS, new StreamConfigurationDuration[0]);
+
+ StreamConfigurationDuration[] jpegStalls =
+ new StreamConfigurationDuration[jpegSizes.size()];
+ int i = 0;
+ long longestStallDuration = -1;
+ for (Camera.Size s : jpegSizes) {
+ long stallDuration = calculateJpegStallDuration(s);
+ jpegStalls[i++] = new StreamConfigurationDuration(HAL_PIXEL_FORMAT_BLOB, s.width,
+ s.height, stallDuration);
+ if (longestStallDuration < stallDuration) {
+ longestStallDuration = stallDuration;
+ }
+ }
+ /*
+ * scaler.availableStallDurations
+ */
+ // Set stall durations for jpeg, other formats use default stall duration
+ m.set(SCALER_AVAILABLE_STALL_DURATIONS, jpegStalls);
+
+ /*
+ * sensor.info.maxFrameDuration
+ */
+ m.set(SENSOR_INFO_MAX_FRAME_DURATION, longestStallDuration);
+ }
+
+ @SuppressWarnings({"unchecked"})
+ private static void mapControlAe(CameraMetadataNative m, Camera.Parameters p) {
+ /*
+ * control.aeAvailableAntiBandingModes
+ */
+ List<String> antiBandingModes = p.getSupportedAntibanding();
+ if (antiBandingModes != null && antiBandingModes.size() > 0) { // antibanding is optional
+ int[] modes = new int[antiBandingModes.size()];
+ int j = 0;
+ for (String mode : antiBandingModes) {
+ int convertedMode = convertAntiBandingMode(mode);
+                if (convertedMode == -1) {
+                    // Skip unconditionally; otherwise -1 would leak into the mode array
+                    Log.w(TAG, "Antibanding mode " + ((mode == null) ? "NULL" : mode) +
+                            " not supported, skipping...");
+ } else {
+ modes[j++] = convertedMode;
+ }
+ }
+ m.set(CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, Arrays.copyOf(modes, j));
+ } else {
+ m.set(CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, new int[0]);
+ }
+
+ /*
+ * control.aeAvailableTargetFpsRanges
+ */
+ {
+ List<int[]> fpsRanges = p.getSupportedPreviewFpsRange();
+ if (fpsRanges == null) {
+ throw new AssertionError("Supported FPS ranges cannot be null.");
+ }
+ int rangesSize = fpsRanges.size();
+ if (rangesSize <= 0) {
+ throw new AssertionError("At least one FPS range must be supported.");
+ }
+ Range<Integer>[] ranges = new Range[rangesSize];
+ int i = 0;
+ for (int[] r : fpsRanges) {
+ ranges[i++] = Range.create(
+ (int) Math.floor(r[Camera.Parameters.PREVIEW_FPS_MIN_INDEX] / 1000.0),
+ (int) Math.ceil(r[Camera.Parameters.PREVIEW_FPS_MAX_INDEX] / 1000.0));
+ }
+ m.set(CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, ranges);
+ }
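+
+        /*
+         * Example with hypothetical HAL values: a camera1 range of [15000, 30000]
+         * (FPS * 1000) maps to Range[15, 30]; [7500, 29970] floors/ceils to Range[7, 30].
+         */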
+
+ /*
+ * control.aeAvailableModes
+ */
+ {
+ List<String> flashModes = p.getSupportedFlashModes();
+
+ String[] flashModeStrings = new String[] {
+ Camera.Parameters.FLASH_MODE_OFF,
+ Camera.Parameters.FLASH_MODE_AUTO,
+ Camera.Parameters.FLASH_MODE_ON,
+ Camera.Parameters.FLASH_MODE_RED_EYE,
+ // Map these manually
+ Camera.Parameters.FLASH_MODE_TORCH,
+ };
+ int[] flashModeInts = new int[] {
+ CONTROL_AE_MODE_ON,
+ CONTROL_AE_MODE_ON_AUTO_FLASH,
+ CONTROL_AE_MODE_ON_ALWAYS_FLASH,
+ CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE
+ };
+ int[] aeAvail = ArrayUtils.convertStringListToIntArray(
+ flashModes, flashModeStrings, flashModeInts);
+
+ // No flash control -> AE is always on
+ if (aeAvail == null || aeAvail.length == 0) {
+ aeAvail = new int[] {
+ CONTROL_AE_MODE_ON
+ };
+ }
+
+ // Note that AE_MODE_OFF is never available.
+ m.set(CONTROL_AE_AVAILABLE_MODES, aeAvail);
+ }
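+
+        /*
+         * Example with a hypothetical device: supported flash modes {off, auto, on, torch}
+         * map to {CONTROL_AE_MODE_ON, CONTROL_AE_MODE_ON_AUTO_FLASH,
+         * CONTROL_AE_MODE_ON_ALWAYS_FLASH}; "torch" has no AE-mode equivalent here (it is
+         * mapped manually via FLASH_MODE), and a device with no flash modes at all still
+         * reports {CONTROL_AE_MODE_ON}.
+         */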
+
+ /*
+ * control.aeCompensationRanges
+ */
+ {
+ int min = p.getMinExposureCompensation();
+ int max = p.getMaxExposureCompensation();
+
+ m.set(CONTROL_AE_COMPENSATION_RANGE, Range.create(min, max));
+ }
+
+ /*
+ * control.aeCompensationStep
+ */
+ {
+ float step = p.getExposureCompensationStep();
+
+ m.set(CONTROL_AE_COMPENSATION_STEP, ParamsUtils.createRational(step));
+ }
+
+ /*
+ * control.aeLockAvailable
+ */
+ {
+ boolean aeLockAvailable = p.isAutoExposureLockSupported();
+
+ m.set(CONTROL_AE_LOCK_AVAILABLE, aeLockAvailable);
+ }
+ }
+
+
+ @SuppressWarnings({"unchecked"})
+ private static void mapControlAf(CameraMetadataNative m, Camera.Parameters p) {
+ /*
+ * control.afAvailableModes
+ */
+ {
+ List<String> focusModes = p.getSupportedFocusModes();
+
+ String[] focusModeStrings = new String[] {
+ Camera.Parameters.FOCUS_MODE_AUTO,
+ Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE,
+ Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO,
+ Camera.Parameters.FOCUS_MODE_EDOF,
+ Camera.Parameters.FOCUS_MODE_INFINITY,
+ Camera.Parameters.FOCUS_MODE_MACRO,
+ Camera.Parameters.FOCUS_MODE_FIXED,
+ };
+
+ int[] focusModeInts = new int[] {
+ CONTROL_AF_MODE_AUTO,
+ CONTROL_AF_MODE_CONTINUOUS_PICTURE,
+ CONTROL_AF_MODE_CONTINUOUS_VIDEO,
+ CONTROL_AF_MODE_EDOF,
+ CONTROL_AF_MODE_OFF,
+ CONTROL_AF_MODE_MACRO,
+ CONTROL_AF_MODE_OFF
+ };
+
+ List<Integer> afAvail = ArrayUtils.convertStringListToIntList(
+ focusModes, focusModeStrings, focusModeInts);
+
+            // No AF modes supported? That should be impossible!
+ if (afAvail == null || afAvail.size() == 0) {
+ Log.w(TAG, "No AF modes supported (HAL bug); defaulting to AF_MODE_OFF only");
+ afAvail = new ArrayList<Integer>(/*capacity*/1);
+ afAvail.add(CONTROL_AF_MODE_OFF);
+ }
+
+ m.set(CONTROL_AF_AVAILABLE_MODES, ArrayUtils.toIntArray(afAvail));
+
+ if (DEBUG) {
+ Log.v(TAG, "mapControlAf - control.afAvailableModes set to " +
+ ListUtils.listToString(afAvail));
+ }
+ }
+ }
+
+ private static void mapControlAwb(CameraMetadataNative m, Camera.Parameters p) {
+ /*
+ * control.awbAvailableModes
+ */
+
+ {
+ List<String> wbModes = p.getSupportedWhiteBalance();
+
+            String[] wbModeStrings = new String[] {
+                    Camera.Parameters.WHITE_BALANCE_AUTO,
+                    Camera.Parameters.WHITE_BALANCE_INCANDESCENT,
+                    Camera.Parameters.WHITE_BALANCE_FLUORESCENT,
+                    Camera.Parameters.WHITE_BALANCE_WARM_FLUORESCENT,
+                    Camera.Parameters.WHITE_BALANCE_DAYLIGHT,
+                    Camera.Parameters.WHITE_BALANCE_CLOUDY_DAYLIGHT,
+                    Camera.Parameters.WHITE_BALANCE_TWILIGHT,
+                    Camera.Parameters.WHITE_BALANCE_SHADE,
+            };
+
+            int[] wbModeInts = new int[] {
+                    CONTROL_AWB_MODE_AUTO,
+                    CONTROL_AWB_MODE_INCANDESCENT,
+                    CONTROL_AWB_MODE_FLUORESCENT,
+                    CONTROL_AWB_MODE_WARM_FLUORESCENT,
+                    CONTROL_AWB_MODE_DAYLIGHT,
+                    CONTROL_AWB_MODE_CLOUDY_DAYLIGHT,
+                    CONTROL_AWB_MODE_TWILIGHT,
+                    CONTROL_AWB_MODE_SHADE,
+                    // Note that CONTROL_AWB_MODE_OFF is unsupported
+            };
+
+ List<Integer> awbAvail = ArrayUtils.convertStringListToIntList(
+ wbModes, wbModeStrings, wbModeInts);
+
+            // No AWB modes supported? That should be impossible!
+ if (awbAvail == null || awbAvail.size() == 0) {
+ Log.w(TAG, "No AWB modes supported (HAL bug); defaulting to AWB_MODE_AUTO only");
+ awbAvail = new ArrayList<Integer>(/*capacity*/1);
+ awbAvail.add(CONTROL_AWB_MODE_AUTO);
+ }
+
+ m.set(CONTROL_AWB_AVAILABLE_MODES, ArrayUtils.toIntArray(awbAvail));
+
+ if (DEBUG) {
+ Log.v(TAG, "mapControlAwb - control.awbAvailableModes set to " +
+ ListUtils.listToString(awbAvail));
+ }
+
+
+ /*
+ * control.awbLockAvailable
+ */
+ {
+ boolean awbLockAvailable = p.isAutoWhiteBalanceLockSupported();
+
+ m.set(CONTROL_AWB_LOCK_AVAILABLE, awbLockAvailable);
+ }
+ }
+ }
+
+ private static void mapControlOther(CameraMetadataNative m, Camera.Parameters p) {
+ /*
+ * android.control.availableVideoStabilizationModes
+ */
+ {
+ int stabModes[] = p.isVideoStabilizationSupported() ?
+ new int[] { CONTROL_VIDEO_STABILIZATION_MODE_OFF,
+ CONTROL_VIDEO_STABILIZATION_MODE_ON } :
+ new int[] { CONTROL_VIDEO_STABILIZATION_MODE_OFF };
+
+ m.set(CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, stabModes);
+ }
+
+ /*
+ * android.control.maxRegions
+ */
+ final int AE = 0, AWB = 1, AF = 2;
+
+ int[] maxRegions = new int[3];
+ maxRegions[AE] = p.getMaxNumMeteringAreas();
+ maxRegions[AWB] = 0; // AWB regions not supported in API1
+ maxRegions[AF] = p.getMaxNumFocusAreas();
+
+ if (LIE_ABOUT_AE_MAX_REGIONS) {
+ maxRegions[AE] = 0;
+ }
+ if (LIE_ABOUT_AF_MAX_REGIONS) {
+ maxRegions[AF] = 0;
+ }
+
+ m.set(CONTROL_MAX_REGIONS, maxRegions);
+
+ /*
+ * android.control.availableEffects
+ */
+ List<String> effectModes = p.getSupportedColorEffects();
+ int[] supportedEffectModes = (effectModes == null) ? new int[0] :
+ ArrayUtils.convertStringListToIntArray(effectModes, sLegacyEffectMode,
+ sEffectModes);
+ m.set(CONTROL_AVAILABLE_EFFECTS, supportedEffectModes);
+
+ /*
+ * android.control.availableSceneModes
+ */
+ int maxNumDetectedFaces = p.getMaxNumDetectedFaces();
+ List<String> sceneModes = p.getSupportedSceneModes();
+ List<Integer> supportedSceneModes =
+ ArrayUtils.convertStringListToIntList(sceneModes, sLegacySceneModes, sSceneModes);
+
+ // Special case where the only scene mode listed is AUTO => no scene mode
+ if (sceneModes != null && sceneModes.size() == 1 &&
+ sceneModes.get(0).equals(Parameters.SCENE_MODE_AUTO)) {
+ supportedSceneModes = null;
+ }
+
+ boolean sceneModeSupported = true;
+ if (supportedSceneModes == null && maxNumDetectedFaces == 0) {
+ sceneModeSupported = false;
+ }
+
+ if (sceneModeSupported) {
+ if (supportedSceneModes == null) {
+ supportedSceneModes = new ArrayList<Integer>();
+ }
+ if (maxNumDetectedFaces > 0) { // always supports FACE_PRIORITY when face detecting
+ supportedSceneModes.add(CONTROL_SCENE_MODE_FACE_PRIORITY);
+ }
+ // Remove all DISABLED occurrences
+ if (supportedSceneModes.contains(CONTROL_SCENE_MODE_DISABLED)) {
+                while (supportedSceneModes.remove(
+                        Integer.valueOf(CONTROL_SCENE_MODE_DISABLED))) {}
+ }
+ m.set(CONTROL_AVAILABLE_SCENE_MODES, ArrayUtils.toIntArray(supportedSceneModes));
+ } else {
+ m.set(CONTROL_AVAILABLE_SCENE_MODES, new int[] {CONTROL_SCENE_MODE_DISABLED});
+ }
+
+ /*
+ * android.control.availableModes
+ */
+ m.set(CONTROL_AVAILABLE_MODES, sceneModeSupported ?
+ new int[] { CONTROL_MODE_AUTO, CONTROL_MODE_USE_SCENE_MODE } :
+ new int[] { CONTROL_MODE_AUTO });
+ }
+
+ private static void mapLens(CameraMetadataNative m, Camera.Parameters p) {
+ /*
+ * We can tell if the lens is fixed focus;
+ * but if it's not, we can't tell the minimum focus distance, so leave it null then.
+ */
+ if (DEBUG) {
+ Log.v(TAG, "mapLens - focus-mode='" + p.getFocusMode() + "'");
+ }
+
+ if (Camera.Parameters.FOCUS_MODE_FIXED.equals(p.getFocusMode())) {
+ /*
+ * lens.info.minimumFocusDistance
+ */
+ m.set(LENS_INFO_MINIMUM_FOCUS_DISTANCE, LENS_INFO_MINIMUM_FOCUS_DISTANCE_FIXED_FOCUS);
+
+ if (DEBUG) {
+ Log.v(TAG, "mapLens - lens.info.minimumFocusDistance = 0");
+ }
+ } else {
+ if (DEBUG) {
+ Log.v(TAG, "mapLens - lens.info.minimumFocusDistance is unknown");
+ }
+ }
+
+ float[] focalLengths = new float[] { p.getFocalLength() };
+ m.set(LENS_INFO_AVAILABLE_FOCAL_LENGTHS, focalLengths);
+ }
+
+ private static void mapFlash(CameraMetadataNative m, Camera.Parameters p) {
+ boolean flashAvailable = false;
+ List<String> supportedFlashModes = p.getSupportedFlashModes();
+
+ if (supportedFlashModes != null) {
+ // If only 'OFF' is available, we don't really have flash support
+ flashAvailable = !ListUtils.listElementsEqualTo(
+ supportedFlashModes, Camera.Parameters.FLASH_MODE_OFF);
+ }
+
+ /*
+ * flash.info.available
+ */
+ m.set(FLASH_INFO_AVAILABLE, flashAvailable);
+ }
+
+ private static void mapJpeg(CameraMetadataNative m, Camera.Parameters p) {
+ List<Camera.Size> thumbnailSizes = p.getSupportedJpegThumbnailSizes();
+
+ if (thumbnailSizes != null) {
+ Size[] sizes = convertSizeListToArray(thumbnailSizes);
+ Arrays.sort(sizes, new android.hardware.camera2.utils.SizeAreaComparator());
+ m.set(JPEG_AVAILABLE_THUMBNAIL_SIZES, sizes);
+ }
+ }
+
+ private static void mapRequest(CameraMetadataNative m, Parameters p) {
+ /*
+ * request.availableCapabilities
+ */
+ int[] capabilities = { REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE };
+ m.set(REQUEST_AVAILABLE_CAPABILITIES, capabilities);
+
+ /*
+ * request.availableCharacteristicsKeys
+ */
+ {
+ // TODO: check if the underlying key is supported before listing a key as available
+
+ // Note: We only list public keys. Native HALs should list ALL keys regardless of visibility.
+
+            Key<?>[] availableKeys = new Key<?>[] {
+                    CameraCharacteristics.COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
+                    CameraCharacteristics.CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
+                    CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES,
+                    CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
+                    CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE,
+                    CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP,
+                    CameraCharacteristics.CONTROL_AE_LOCK_AVAILABLE,
+                    CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES,
+                    CameraCharacteristics.CONTROL_AVAILABLE_EFFECTS,
+                    CameraCharacteristics.CONTROL_AVAILABLE_MODES,
+                    CameraCharacteristics.CONTROL_AVAILABLE_SCENE_MODES,
+                    CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
+                    CameraCharacteristics.CONTROL_AWB_AVAILABLE_MODES,
+                    CameraCharacteristics.CONTROL_AWB_LOCK_AVAILABLE,
+                    CameraCharacteristics.CONTROL_MAX_REGIONS,
+                    CameraCharacteristics.FLASH_INFO_AVAILABLE,
+                    CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL,
+                    CameraCharacteristics.JPEG_AVAILABLE_THUMBNAIL_SIZES,
+                    CameraCharacteristics.LENS_FACING,
+                    CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
+                    CameraCharacteristics.NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
+                    CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES,
+                    CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_STREAMS,
+                    CameraCharacteristics.REQUEST_PARTIAL_RESULT_COUNT,
+                    CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH,
+                    CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
+//                    CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP,
+                    CameraCharacteristics.SCALER_CROPPING_TYPE,
+                    CameraCharacteristics.SENSOR_AVAILABLE_TEST_PATTERN_MODES,
+                    CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE,
+                    CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE,
+                    CameraCharacteristics.SENSOR_INFO_PIXEL_ARRAY_SIZE,
+                    CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE,
+                    CameraCharacteristics.SENSOR_ORIENTATION,
+                    CameraCharacteristics.STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
+                    CameraCharacteristics.STATISTICS_INFO_MAX_FACE_COUNT,
+                    CameraCharacteristics.SYNC_MAX_LATENCY,
+            };
+ List<Key<?>> characteristicsKeys = new ArrayList<>(Arrays.asList(availableKeys));
+
+ /*
+ * Add the conditional keys
+ */
+ if (m.get(LENS_INFO_MINIMUM_FOCUS_DISTANCE) != null) {
+ characteristicsKeys.add(LENS_INFO_MINIMUM_FOCUS_DISTANCE);
+ }
+
+ m.set(REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
+ getTagsForKeys(characteristicsKeys.toArray(new Key<?>[0])));
+ }
+
+ /*
+ * request.availableRequestKeys
+ */
+ {
+ CaptureRequest.Key<?> defaultAvailableKeys[] = new CaptureRequest.Key<?>[] {
+ CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE,
+ CaptureRequest.CONTROL_AE_ANTIBANDING_MODE,
+ CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION,
+ CaptureRequest.CONTROL_AE_LOCK,
+ CaptureRequest.CONTROL_AE_MODE,
+ CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
+ CaptureRequest.CONTROL_AF_MODE,
+ CaptureRequest.CONTROL_AF_TRIGGER,
+ CaptureRequest.CONTROL_AWB_LOCK,
+ CaptureRequest.CONTROL_AWB_MODE,
+ CaptureRequest.CONTROL_CAPTURE_INTENT,
+ CaptureRequest.CONTROL_EFFECT_MODE,
+ CaptureRequest.CONTROL_MODE,
+ CaptureRequest.CONTROL_SCENE_MODE,
+ CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
+ CaptureRequest.FLASH_MODE,
+ CaptureRequest.JPEG_GPS_COORDINATES,
+ CaptureRequest.JPEG_GPS_PROCESSING_METHOD,
+ CaptureRequest.JPEG_GPS_TIMESTAMP,
+ CaptureRequest.JPEG_ORIENTATION,
+ CaptureRequest.JPEG_QUALITY,
+ CaptureRequest.JPEG_THUMBNAIL_QUALITY,
+ CaptureRequest.JPEG_THUMBNAIL_SIZE,
+ CaptureRequest.LENS_FOCAL_LENGTH,
+ CaptureRequest.NOISE_REDUCTION_MODE,
+ CaptureRequest.SCALER_CROP_REGION,
+ CaptureRequest.STATISTICS_FACE_DETECT_MODE,
+ };
+ ArrayList<CaptureRequest.Key<?>> availableKeys =
+ new ArrayList<CaptureRequest.Key<?>>(Arrays.asList(defaultAvailableKeys));
+
+ if (p.getMaxNumMeteringAreas() > 0) {
+ availableKeys.add(CaptureRequest.CONTROL_AE_REGIONS);
+ }
+ if (p.getMaxNumFocusAreas() > 0) {
+ availableKeys.add(CaptureRequest.CONTROL_AF_REGIONS);
+ }
+
+ CaptureRequest.Key<?> availableRequestKeys[] =
+ new CaptureRequest.Key<?>[availableKeys.size()];
+ availableKeys.toArray(availableRequestKeys);
+ m.set(REQUEST_AVAILABLE_REQUEST_KEYS, getTagsForKeys(availableRequestKeys));
+ }
+
+ /*
+ * request.availableResultKeys
+ */
+ {
+            CaptureResult.Key<?>[] defaultAvailableKeys = new CaptureResult.Key<?>[] {
+                    CaptureResult.COLOR_CORRECTION_ABERRATION_MODE,
+                    CaptureResult.CONTROL_AE_ANTIBANDING_MODE,
+                    CaptureResult.CONTROL_AE_EXPOSURE_COMPENSATION,
+                    CaptureResult.CONTROL_AE_LOCK,
+                    CaptureResult.CONTROL_AE_MODE,
+                    CaptureResult.CONTROL_AF_MODE,
+                    CaptureResult.CONTROL_AF_STATE,
+                    CaptureResult.CONTROL_AWB_MODE,
+                    CaptureResult.CONTROL_AWB_LOCK,
+                    CaptureResult.CONTROL_MODE,
+                    CaptureResult.FLASH_MODE,
+                    CaptureResult.JPEG_GPS_COORDINATES,
+                    CaptureResult.JPEG_GPS_PROCESSING_METHOD,
+                    CaptureResult.JPEG_GPS_TIMESTAMP,
+                    CaptureResult.JPEG_ORIENTATION,
+                    CaptureResult.JPEG_QUALITY,
+                    CaptureResult.JPEG_THUMBNAIL_QUALITY,
+                    CaptureResult.LENS_FOCAL_LENGTH,
+                    CaptureResult.NOISE_REDUCTION_MODE,
+                    CaptureResult.REQUEST_PIPELINE_DEPTH,
+                    CaptureResult.SCALER_CROP_REGION,
+                    CaptureResult.SENSOR_TIMESTAMP,
+                    CaptureResult.STATISTICS_FACE_DETECT_MODE,
+//                    CaptureResult.STATISTICS_FACES,
+            };
+ List<CaptureResult.Key<?>> availableKeys =
+ new ArrayList<CaptureResult.Key<?>>(Arrays.asList(defaultAvailableKeys));
+
+ if (p.getMaxNumMeteringAreas() > 0) {
+ availableKeys.add(CaptureResult.CONTROL_AE_REGIONS);
+ }
+ if (p.getMaxNumFocusAreas() > 0) {
+ availableKeys.add(CaptureResult.CONTROL_AF_REGIONS);
+ }
+
+ CaptureResult.Key<?> availableResultKeys[] =
+ new CaptureResult.Key<?>[availableKeys.size()];
+ availableKeys.toArray(availableResultKeys);
+ m.set(REQUEST_AVAILABLE_RESULT_KEYS, getTagsForKeys(availableResultKeys));
+ }
+
+ /*
+ * request.maxNumOutputStreams
+ */
+ int[] outputStreams = {
+ /* RAW */
+ REQUEST_MAX_NUM_OUTPUT_STREAMS_COUNT_RAW,
+ /* Processed & Not-Stalling */
+ REQUEST_MAX_NUM_OUTPUT_STREAMS_COUNT_PROC,
+ /* Processed & Stalling */
+ REQUEST_MAX_NUM_OUTPUT_STREAMS_COUNT_PROC_STALL,
+ };
+ m.set(REQUEST_MAX_NUM_OUTPUT_STREAMS, outputStreams);
+
+ /*
+ * request.maxNumInputStreams
+ */
+ m.set(REQUEST_MAX_NUM_INPUT_STREAMS, REQUEST_MAX_NUM_INPUT_STREAMS_COUNT);
+
+ /*
+ * request.partialResultCount
+ */
+ m.set(REQUEST_PARTIAL_RESULT_COUNT, 1); // No partial results supported
+
+ /*
+ * request.pipelineMaxDepth
+ */
+ m.set(REQUEST_PIPELINE_MAX_DEPTH,
+ (byte)(REQUEST_PIPELINE_MAX_DEPTH_HAL1 + REQUEST_PIPELINE_MAX_DEPTH_OURS));
+ }
+
+ private static void mapScaler(CameraMetadataNative m, Parameters p) {
+ /*
+ * scaler.availableMaxDigitalZoom
+ */
+ m.set(SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, ParameterUtils.getMaxZoomRatio(p));
+
+ /*
+ * scaler.croppingType = CENTER_ONLY
+ */
+ m.set(SCALER_CROPPING_TYPE, SCALER_CROPPING_TYPE_CENTER_ONLY);
+ }
+
+ private static void mapSensor(CameraMetadataNative m, Parameters p) {
+ // Use the largest jpeg size (by area) for both active array and pixel array
+ Size largestJpegSize = getLargestSupportedJpegSizeByArea(p);
+ /*
+ * sensor.info.activeArraySize
+ */
+ {
+ Rect activeArrayRect = ParamsUtils.createRect(largestJpegSize);
+ m.set(SENSOR_INFO_ACTIVE_ARRAY_SIZE, activeArrayRect);
+ }
+
+ /*
+ * sensor.availableTestPatternModes
+ */
+ {
+ // Only "OFF" test pattern mode is available
+ m.set(SENSOR_AVAILABLE_TEST_PATTERN_MODES, new int[] { SENSOR_TEST_PATTERN_MODE_OFF });
+ }
+
+ /*
+ * sensor.info.pixelArraySize
+ */
+ m.set(SENSOR_INFO_PIXEL_ARRAY_SIZE, largestJpegSize);
+
+ /*
+ * sensor.info.physicalSize
+ */
+ {
+ /*
+ * Assume focal length is at infinity focus and that the lens is rectilinear.
+ */
+ float focalLength = p.getFocalLength(); // in mm
+ double angleHor = p.getHorizontalViewAngle() * Math.PI / 180; // to radians
+ double angleVer = p.getVerticalViewAngle() * Math.PI / 180; // to radians
+
+ float height = (float)Math.abs(2 * focalLength * Math.tan(angleVer / 2));
+ float width = (float)Math.abs(2 * focalLength * Math.tan(angleHor / 2));
+
+ m.set(SENSOR_INFO_PHYSICAL_SIZE, new SizeF(width, height)); // in mm
+ }
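+
+        /*
+         * Worked example with hypothetical parameters: focalLength = 4.0mm and a
+         * 60 degree horizontal view angle give width = 2 * 4.0 * tan(30 deg) ~= 4.6mm.
+         */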
+
+ /*
+ * sensor.info.timestampSource
+ */
+ {
+ m.set(SENSOR_INFO_TIMESTAMP_SOURCE, SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
+ }
+ }
+
+ private static void mapStatistics(CameraMetadataNative m, Parameters p) {
+ /*
+ * statistics.info.availableFaceDetectModes
+ */
+ int[] fdModes;
+
+ if (p.getMaxNumDetectedFaces() > 0) {
+ fdModes = new int[] {
+ STATISTICS_FACE_DETECT_MODE_OFF,
+ STATISTICS_FACE_DETECT_MODE_SIMPLE
+ // FULL is never-listed, since we have no way to query it statically
+ };
+ } else {
+ fdModes = new int[] {
+ STATISTICS_FACE_DETECT_MODE_OFF
+ };
+ }
+ m.set(STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, fdModes);
+
+ /*
+ * statistics.info.maxFaceCount
+ */
+ m.set(STATISTICS_INFO_MAX_FACE_COUNT, p.getMaxNumDetectedFaces());
+ }
+
+ private static void mapSync(CameraMetadataNative m, Parameters p) {
+ /*
+ * sync.maxLatency
+ */
+ m.set(SYNC_MAX_LATENCY, SYNC_MAX_LATENCY_UNKNOWN);
+ }
+
+ private static void appendStreamConfig(
+ ArrayList<StreamConfiguration> configs, int format, List<Camera.Size> sizes) {
+ for (Camera.Size size : sizes) {
+ StreamConfiguration config =
+ new StreamConfiguration(format, size.width, size.height, /*input*/false);
+ configs.add(config);
+ }
+ }
+
+ private final static String[] sLegacySceneModes = {
+ Parameters.SCENE_MODE_AUTO,
+ Parameters.SCENE_MODE_ACTION,
+ Parameters.SCENE_MODE_PORTRAIT,
+ Parameters.SCENE_MODE_LANDSCAPE,
+ Parameters.SCENE_MODE_NIGHT,
+ Parameters.SCENE_MODE_NIGHT_PORTRAIT,
+ Parameters.SCENE_MODE_THEATRE,
+ Parameters.SCENE_MODE_BEACH,
+ Parameters.SCENE_MODE_SNOW,
+ Parameters.SCENE_MODE_SUNSET,
+ Parameters.SCENE_MODE_STEADYPHOTO,
+ Parameters.SCENE_MODE_FIREWORKS,
+ Parameters.SCENE_MODE_SPORTS,
+ Parameters.SCENE_MODE_PARTY,
+ Parameters.SCENE_MODE_CANDLELIGHT,
+ Parameters.SCENE_MODE_BARCODE,
+ Parameters.SCENE_MODE_HDR,
+ };
+
+ private final static int[] sSceneModes = {
+ CameraCharacteristics.CONTROL_SCENE_MODE_DISABLED,
+ CameraCharacteristics.CONTROL_SCENE_MODE_ACTION,
+ CameraCharacteristics.CONTROL_SCENE_MODE_PORTRAIT,
+ CameraCharacteristics.CONTROL_SCENE_MODE_LANDSCAPE,
+ CameraCharacteristics.CONTROL_SCENE_MODE_NIGHT,
+ CameraCharacteristics.CONTROL_SCENE_MODE_NIGHT_PORTRAIT,
+ CameraCharacteristics.CONTROL_SCENE_MODE_THEATRE,
+ CameraCharacteristics.CONTROL_SCENE_MODE_BEACH,
+ CameraCharacteristics.CONTROL_SCENE_MODE_SNOW,
+ CameraCharacteristics.CONTROL_SCENE_MODE_SUNSET,
+ CameraCharacteristics.CONTROL_SCENE_MODE_STEADYPHOTO,
+ CameraCharacteristics.CONTROL_SCENE_MODE_FIREWORKS,
+ CameraCharacteristics.CONTROL_SCENE_MODE_SPORTS,
+ CameraCharacteristics.CONTROL_SCENE_MODE_PARTY,
+ CameraCharacteristics.CONTROL_SCENE_MODE_CANDLELIGHT,
+ CameraCharacteristics.CONTROL_SCENE_MODE_BARCODE,
+ CameraCharacteristics.CONTROL_SCENE_MODE_HDR,
+ };
+
+ static int convertSceneModeFromLegacy(String mode) {
+ if (mode == null) {
+ return CameraCharacteristics.CONTROL_SCENE_MODE_DISABLED;
+ }
+ int index = ArrayUtils.getArrayIndex(sLegacySceneModes, mode);
+ if (index < 0) {
+ return UNKNOWN_MODE;
+ }
+ return sSceneModes[index];
+ }
+
+ static String convertSceneModeToLegacy(int mode) {
+ if (mode == CONTROL_SCENE_MODE_FACE_PRIORITY) {
+ // OK: Let LegacyFaceDetectMapper handle turning face detection on/off
+ return Parameters.SCENE_MODE_AUTO;
+ }
+
+ int index = ArrayUtils.getArrayIndex(sSceneModes, mode);
+ if (index < 0) {
+ return null;
+ }
+ return sLegacySceneModes[index];
+ }
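+
+    /*
+     * Example round trip (values taken from the tables above):
+     * convertSceneModeFromLegacy(Parameters.SCENE_MODE_HDR) == CONTROL_SCENE_MODE_HDR,
+     * while convertSceneModeToLegacy(CONTROL_SCENE_MODE_FACE_PRIORITY) returns
+     * SCENE_MODE_AUTO, because face detection is toggled separately by
+     * LegacyFaceDetectMapper.
+     */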
+
+ private final static String[] sLegacyEffectMode = {
+ Parameters.EFFECT_NONE,
+ Parameters.EFFECT_MONO,
+ Parameters.EFFECT_NEGATIVE,
+ Parameters.EFFECT_SOLARIZE,
+ Parameters.EFFECT_SEPIA,
+ Parameters.EFFECT_POSTERIZE,
+ Parameters.EFFECT_WHITEBOARD,
+ Parameters.EFFECT_BLACKBOARD,
+ Parameters.EFFECT_AQUA,
+ };
+
+ private final static int[] sEffectModes = {
+ CameraCharacteristics.CONTROL_EFFECT_MODE_OFF,
+ CameraCharacteristics.CONTROL_EFFECT_MODE_MONO,
+ CameraCharacteristics.CONTROL_EFFECT_MODE_NEGATIVE,
+ CameraCharacteristics.CONTROL_EFFECT_MODE_SOLARIZE,
+ CameraCharacteristics.CONTROL_EFFECT_MODE_SEPIA,
+ CameraCharacteristics.CONTROL_EFFECT_MODE_POSTERIZE,
+ CameraCharacteristics.CONTROL_EFFECT_MODE_WHITEBOARD,
+ CameraCharacteristics.CONTROL_EFFECT_MODE_BLACKBOARD,
+ CameraCharacteristics.CONTROL_EFFECT_MODE_AQUA,
+ };
+
+ static int convertEffectModeFromLegacy(String mode) {
+ if (mode == null) {
+ return CameraCharacteristics.CONTROL_EFFECT_MODE_OFF;
+ }
+ int index = ArrayUtils.getArrayIndex(sLegacyEffectMode, mode);
+ if (index < 0) {
+ return UNKNOWN_MODE;
+ }
+ return sEffectModes[index];
+ }
+
+ static String convertEffectModeToLegacy(int mode) {
+ int index = ArrayUtils.getArrayIndex(sEffectModes, mode);
+ if (index < 0) {
+ return null;
+ }
+ return sLegacyEffectMode[index];
+ }
+
+ /**
+ * Convert the ae antibanding mode from api1 into api2.
+ *
+ * @param mode the api1 mode, {@code null} is allowed and will return {@code -1}.
+ *
+ * @return The api2 value, or {@code -1} by default if conversion failed
+ */
+ private static int convertAntiBandingMode(String mode) {
+ if (mode == null) {
+ return -1;
+ }
+
+ switch (mode) {
+ case Camera.Parameters.ANTIBANDING_OFF: {
+ return CONTROL_AE_ANTIBANDING_MODE_OFF;
+ }
+ case Camera.Parameters.ANTIBANDING_50HZ: {
+ return CONTROL_AE_ANTIBANDING_MODE_50HZ;
+ }
+ case Camera.Parameters.ANTIBANDING_60HZ: {
+ return CONTROL_AE_ANTIBANDING_MODE_60HZ;
+ }
+ case Camera.Parameters.ANTIBANDING_AUTO: {
+ return CONTROL_AE_ANTIBANDING_MODE_AUTO;
+ }
+ default: {
+ Log.w(TAG, "convertAntiBandingMode - Unknown antibanding mode " + mode);
+ return -1;
+ }
+ }
+ }
+
+ /**
+ * Convert the ae antibanding mode from api1 into api2.
+ *
+ * @param mode the api1 mode, {@code null} is allowed and will return {@code MODE_OFF}.
+ *
+ * @return The api2 value, or {@code MODE_OFF} by default if conversion failed
+ */
+ static int convertAntiBandingModeOrDefault(String mode) {
+ int antiBandingMode = convertAntiBandingMode(mode);
+ if (antiBandingMode == -1) {
+ return CONTROL_AE_ANTIBANDING_MODE_OFF;
+ }
+
+ return antiBandingMode;
+ }
+
+ private static int[] convertAeFpsRangeToLegacy(Range<Integer> fpsRange) {
+ int[] legacyFps = new int[2];
+ legacyFps[Camera.Parameters.PREVIEW_FPS_MIN_INDEX] = fpsRange.getLower();
+ legacyFps[Camera.Parameters.PREVIEW_FPS_MAX_INDEX] = fpsRange.getUpper();
+ return legacyFps;
+ }
+
+ /**
+ * Return the stall duration for a given output jpeg size in nanoseconds.
+ *
+ * <p>An 8mp image is chosen to have a stall duration of 0.8 seconds.</p>
+ */
+ private static long calculateJpegStallDuration(Camera.Size size) {
+ long baseDuration = APPROXIMATE_CAPTURE_DELAY_MS * NS_PER_MS; // 200ms for capture
+ long area = size.width * (long) size.height;
+ long stallPerArea = APPROXIMATE_JPEG_ENCODE_TIME_MS * NS_PER_MS /
+ APPROXIMATE_SENSOR_AREA_PX; // 600ms stall for 8mp
+ return baseDuration + area * stallPerArea;
+ }
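+
+    /*
+     * Worked example of the approximation above: for an 8mp JPEG
+     * (area == APPROXIMATE_SENSOR_AREA_PX), the stall is 200ms (capture) plus
+     * ~600ms (encode), roughly 0.8 seconds in total, matching the javadoc.
+     */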
+
+ /**
+ * Set the legacy parameters using the {@link LegacyRequest legacy request}.
+ *
+ * <p>The legacy request's parameters are changed as a side effect of calling this
+ * method.</p>
+ *
+ * @param request a non-{@code null} legacy request
+ */
+ public static void convertRequestMetadata(LegacyRequest request) {
+ LegacyRequestMapper.convertRequestMetadata(request);
+ }
+
+ private static final int[] sAllowedTemplates = {
+ CameraDevice.TEMPLATE_PREVIEW,
+ CameraDevice.TEMPLATE_STILL_CAPTURE,
+ CameraDevice.TEMPLATE_RECORD,
+ // Disallowed templates in legacy mode:
+ // CameraDevice.TEMPLATE_VIDEO_SNAPSHOT,
+ // CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG,
+ // CameraDevice.TEMPLATE_MANUAL
+ };
+
+ /**
+ * Create a request template
+ *
+ * @param c a non-{@code null} camera characteristics for this camera
+ * @param templateId a non-negative template ID
+ *
+ * @return a non-{@code null} request template
+ *
+ * @throws IllegalArgumentException if {@code templateId} was invalid
+ *
+ * @see android.hardware.camera2.CameraDevice#TEMPLATE_MANUAL
+ */
+ public static CameraMetadataNative createRequestTemplate(
+ CameraCharacteristics c, int templateId) {
+ if (!ArrayUtils.contains(sAllowedTemplates, templateId)) {
+ throw new IllegalArgumentException("templateId out of range");
+ }
+
+ CameraMetadataNative m = new CameraMetadataNative();
+
+ /*
+ * NOTE: If adding new code here and it needs to query the static info,
+ * query the camera characteristics, so we can reuse this for api2 code later
+ * to create our own templates in the framework
+ */
+
+ /*
+ * control.*
+ */
+
+ // control.awbMode
+ m.set(CaptureRequest.CONTROL_AWB_MODE, CameraMetadata.CONTROL_AWB_MODE_AUTO);
+        // AWB is always available on API1 devices
+
+ // control.aeAntibandingMode
+ m.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE, CONTROL_AE_ANTIBANDING_MODE_AUTO);
+
+ // control.aeExposureCompensation
+ m.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0);
+
+ // control.aeLock
+ m.set(CaptureRequest.CONTROL_AE_LOCK, false);
+
+ // control.aePrecaptureTrigger
+ m.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
+
+ // control.afTrigger
+ m.set(CaptureRequest.CONTROL_AF_TRIGGER, CONTROL_AF_TRIGGER_IDLE);
+
+ // control.awbLock
+ m.set(CaptureRequest.CONTROL_AWB_LOCK, false);
+
+ // control.aeRegions, control.awbRegions, control.afRegions
+ {
+ Rect activeArray = c.get(SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+ MeteringRectangle[] activeRegions = new MeteringRectangle[] {
+ new MeteringRectangle(/*x*/0, /*y*/0, /*width*/activeArray.width() - 1,
+ /*height*/activeArray.height() - 1,/*weight*/0)};
+ m.set(CaptureRequest.CONTROL_AE_REGIONS, activeRegions);
+ m.set(CaptureRequest.CONTROL_AWB_REGIONS, activeRegions);
+ m.set(CaptureRequest.CONTROL_AF_REGIONS, activeRegions);
+ }
+
+ // control.captureIntent
+ {
+ int captureIntent;
+ switch (templateId) {
+ case CameraDevice.TEMPLATE_PREVIEW:
+ captureIntent = CONTROL_CAPTURE_INTENT_PREVIEW;
+ break;
+ case CameraDevice.TEMPLATE_STILL_CAPTURE:
+ captureIntent = CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
+ break;
+ case CameraDevice.TEMPLATE_RECORD:
+ captureIntent = CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
+ break;
+ default:
+                    // Can't get anything else, since templateId was already validated
+                    // against sAllowedTemplates (IllegalArgumentException above)
+ throw new AssertionError("Impossible; keep in sync with sAllowedTemplates");
+ }
+ m.set(CaptureRequest.CONTROL_CAPTURE_INTENT, captureIntent);
+ }
+
+ // control.aeMode
+ m.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON);
+        // AE is always available on API1 devices
+
+ // control.mode
+ m.set(CaptureRequest.CONTROL_MODE, CONTROL_MODE_AUTO);
+
+ // control.afMode
+ {
+ Float minimumFocusDistance = c.get(LENS_INFO_MINIMUM_FOCUS_DISTANCE);
+
+ int afMode;
+ if (minimumFocusDistance != null &&
+ minimumFocusDistance == LENS_INFO_MINIMUM_FOCUS_DISTANCE_FIXED_FOCUS) {
+ // Cannot control auto-focus with fixed-focus cameras
+ afMode = CameraMetadata.CONTROL_AF_MODE_OFF;
+ } else {
+                // A nonzero (or unreported) minimum focus distance implies the camera has AF
+ afMode = CameraMetadata.CONTROL_AF_MODE_AUTO;
+
+ if (templateId == CameraDevice.TEMPLATE_RECORD ||
+ templateId == CameraDevice.TEMPLATE_VIDEO_SNAPSHOT) {
+ if (ArrayUtils.contains(c.get(CONTROL_AF_AVAILABLE_MODES),
+ CONTROL_AF_MODE_CONTINUOUS_VIDEO)) {
+ afMode = CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_VIDEO;
+ }
+ } else if (templateId == CameraDevice.TEMPLATE_PREVIEW ||
+ templateId == CameraDevice.TEMPLATE_STILL_CAPTURE) {
+ if (ArrayUtils.contains(c.get(CONTROL_AF_AVAILABLE_MODES),
+ CONTROL_AF_MODE_CONTINUOUS_PICTURE)) {
+ afMode = CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE;
+ }
+ }
+ }
+
+ if (DEBUG) {
+ Log.v(TAG, "createRequestTemplate (templateId=" + templateId + ")," +
+ " afMode=" + afMode + ", minimumFocusDistance=" + minimumFocusDistance);
+ }
+
+ m.set(CaptureRequest.CONTROL_AF_MODE, afMode);
+ }
+
+ {
+ // control.aeTargetFpsRange
+            Range<Integer>[] availableFpsRange =
+                    c.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
+
+ // Pick FPS range with highest max value, tiebreak on higher min value
+ Range<Integer> bestRange = availableFpsRange[0];
+ for (Range<Integer> r : availableFpsRange) {
+ if (bestRange.getUpper() < r.getUpper()) {
+ bestRange = r;
+ } else if (bestRange.getUpper() == r.getUpper() &&
+ bestRange.getLower() < r.getLower()) {
+ bestRange = r;
+ }
+ }
+ m.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, bestRange);
+ }
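+
+        /*
+         * Example with hypothetical ranges: given {[15, 30], [24, 30], [7, 60]} the
+         * loop picks [7, 60] (highest max); given only {[15, 30], [24, 30]} it picks
+         * [24, 30] (equal max, higher min).
+         */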
+
+ // control.sceneMode -- DISABLED is always available
+ m.set(CaptureRequest.CONTROL_SCENE_MODE, CONTROL_SCENE_MODE_DISABLED);
+
+ /*
+ * statistics.*
+ */
+
+ // statistics.faceDetectMode
+ m.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, STATISTICS_FACE_DETECT_MODE_OFF);
+
+ /*
+ * flash.*
+ */
+
+ // flash.mode
+ m.set(CaptureRequest.FLASH_MODE, FLASH_MODE_OFF);
+
+ /*
+ * noiseReduction.*
+ */
+ if (templateId == CameraDevice.TEMPLATE_STILL_CAPTURE) {
+ m.set(CaptureRequest.NOISE_REDUCTION_MODE, NOISE_REDUCTION_MODE_HIGH_QUALITY);
+ } else {
+ m.set(CaptureRequest.NOISE_REDUCTION_MODE, NOISE_REDUCTION_MODE_FAST);
+ }
+
+ /*
+ * colorCorrection.*
+ */
+ if (templateId == CameraDevice.TEMPLATE_STILL_CAPTURE) {
+ m.set(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE,
+ COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY);
+ } else {
+ m.set(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE,
+ COLOR_CORRECTION_ABERRATION_MODE_FAST);
+ }
+
+ /*
+ * lens.*
+ */
+
+ // lens.focalLength
+ m.set(CaptureRequest.LENS_FOCAL_LENGTH,
+ c.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS)[0]);
+
+ /*
+ * jpeg.*
+ */
+
+ // jpeg.thumbnailSize - set smallest non-zero size if possible
+ Size[] sizes = c.get(CameraCharacteristics.JPEG_AVAILABLE_THUMBNAIL_SIZES);
+ m.set(CaptureRequest.JPEG_THUMBNAIL_SIZE, (sizes.length > 1) ? sizes[1] : sizes[0]);
+
+ // TODO: map other request template values
+ return m;
+ }
+
+ private static int[] getTagsForKeys(Key<?>[] keys) {
+ int[] tags = new int[keys.length];
+
+ for (int i = 0; i < keys.length; ++i) {
+ tags[i] = keys[i].getNativeKey().getTag();
+ }
+
+ return tags;
+ }
+
+ private static int[] getTagsForKeys(CaptureRequest.Key<?>[] keys) {
+ int[] tags = new int[keys.length];
+
+ for (int i = 0; i < keys.length; ++i) {
+ tags[i] = keys[i].getNativeKey().getTag();
+ }
+
+ return tags;
+ }
+
+ private static int[] getTagsForKeys(CaptureResult.Key<?>[] keys) {
+ int[] tags = new int[keys.length];
+
+ for (int i = 0; i < keys.length; ++i) {
+ tags[i] = keys[i].getNativeKey().getTag();
+ }
+
+ return tags;
+ }
+
+ /**
+ * Convert the requested AF mode into its equivalent supported parameter.
+ *
+ * @param mode {@code CONTROL_AF_MODE}
+ * @param supportedFocusModes list of camera1's supported focus modes
+     * @return the stringified af mode, or {@code null} if it's not supported
+ */
+ static String convertAfModeToLegacy(int mode, List<String> supportedFocusModes) {
+ if (supportedFocusModes == null || supportedFocusModes.isEmpty()) {
+ Log.w(TAG, "No focus modes supported; API1 bug");
+ return null;
+ }
+
+ String param = null;
+ switch (mode) {
+ case CONTROL_AF_MODE_AUTO:
+ param = Parameters.FOCUS_MODE_AUTO;
+ break;
+ case CONTROL_AF_MODE_CONTINUOUS_PICTURE:
+ param = Parameters.FOCUS_MODE_CONTINUOUS_PICTURE;
+ break;
+ case CONTROL_AF_MODE_CONTINUOUS_VIDEO:
+ param = Parameters.FOCUS_MODE_CONTINUOUS_VIDEO;
+ break;
+ case CONTROL_AF_MODE_EDOF:
+ param = Parameters.FOCUS_MODE_EDOF;
+ break;
+ case CONTROL_AF_MODE_MACRO:
+ param = Parameters.FOCUS_MODE_MACRO;
+ break;
+ case CONTROL_AF_MODE_OFF:
+ if (supportedFocusModes.contains(Parameters.FOCUS_MODE_FIXED)) {
+ param = Parameters.FOCUS_MODE_FIXED;
+ } else {
+ param = Parameters.FOCUS_MODE_INFINITY;
+ }
+ }
+
+ if (!supportedFocusModes.contains(param)) {
+            // Weed out bad user input by falling back to the first supported focus mode
+ String defaultMode = supportedFocusModes.get(0);
+ Log.w(TAG,
+ String.format(
+ "convertAfModeToLegacy - ignoring unsupported mode %d, " +
+ "defaulting to %s", mode, defaultMode));
+ param = defaultMode;
+ }
+
+ return param;
+ }
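+
+    /*
+     * Example with hypothetical inputs: convertAfModeToLegacy(CONTROL_AF_MODE_OFF,
+     * Arrays.asList(FOCUS_MODE_AUTO, FOCUS_MODE_INFINITY)) returns FOCUS_MODE_INFINITY,
+     * since FOCUS_MODE_FIXED is not in the supported list.
+     */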
+}
diff --git a/android/hardware/camera2/legacy/LegacyRequest.java b/android/hardware/camera2/legacy/LegacyRequest.java
new file mode 100644
index 00000000..f13ac5c8
--- /dev/null
+++ b/android/hardware/camera2/legacy/LegacyRequest.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.legacy;
+
+import android.hardware.Camera;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CaptureRequest;
+import android.util.Size;
+
+import static com.android.internal.util.Preconditions.*;
+
+/**
+ * Hold the data necessary to build up the camera1 parameters from a capture request.
+ */
+public class LegacyRequest {
+ /** Immutable characteristics for the camera corresponding to this request */
+ public final CameraCharacteristics characteristics;
+ /** Immutable capture request, as requested by the user */
+ public final CaptureRequest captureRequest;
+ /** Immutable api1 preview buffer size at the time of the request */
+ public final Size previewSize;
+ /** <em>Mutable</em> camera parameters */
+ public final Camera.Parameters parameters;
+
+ /**
+ * Create a new legacy request; the parameters are copied.
+ *
+ * @param characteristics immutable static camera characteristics for this camera
+ * @param captureRequest immutable user-defined capture request
+ * @param previewSize immutable internal preview size used for {@link Camera#setPreviewSurface}
+     * @param parameters the initial camera1 parameter state; it is copied, and the copy can be mutated
+ */
+ public LegacyRequest(CameraCharacteristics characteristics, CaptureRequest captureRequest,
+ Size previewSize, Camera.Parameters parameters) {
+ this.characteristics = checkNotNull(characteristics, "characteristics must not be null");
+ this.captureRequest = checkNotNull(captureRequest, "captureRequest must not be null");
+ this.previewSize = checkNotNull(previewSize, "previewSize must not be null");
+ checkNotNull(parameters, "parameters must not be null");
+
+ this.parameters = Camera.getParametersCopy(parameters);
+ }
+
+ /**
+ * Update the current parameters in-place to be a copy of the new parameters.
+ *
+ * @param parameters non-{@code null} parameters for api1 camera
+ */
+ public void setParameters(Camera.Parameters parameters) {
+ checkNotNull(parameters, "parameters must not be null");
+
+ this.parameters.copyFrom(parameters);
+ }
+}
diff --git a/android/hardware/camera2/legacy/LegacyRequestMapper.java b/android/hardware/camera2/legacy/LegacyRequestMapper.java
new file mode 100644
index 00000000..2e06d5fb
--- /dev/null
+++ b/android/hardware/camera2/legacy/LegacyRequestMapper.java
@@ -0,0 +1,687 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.legacy;
+
+import android.graphics.Rect;
+import android.hardware.Camera;
+import android.hardware.Camera.Parameters;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.params.MeteringRectangle;
+import android.hardware.camera2.utils.ListUtils;
+import android.hardware.camera2.utils.ParamsUtils;
+import android.location.Location;
+import android.util.Log;
+import android.util.Range;
+import android.util.Size;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+
+import static android.hardware.camera2.CaptureRequest.*;
+
+/**
+ * Provide legacy-specific implementations of camera2 CaptureRequest for legacy devices.
+ */
+@SuppressWarnings("deprecation")
+public class LegacyRequestMapper {
+ private static final String TAG = "LegacyRequestMapper";
+ private static final boolean DEBUG = false;
+
+ /** Default quality for android.jpeg.quality, android.jpeg.thumbnailQuality */
+ private static final byte DEFAULT_JPEG_QUALITY = 85;
+
+ /**
+ * Set the legacy parameters using the {@link LegacyRequest legacy request}.
+ *
+ * <p>The legacy request's parameters are changed as a side effect of calling this
+ * method.</p>
+ *
+ * @param legacyRequest a non-{@code null} legacy request
+ */
+ public static void convertRequestMetadata(LegacyRequest legacyRequest) {
+ CameraCharacteristics characteristics = legacyRequest.characteristics;
+ CaptureRequest request = legacyRequest.captureRequest;
+ Size previewSize = legacyRequest.previewSize;
+ Camera.Parameters params = legacyRequest.parameters;
+
+ Rect activeArray = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+
+ /*
+ * scaler.cropRegion
+ */
+ ParameterUtils.ZoomData zoomData;
+ {
+ zoomData = ParameterUtils.convertScalerCropRegion(activeArray,
+ request.get(SCALER_CROP_REGION),
+ previewSize,
+ params);
+
+ if (params.isZoomSupported()) {
+ params.setZoom(zoomData.zoomIndex);
+ } else if (DEBUG) {
+ Log.v(TAG, "convertRequestToMetadata - zoom is not supported");
+ }
+ }
+
+ /*
+ * colorCorrection.*
+ */
+ // colorCorrection.aberrationMode
+ {
+ int aberrationMode = ParamsUtils.getOrDefault(request,
+ COLOR_CORRECTION_ABERRATION_MODE,
+ /*defaultValue*/COLOR_CORRECTION_ABERRATION_MODE_FAST);
+
+ if (aberrationMode != COLOR_CORRECTION_ABERRATION_MODE_FAST &&
+ aberrationMode != COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
+ Log.w(TAG, "convertRequestToMetadata - Ignoring unsupported " +
+ "colorCorrection.aberrationMode = " + aberrationMode);
+ }
+ }
+
+ /*
+ * control.ae*
+ */
+ // control.aeAntibandingMode
+ {
+ String legacyMode;
+ Integer antiBandingMode = request.get(CONTROL_AE_ANTIBANDING_MODE);
+ if (antiBandingMode != null) {
+ legacyMode = convertAeAntiBandingModeToLegacy(antiBandingMode);
+ } else {
+ legacyMode = ListUtils.listSelectFirstFrom(params.getSupportedAntibanding(),
+ new String[] {
+ Parameters.ANTIBANDING_AUTO,
+ Parameters.ANTIBANDING_OFF,
+ Parameters.ANTIBANDING_50HZ,
+ Parameters.ANTIBANDING_60HZ,
+ });
+ }
+
+ if (legacyMode != null) {
+ params.setAntibanding(legacyMode);
+ }
+ }
+
+ /*
+ * control.aeRegions, afRegions
+ */
+ {
+ // aeRegions
+ {
+                // Use aeRegions if available; awbRegions are not supported and are ignored
+ MeteringRectangle[] aeRegions = request.get(CONTROL_AE_REGIONS);
+ if (request.get(CONTROL_AWB_REGIONS) != null) {
+ Log.w(TAG, "convertRequestMetadata - control.awbRegions setting is not " +
+ "supported, ignoring value");
+ }
+ int maxNumMeteringAreas = params.getMaxNumMeteringAreas();
+ List<Camera.Area> meteringAreaList = convertMeteringRegionsToLegacy(
+ activeArray, zoomData, aeRegions, maxNumMeteringAreas,
+ /*regionName*/"AE");
+
+                // WAR: for b/17252693, some devices can't handle params.setMeteringAreas(null).
+ if (maxNumMeteringAreas > 0) {
+ params.setMeteringAreas(meteringAreaList);
+ }
+ }
+
+ // afRegions
+ {
+ MeteringRectangle[] afRegions = request.get(CONTROL_AF_REGIONS);
+ int maxNumFocusAreas = params.getMaxNumFocusAreas();
+ List<Camera.Area> focusAreaList = convertMeteringRegionsToLegacy(
+ activeArray, zoomData, afRegions, maxNumFocusAreas,
+ /*regionName*/"AF");
+
+ // WAR: for b/17252693, some devices can't handle params.setFocusAreas(null).
+ if (maxNumFocusAreas > 0) {
+ params.setFocusAreas(focusAreaList);
+ }
+ }
+ }
+
+ // control.aeTargetFpsRange
+ Range<Integer> aeFpsRange = request.get(CONTROL_AE_TARGET_FPS_RANGE);
+ if (aeFpsRange != null) {
+ int[] legacyFps = convertAeFpsRangeToLegacy(aeFpsRange);
+
+ int[] rangeToApply = null;
+ for(int[] range : params.getSupportedPreviewFpsRange()) {
+ // Round range up/down to integer FPS value
+ int intRangeLow = (int) Math.floor(range[0] / 1000.0) * 1000;
+ int intRangeHigh = (int) Math.ceil(range[1] / 1000.0) * 1000;
+ if (legacyFps[0] == intRangeLow && legacyFps[1] == intRangeHigh) {
+ rangeToApply = range;
+ break;
+ }
+ }
+ if (rangeToApply != null) {
+ params.setPreviewFpsRange(rangeToApply[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
+ rangeToApply[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
+ } else {
+ Log.w(TAG, "Unsupported FPS range set [" + legacyFps[0] + "," + legacyFps[1] + "]");
+ }
+ }
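+
+        /*
+         * Example with hypothetical values (assuming the request range was converted to
+         * camera1's FPS*1000 units): Range[15, 30] -> [15000, 30000] matches a legacy
+         * range of [15000, 29970], since floor(15.0) * 1000 == 15000 and
+         * ceil(29.97) * 1000 == 30000.
+         */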
+
+ /*
+ * control
+ */
+
+ // control.aeExposureCompensation
+ {
+ Range<Integer> compensationRange =
+ characteristics.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE);
+ int compensation = ParamsUtils.getOrDefault(request,
+ CONTROL_AE_EXPOSURE_COMPENSATION,
+ /*defaultValue*/0);
+
+ if (!compensationRange.contains(compensation)) {
+ Log.w(TAG,
+ "convertRequestMetadata - control.aeExposureCompensation " +
+ "is out of range, ignoring value");
+ compensation = 0;
+ }
+
+ params.setExposureCompensation(compensation);
+ }
+
+ // control.aeLock
+ {
+ Boolean aeLock = getIfSupported(request, CONTROL_AE_LOCK, /*defaultValue*/false,
+ params.isAutoExposureLockSupported(),
+ /*allowedValue*/false);
+
+ if (aeLock != null) {
+ params.setAutoExposureLock(aeLock);
+ }
+
+ if (DEBUG) {
+ Log.v(TAG, "convertRequestToMetadata - control.aeLock set to " + aeLock);
+ }
+
+ // TODO: Don't add control.aeLock to availableRequestKeys if it's not supported
+ }
+
+ // control.aeMode, flash.mode
+ mapAeAndFlashMode(request, /*out*/params);
+
+ // control.afMode
+ {
+ int afMode = ParamsUtils.getOrDefault(request, CONTROL_AF_MODE,
+ /*defaultValue*/CONTROL_AF_MODE_OFF);
+ String focusMode = LegacyMetadataMapper.convertAfModeToLegacy(afMode,
+ params.getSupportedFocusModes());
+
+ if (focusMode != null) {
+ params.setFocusMode(focusMode);
+ }
+
+ if (DEBUG) {
+ Log.v(TAG, "convertRequestToMetadata - control.afMode "
+ + afMode + " mapped to " + focusMode);
+ }
+ }
+
+ // control.awbMode
+ {
+ Integer awbMode = getIfSupported(request, CONTROL_AWB_MODE,
+ /*defaultValue*/CONTROL_AWB_MODE_AUTO,
+ params.getSupportedWhiteBalance() != null,
+ /*allowedValue*/CONTROL_AWB_MODE_AUTO);
+
+ String whiteBalanceMode = null;
+ if (awbMode != null) { // null iff AWB is not supported by camera1 api
+ whiteBalanceMode = convertAwbModeToLegacy(awbMode);
+ params.setWhiteBalance(whiteBalanceMode);
+ }
+
+ if (DEBUG) {
+ Log.v(TAG, "convertRequestToMetadata - control.awbMode "
+ + awbMode + " mapped to " + whiteBalanceMode);
+ }
+ }
+
+ // control.awbLock
+ {
+ Boolean awbLock = getIfSupported(request, CONTROL_AWB_LOCK, /*defaultValue*/false,
+ params.isAutoWhiteBalanceLockSupported(),
+ /*allowedValue*/false);
+
+ if (awbLock != null) {
+ params.setAutoWhiteBalanceLock(awbLock);
+ }
+
+ // TODO: Don't add control.awbLock to availableRequestKeys if it's not supported
+ }
+
+ // control.captureIntent
+ {
+ int captureIntent = ParamsUtils.getOrDefault(request,
+ CONTROL_CAPTURE_INTENT,
+ /*defaultValue*/CONTROL_CAPTURE_INTENT_PREVIEW);
+
+ captureIntent = filterSupportedCaptureIntent(captureIntent);
+
+ params.setRecordingHint(
+ captureIntent == CONTROL_CAPTURE_INTENT_VIDEO_RECORD ||
+ captureIntent == CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT);
+ }
+
+ // control.videoStabilizationMode
+ {
+ Integer stabMode = getIfSupported(request, CONTROL_VIDEO_STABILIZATION_MODE,
+ /*defaultValue*/CONTROL_VIDEO_STABILIZATION_MODE_OFF,
+ params.isVideoStabilizationSupported(),
+ /*allowedValue*/CONTROL_VIDEO_STABILIZATION_MODE_OFF);
+
+ if (stabMode != null) {
+ params.setVideoStabilization(stabMode == CONTROL_VIDEO_STABILIZATION_MODE_ON);
+ }
+ }
+
+ // lens.focusDistance
+ {
+ boolean infinityFocusSupported =
+ ListUtils.listContains(params.getSupportedFocusModes(),
+ Parameters.FOCUS_MODE_INFINITY);
+ Float focusDistance = getIfSupported(request, LENS_FOCUS_DISTANCE,
+ /*defaultValue*/0f, infinityFocusSupported, /*allowedValue*/0f);
+
+ if (focusDistance == null || focusDistance != 0f) {
+ Log.w(TAG,
+ "convertRequestToMetadata - Ignoring android.lens.focusDistance "
+ + focusDistance + "; only 0.0f (focus at infinity) is supported");
+ }
+ }
+
+ // control.sceneMode, control.mode
+ {
+ // TODO: Map FACE_PRIORITY scene mode to face detection.
+
+ if (params.getSupportedSceneModes() != null) {
+ int controlMode = ParamsUtils.getOrDefault(request, CONTROL_MODE,
+ /*defaultValue*/CONTROL_MODE_AUTO);
+ String modeToSet;
+ switch (controlMode) {
+ case CONTROL_MODE_USE_SCENE_MODE: {
+ int sceneMode = ParamsUtils.getOrDefault(request, CONTROL_SCENE_MODE,
+ /*defaultValue*/CONTROL_SCENE_MODE_DISABLED);
+ String legacySceneMode = LegacyMetadataMapper.
+ convertSceneModeToLegacy(sceneMode);
+ if (legacySceneMode != null) {
+ modeToSet = legacySceneMode;
+ } else {
+ modeToSet = Parameters.SCENE_MODE_AUTO;
+ Log.w(TAG, "Skipping unknown requested scene mode: " + sceneMode);
+ }
+ break;
+ }
+ case CONTROL_MODE_AUTO: {
+ modeToSet = Parameters.SCENE_MODE_AUTO;
+ break;
+ }
+ default: {
+ Log.w(TAG, "Control mode " + controlMode +
+ " is unsupported, defaulting to AUTO");
+ modeToSet = Parameters.SCENE_MODE_AUTO;
+ }
+ }
+ params.setSceneMode(modeToSet);
+ }
+ }
+
+ // control.effectMode
+ {
+ if (params.getSupportedColorEffects() != null) {
+ int effectMode = ParamsUtils.getOrDefault(request, CONTROL_EFFECT_MODE,
+ /*defaultValue*/CONTROL_EFFECT_MODE_OFF);
+ String legacyEffectMode = LegacyMetadataMapper.convertEffectModeToLegacy(effectMode);
+ if (legacyEffectMode != null) {
+ params.setColorEffect(legacyEffectMode);
+ } else {
+ params.setColorEffect(Parameters.EFFECT_NONE);
+ Log.w(TAG, "Skipping unknown requested effect mode: " + effectMode);
+ }
+ }
+ }
+
+ /*
+ * sensor
+ */
+
+ // sensor.testPattern
+ {
+ int testPatternMode = ParamsUtils.getOrDefault(request, SENSOR_TEST_PATTERN_MODE,
+ /*defaultValue*/SENSOR_TEST_PATTERN_MODE_OFF);
+ if (testPatternMode != SENSOR_TEST_PATTERN_MODE_OFF) {
+ Log.w(TAG, "convertRequestToMetadata - ignoring sensor.testPatternMode "
+ + testPatternMode + "; only OFF is supported");
+ }
+ }
+
+ /*
+ * jpeg.*
+ */
+
+ // jpeg.gpsLocation
+ {
+ Location location = request.get(JPEG_GPS_LOCATION);
+ if (location != null) {
+ if (checkForCompleteGpsData(location)) {
+ params.setGpsAltitude(location.getAltitude());
+ params.setGpsLatitude(location.getLatitude());
+ params.setGpsLongitude(location.getLongitude());
+ params.setGpsProcessingMethod(location.getProvider().toUpperCase());
+ params.setGpsTimestamp(location.getTime());
+ } else {
+ Log.w(TAG, "Incomplete GPS parameters provided in location " + location);
+ }
+ } else {
+ params.removeGpsData();
+ }
+ }
+
+ // jpeg.orientation
+ {
+ Integer orientation = request.get(CaptureRequest.JPEG_ORIENTATION);
+ params.setRotation(ParamsUtils.getOrDefault(request, JPEG_ORIENTATION,
+ (orientation == null) ? 0 : orientation));
+ }
+
+ // jpeg.quality
+ {
+ params.setJpegQuality(0xFF & ParamsUtils.getOrDefault(request, JPEG_QUALITY,
+ DEFAULT_JPEG_QUALITY));
+ }
+
+ // jpeg.thumbnailQuality
+ {
+ params.setJpegThumbnailQuality(0xFF & ParamsUtils.getOrDefault(request,
+ JPEG_THUMBNAIL_QUALITY, DEFAULT_JPEG_QUALITY));
+ }
+
+ // jpeg.thumbnailSize
+ {
+ List<Camera.Size> sizes = params.getSupportedJpegThumbnailSizes();
+
+ if (sizes != null && sizes.size() > 0) {
+ Size s = request.get(JPEG_THUMBNAIL_SIZE);
+ boolean invalidSize = (s == null) ? false : !ParameterUtils.containsSize(sizes,
+ s.getWidth(), s.getHeight());
+ if (invalidSize) {
+ Log.w(TAG, "Invalid JPEG thumbnail size set " + s + ", skipping thumbnail...");
+ }
+ if (s == null || invalidSize) {
+ // (0,0) = "no thumbnail" in Camera API 1
+ params.setJpegThumbnailSize(/*width*/0, /*height*/0);
+ } else {
+ params.setJpegThumbnailSize(s.getWidth(), s.getHeight());
+ }
+ }
+ }
+
+ /*
+ * noiseReduction.*
+ */
+ // noiseReduction.mode
+ {
+ int mode = ParamsUtils.getOrDefault(request,
+ NOISE_REDUCTION_MODE,
+ /*defaultValue*/NOISE_REDUCTION_MODE_FAST);
+
+ if (mode != NOISE_REDUCTION_MODE_FAST &&
+ mode != NOISE_REDUCTION_MODE_HIGH_QUALITY) {
+ Log.w(TAG, "convertRequestToMetadata - Ignoring unsupported " +
+ "noiseReduction.mode = " + mode);
+ }
+ }
+ }
+
+ private static boolean checkForCompleteGpsData(Location location) {
+ return location != null && location.getProvider() != null && location.getTime() != 0;
+ }
+
+ static int filterSupportedCaptureIntent(int captureIntent) {
+ switch (captureIntent) {
+ case CONTROL_CAPTURE_INTENT_CUSTOM:
+ case CONTROL_CAPTURE_INTENT_PREVIEW:
+ case CONTROL_CAPTURE_INTENT_STILL_CAPTURE:
+ case CONTROL_CAPTURE_INTENT_VIDEO_RECORD:
+ case CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT:
+ break;
+ case CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG:
+ case CONTROL_CAPTURE_INTENT_MANUAL:
+ Log.w(TAG, "Unsupported control.captureIntent value " + captureIntent
+ + "; defaulting to PREVIEW");
+ captureIntent = CONTROL_CAPTURE_INTENT_PREVIEW;
+ break;
+ default:
+ Log.w(TAG, "Unknown control.captureIntent value " + captureIntent
+ + "; defaulting to PREVIEW");
+ captureIntent = CONTROL_CAPTURE_INTENT_PREVIEW;
+ }
+
+ return captureIntent;
+ }
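+
+ // A minimal sketch of the filtering above (illustrative only, using the
+ // CONTROL_CAPTURE_INTENT_* constants already imported by this file):
+ //
+ // int intent = filterSupportedCaptureIntent(CONTROL_CAPTURE_INTENT_MANUAL);
+ // // intent == CONTROL_CAPTURE_INTENT_PREVIEW (with a warning logged)
+ // intent = filterSupportedCaptureIntent(CONTROL_CAPTURE_INTENT_STILL_CAPTURE);
+ // // intent == CONTROL_CAPTURE_INTENT_STILL_CAPTURE (passed through unchanged)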
+
+ private static List<Camera.Area> convertMeteringRegionsToLegacy(
+ Rect activeArray, ParameterUtils.ZoomData zoomData,
+ MeteringRectangle[] meteringRegions, int maxNumMeteringAreas, String regionName) {
+ if (meteringRegions == null || maxNumMeteringAreas <= 0) {
+ if (maxNumMeteringAreas > 0) {
+ return Arrays.asList(ParameterUtils.CAMERA_AREA_DEFAULT);
+ } else {
+ return null;
+ }
+ }
+
+ // Add all non-zero weight regions to the list
+ List<MeteringRectangle> meteringRectangleList = new ArrayList<>();
+ for (MeteringRectangle rect : meteringRegions) {
+ if (rect.getMeteringWeight() != MeteringRectangle.METERING_WEIGHT_DONT_CARE) {
+ meteringRectangleList.add(rect);
+ }
+ }
+
+ if (meteringRectangleList.size() == 0) {
+ Log.w(TAG, "Only received metering rectangles with weight 0.");
+ return Arrays.asList(ParameterUtils.CAMERA_AREA_DEFAULT);
+ }
+
+ // Ignore any regions beyond our maximum supported count
+ int countMeteringAreas =
+ Math.min(maxNumMeteringAreas, meteringRectangleList.size());
+ List<Camera.Area> meteringAreaList = new ArrayList<>(countMeteringAreas);
+
+ for (int i = 0; i < countMeteringAreas; ++i) {
+ MeteringRectangle rect = meteringRectangleList.get(i);
+
+ ParameterUtils.MeteringData meteringData =
+ ParameterUtils.convertMeteringRectangleToLegacy(activeArray, rect, zoomData);
+ meteringAreaList.add(meteringData.meteringArea);
+ }
+
+ if (maxNumMeteringAreas < meteringRectangleList.size()) {
+ Log.w(TAG,
+ "convertMeteringRegionsToLegacy - Too many requested " + regionName +
+ " regions, ignoring all beyond the first " + maxNumMeteringAreas);
+ }
+
+ if (DEBUG) {
+ Log.v(TAG, "convertMeteringRegionsToLegacy - " + regionName + " areas = "
+ + ParameterUtils.stringFromAreaList(meteringAreaList));
+ }
+
+ return meteringAreaList;
+ }
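+
+ // A sketch of the weight-filtering rule above, assuming a single hypothetical
+ // region whose weight is METERING_WEIGHT_DONT_CARE (0); it is dropped and the
+ // default full-viewport area is substituted:
+ //
+ // MeteringRectangle[] regions = { new MeteringRectangle(
+ // 0, 0, 100, 100, MeteringRectangle.METERING_WEIGHT_DONT_CARE) };
+ // List<Camera.Area> areas = convertMeteringRegionsToLegacy(
+ // activeArray, zoomData, regions, /*maxNumMeteringAreas*/1, /*regionName*/"AE");
+ // // areas == [ParameterUtils.CAMERA_AREA_DEFAULT], plus a weight-0 warning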
+
+ private static void mapAeAndFlashMode(CaptureRequest r, /*out*/Parameters p) {
+ int flashMode = ParamsUtils.getOrDefault(r, FLASH_MODE, FLASH_MODE_OFF);
+ int aeMode = ParamsUtils.getOrDefault(r, CONTROL_AE_MODE, CONTROL_AE_MODE_ON);
+
+ List<String> supportedFlashModes = p.getSupportedFlashModes();
+
+ String flashModeSetting = null;
+
+ // Flash is OFF by default, on cameras that support flash
+ if (ListUtils.listContains(supportedFlashModes, Parameters.FLASH_MODE_OFF)) {
+ flashModeSetting = Parameters.FLASH_MODE_OFF;
+ }
+
+ /*
+ * Map all of the control.aeMode* enums, but ignore AE_MODE_OFF since we never support it
+ */
+
+ // Ignore flash.mode controls unless aeMode == ON
+ if (aeMode == CONTROL_AE_MODE_ON) {
+ if (flashMode == FLASH_MODE_TORCH) {
+ if (ListUtils.listContains(supportedFlashModes, Parameters.FLASH_MODE_TORCH)) {
+ flashModeSetting = Parameters.FLASH_MODE_TORCH;
+ } else {
+ Log.w(TAG, "mapAeAndFlashMode - Ignore flash.mode == TORCH;" +
+ "camera does not support it");
+ }
+ } else if (flashMode == FLASH_MODE_SINGLE) {
+ if (ListUtils.listContains(supportedFlashModes, Parameters.FLASH_MODE_ON)) {
+ flashModeSetting = Parameters.FLASH_MODE_ON;
+ } else {
+ Log.w(TAG, "mapAeAndFlashMode - Ignore flash.mode == SINGLE;" +
+ "camera does not support it");
+ }
+ } else {
+ // Use the default FLASH_MODE_OFF
+ }
+ } else if (aeMode == CONTROL_AE_MODE_ON_ALWAYS_FLASH) {
+ if (ListUtils.listContains(supportedFlashModes, Parameters.FLASH_MODE_ON)) {
+ flashModeSetting = Parameters.FLASH_MODE_ON;
+ } else {
+ Log.w(TAG, "mapAeAndFlashMode - Ignore control.aeMode == ON_ALWAYS_FLASH;" +
+ "camera does not support it");
+ }
+ } else if (aeMode == CONTROL_AE_MODE_ON_AUTO_FLASH) {
+ if (ListUtils.listContains(supportedFlashModes, Parameters.FLASH_MODE_AUTO)) {
+ flashModeSetting = Parameters.FLASH_MODE_AUTO;
+ } else {
+ Log.w(TAG, "mapAeAndFlashMode - Ignore control.aeMode == ON_AUTO_FLASH;" +
+ "camera does not support it");
+ }
+ } else if (aeMode == CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
+ if (ListUtils.listContains(supportedFlashModes, Parameters.FLASH_MODE_RED_EYE)) {
+ flashModeSetting = Parameters.FLASH_MODE_RED_EYE;
+ } else {
+ Log.w(TAG, "mapAeAndFlashMode - Ignore control.aeMode == ON_AUTO_FLASH_REDEYE;"
+ + "camera does not support it");
+ }
+ } else {
+ // Default to aeMode == ON, flash = OFF
+ }
+
+ if (flashModeSetting != null) {
+ p.setFlashMode(flashModeSetting);
+ }
+
+ if (DEBUG) {
+ Log.v(TAG,
+ "mapAeAndFlashMode - set flash.mode (api1) to " + flashModeSetting
+ + ", requested (api2) " + flashMode
+ + ", supported (api1) " + ListUtils.listToString(supportedFlashModes));
+ }
+ }
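+
+ // A sketch of the request-side mapping above, assuming a hypothetical device
+ // whose getSupportedFlashModes() includes FLASH_MODE_AUTO:
+ //
+ // CaptureRequest.Builder b = ...; // builder obtained elsewhere (hypothetical)
+ // b.set(CaptureRequest.CONTROL_AE_MODE, CONTROL_AE_MODE_ON_AUTO_FLASH);
+ // mapAeAndFlashMode(b.build(), /*out*/params);
+ // // params.getFlashMode() == Parameters.FLASH_MODE_AUTO; if AUTO were not
+ // // supported, the default FLASH_MODE_OFF would be kept instead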
+
+ /**
+ * Returns null if the anti-banding mode enum is not supported.
+ */
+ private static String convertAeAntiBandingModeToLegacy(int mode) {
+ switch (mode) {
+ case CONTROL_AE_ANTIBANDING_MODE_OFF: {
+ return Parameters.ANTIBANDING_OFF;
+ }
+ case CONTROL_AE_ANTIBANDING_MODE_50HZ: {
+ return Parameters.ANTIBANDING_50HZ;
+ }
+ case CONTROL_AE_ANTIBANDING_MODE_60HZ: {
+ return Parameters.ANTIBANDING_60HZ;
+ }
+ case CONTROL_AE_ANTIBANDING_MODE_AUTO: {
+ return Parameters.ANTIBANDING_AUTO;
+ }
+ default: {
+ return null;
+ }
+ }
+ }
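+
+ // For instance (per the switch above):
+ // convertAeAntiBandingModeToLegacy(CONTROL_AE_ANTIBANDING_MODE_50HZ) returns
+ // Parameters.ANTIBANDING_50HZ, while an out-of-range mode such as -1 returns
+ // null, signalling "unsupported" to the caller.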
+
+ private static int[] convertAeFpsRangeToLegacy(Range<Integer> fpsRange) {
+ int[] legacyFps = new int[2];
+ legacyFps[Parameters.PREVIEW_FPS_MIN_INDEX] = fpsRange.getLower() * 1000;
+ legacyFps[Parameters.PREVIEW_FPS_MAX_INDEX] = fpsRange.getUpper() * 1000;
+ return legacyFps;
+ }
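+
+ // A worked example of the conversion above (camera1 FPS ranges are expressed
+ // in units of 1/1000 of a frame per second):
+ //
+ // int[] legacy = convertAeFpsRangeToLegacy(new Range<>(15, 30));
+ // // legacy[Parameters.PREVIEW_FPS_MIN_INDEX] == 15000
+ // // legacy[Parameters.PREVIEW_FPS_MAX_INDEX] == 30000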
+
+ private static String convertAwbModeToLegacy(int mode) {
+ switch (mode) {
+ case CONTROL_AWB_MODE_AUTO:
+ return Camera.Parameters.WHITE_BALANCE_AUTO;
+ case CONTROL_AWB_MODE_INCANDESCENT:
+ return Camera.Parameters.WHITE_BALANCE_INCANDESCENT;
+ case CONTROL_AWB_MODE_FLUORESCENT:
+ return Camera.Parameters.WHITE_BALANCE_FLUORESCENT;
+ case CONTROL_AWB_MODE_WARM_FLUORESCENT:
+ return Camera.Parameters.WHITE_BALANCE_WARM_FLUORESCENT;
+ case CONTROL_AWB_MODE_DAYLIGHT:
+ return Camera.Parameters.WHITE_BALANCE_DAYLIGHT;
+ case CONTROL_AWB_MODE_CLOUDY_DAYLIGHT:
+ return Camera.Parameters.WHITE_BALANCE_CLOUDY_DAYLIGHT;
+ case CONTROL_AWB_MODE_TWILIGHT:
+ return Camera.Parameters.WHITE_BALANCE_TWILIGHT;
+ case CONTROL_AWB_MODE_SHADE:
+ return Parameters.WHITE_BALANCE_SHADE;
+ default:
+ Log.w(TAG, "convertAwbModeToLegacy - unrecognized control.awbMode" + mode);
+ return Camera.Parameters.WHITE_BALANCE_AUTO;
+ }
+ }
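+
+ // For instance: convertAwbModeToLegacy(CONTROL_AWB_MODE_DAYLIGHT) returns
+ // Parameters.WHITE_BALANCE_DAYLIGHT, and any unrecognized mode falls back to
+ // WHITE_BALANCE_AUTO with a warning, so the camera1 API always gets a valid value.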
+
+ /**
+ * Return {@code null} if the value is not supported, otherwise return the retrieved key's
+ * value from the request (or the default value if it wasn't set).
+ *
+ * <p>If the fetched value in the request is equivalent to {@code allowedValue},
+ * then omit the warning (e.g. turning off AF lock on a camera
+ * that always has the AF lock turned off is a silent no-op), but still return {@code null}.</p>
+ *
+ * <p>Logs a warning to logcat if the key is not supported by the api1 camera device.</p>
+ */
+ private static <T> T getIfSupported(
+ CaptureRequest r, CaptureRequest.Key<T> key, T defaultValue, boolean isSupported,
+ T allowedValue) {
+ T val = ParamsUtils.getOrDefault(r, key, defaultValue);
+
+ if (!isSupported) {
+ if (!Objects.equals(val, allowedValue)) {
+ Log.w(TAG, key.getName() + " is not supported; ignoring requested value " + val);
+ }
+ return null;
+ }
+
+ return val;
+ }
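+
+ // A sketch of the outcomes above, using control.aeLock as the example key
+ // (the CaptureRequest r is assumed to come from the caller):
+ //
+ // // supported: returns the requested value (or defaultValue when unset)
+ // Boolean v = getIfSupported(r, CONTROL_AE_LOCK, false, /*isSupported*/true, false);
+ // // unsupported: returns null, warning only when the fetched value differs
+ // // from allowedValue (a no-op request is silently ignored)
+ // v = getIfSupported(r, CONTROL_AE_LOCK, false, /*isSupported*/false, false);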
+}
diff --git a/android/hardware/camera2/legacy/LegacyResultMapper.java b/android/hardware/camera2/legacy/LegacyResultMapper.java
new file mode 100644
index 00000000..dc5823d8
--- /dev/null
+++ b/android/hardware/camera2/legacy/LegacyResultMapper.java
@@ -0,0 +1,520 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.legacy;
+
+import android.graphics.Rect;
+import android.hardware.Camera;
+import android.hardware.Camera.Parameters;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.impl.CameraMetadataNative;
+import android.hardware.camera2.legacy.ParameterUtils.WeightedRectangle;
+import android.hardware.camera2.legacy.ParameterUtils.ZoomData;
+import android.hardware.camera2.params.MeteringRectangle;
+import android.hardware.camera2.utils.ListUtils;
+import android.hardware.camera2.utils.ParamsUtils;
+import android.util.Log;
+import android.util.Size;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import static android.hardware.camera2.CaptureResult.*;
+
+/**
+ * Build camera2 {@code CaptureResult} metadata from legacy (camera API1) parameters,
+ * for use on legacy devices.
+ */
+@SuppressWarnings("deprecation")
+public class LegacyResultMapper {
+ private static final String TAG = "LegacyResultMapper";
+ private static final boolean DEBUG = false;
+
+ private LegacyRequest mCachedRequest = null;
+ private CameraMetadataNative mCachedResult = null;
+
+ /**
+ * Generate capture result metadata from the legacy camera request.
+ *
+ * <p>This method caches and reuses the result from the previous call to this method if
+ * the {@code parameters} of the subsequent {@link LegacyRequest} passed to this method
+ * have not changed.</p>
+ *
+ * @param legacyRequest a non-{@code null} legacy request containing the latest parameters
+ * @param timestamp the timestamp to use for this result in nanoseconds.
+ *
+ * @return {@link CameraMetadataNative} object containing result metadata.
+ */
+ public CameraMetadataNative cachedConvertResultMetadata(
+ LegacyRequest legacyRequest, long timestamp) {
+ CameraMetadataNative result;
+ boolean cached;
+
+ /*
+ * Attempt to look up the result from the cache if the parameters haven't changed
+ */
+ if (mCachedRequest != null &&
+ legacyRequest.parameters.same(mCachedRequest.parameters) &&
+ legacyRequest.captureRequest.equals(mCachedRequest.captureRequest)) {
+ result = new CameraMetadataNative(mCachedResult);
+ cached = true;
+ } else {
+ result = convertResultMetadata(legacyRequest);
+ cached = false;
+
+ // Always cache a *copy* of the metadata result,
+ // since api2's client side takes ownership of it after it receives a result
+ mCachedRequest = legacyRequest;
+ mCachedResult = new CameraMetadataNative(result);
+ }
+
+ /*
+ * Unconditionally set fields that change in every single frame
+ */
+ {
+ // sensor.timestamp
+ result.set(SENSOR_TIMESTAMP, timestamp);
+ }
+
+ if (DEBUG) {
+ Log.v(TAG, "cachedConvertResultMetadata - cached? " + cached +
+ " timestamp = " + timestamp);
+
+ Log.v(TAG, "----- beginning of result dump ------");
+ result.dumpToLog();
+ Log.v(TAG, "----- end of result dump ------");
+ }
+
+ return result;
+ }
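+
+ // A minimal usage sketch (the legacyRequest and timestampNs values are assumed
+ // to be supplied by the request threads elsewhere in this package):
+ //
+ // LegacyResultMapper mapper = new LegacyResultMapper();
+ // CameraMetadataNative result =
+ // mapper.cachedConvertResultMetadata(legacyRequest, timestampNs);
+ // // a second call with unchanged parameters reuses the cached conversion;
+ // // only sensor.timestamp is rewritten for every frame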
+
+ /**
+ * Generate capture result metadata from the legacy camera request.
+ *
+ * @param legacyRequest a non-{@code null} legacy request containing the latest parameters
+ * @return a {@link CameraMetadataNative} object containing result metadata.
+ */
+ private static CameraMetadataNative convertResultMetadata(LegacyRequest legacyRequest) {
+ CameraCharacteristics characteristics = legacyRequest.characteristics;
+ CaptureRequest request = legacyRequest.captureRequest;
+ Size previewSize = legacyRequest.previewSize;
+ Camera.Parameters params = legacyRequest.parameters;
+
+ CameraMetadataNative result = new CameraMetadataNative();
+
+ Rect activeArraySize = characteristics.get(
+ CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+ ZoomData zoomData = ParameterUtils.convertScalerCropRegion(activeArraySize,
+ request.get(CaptureRequest.SCALER_CROP_REGION), previewSize, params);
+
+ /*
+ * colorCorrection
+ */
+ // colorCorrection.aberrationMode
+ {
+ result.set(COLOR_CORRECTION_ABERRATION_MODE,
+ request.get(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE));
+ }
+
+ /*
+ * control
+ */
+
+ /*
+ * control.ae*
+ */
+ mapAe(result, characteristics, request, activeArraySize, zoomData, /*out*/params);
+
+ /*
+ * control.af*
+ */
+ mapAf(result, activeArraySize, zoomData, /*out*/params);
+
+ /*
+ * control.awb*
+ */
+ mapAwb(result, /*out*/params);
+
+ /*
+ * control.captureIntent
+ */
+ {
+ int captureIntent = ParamsUtils.getOrDefault(request,
+ CaptureRequest.CONTROL_CAPTURE_INTENT,
+ /*defaultValue*/CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW);
+
+ captureIntent = LegacyRequestMapper.filterSupportedCaptureIntent(captureIntent);
+
+ result.set(CONTROL_CAPTURE_INTENT, captureIntent);
+ }
+
+ /*
+ * control.mode
+ */
+ {
+ int controlMode = ParamsUtils.getOrDefault(request, CaptureRequest.CONTROL_MODE,
+ CONTROL_MODE_AUTO);
+ if (controlMode == CaptureResult.CONTROL_MODE_USE_SCENE_MODE) {
+ result.set(CONTROL_MODE, CONTROL_MODE_USE_SCENE_MODE);
+ } else {
+ result.set(CONTROL_MODE, CONTROL_MODE_AUTO);
+ }
+ }
+
+ /*
+ * control.sceneMode
+ */
+ {
+ String legacySceneMode = params.getSceneMode();
+ int mode = LegacyMetadataMapper.convertSceneModeFromLegacy(legacySceneMode);
+ if (mode != LegacyMetadataMapper.UNKNOWN_MODE) {
+ result.set(CaptureResult.CONTROL_SCENE_MODE, mode);
+ // In case of SCENE_MODE == FACE_PRIORITY, LegacyFaceDetectMapper will override
+ // the result to say SCENE_MODE == FACE_PRIORITY.
+ } else {
+ Log.w(TAG, "Unknown scene mode " + legacySceneMode +
+ " returned by camera HAL, setting to disabled.");
+ result.set(CaptureResult.CONTROL_SCENE_MODE, CONTROL_SCENE_MODE_DISABLED);
+ }
+ }
+
+ /*
+ * control.effectMode
+ */
+ {
+ String legacyEffectMode = params.getColorEffect();
+ int mode = LegacyMetadataMapper.convertEffectModeFromLegacy(legacyEffectMode);
+ if (mode != LegacyMetadataMapper.UNKNOWN_MODE) {
+ result.set(CaptureResult.CONTROL_EFFECT_MODE, mode);
+ } else {
+ Log.w(TAG, "Unknown effect mode " + legacyEffectMode +
+ " returned by camera HAL, setting to off.");
+ result.set(CaptureResult.CONTROL_EFFECT_MODE, CONTROL_EFFECT_MODE_OFF);
+ }
+ }
+
+ // control.videoStabilizationMode
+ {
+ int stabMode =
+ (params.isVideoStabilizationSupported() && params.getVideoStabilization()) ?
+ CONTROL_VIDEO_STABILIZATION_MODE_ON :
+ CONTROL_VIDEO_STABILIZATION_MODE_OFF;
+ result.set(CONTROL_VIDEO_STABILIZATION_MODE, stabMode);
+ }
+
+ /*
+ * flash
+ */
+ {
+ // flash.mode, flash.state mapped in mapAeAndFlashMode
+ }
+
+ /*
+ * lens
+ */
+ // lens.focusDistance
+ {
+ if (Parameters.FOCUS_MODE_INFINITY.equals(params.getFocusMode())) {
+ result.set(CaptureResult.LENS_FOCUS_DISTANCE, 0.0f);
+ }
+ }
+
+ // lens.focalLength
+ result.set(CaptureResult.LENS_FOCAL_LENGTH, params.getFocalLength());
+
+ /*
+ * request
+ */
+ // request.pipelineDepth
+ result.set(REQUEST_PIPELINE_DEPTH,
+ characteristics.get(CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH));
+
+ /*
+ * scaler
+ */
+ mapScaler(result, zoomData, /*out*/params);
+
+ /*
+ * sensor
+ */
+ // sensor.timestamp varies every frame; mapping is done in #cachedConvertResultMetadata
+ {
+ // Unconditionally no test patterns
+ result.set(SENSOR_TEST_PATTERN_MODE, SENSOR_TEST_PATTERN_MODE_OFF);
+ }
+
+ /*
+ * jpeg
+ */
+ // jpeg.gpsLocation
+ result.set(JPEG_GPS_LOCATION, request.get(CaptureRequest.JPEG_GPS_LOCATION));
+
+ // jpeg.orientation
+ result.set(JPEG_ORIENTATION, request.get(CaptureRequest.JPEG_ORIENTATION));
+
+ // jpeg.quality
+ result.set(JPEG_QUALITY, (byte) params.getJpegQuality());
+
+ // jpeg.thumbnailQuality
+ result.set(JPEG_THUMBNAIL_QUALITY, (byte) params.getJpegThumbnailQuality());
+
+ // jpeg.thumbnailSize
+ Camera.Size s = params.getJpegThumbnailSize();
+ if (s != null) {
+ result.set(JPEG_THUMBNAIL_SIZE, ParameterUtils.convertSize(s));
+ } else {
+ Log.w(TAG, "Null thumbnail size received from parameters.");
+ }
+
+ /*
+ * noiseReduction.*
+ */
+ // noiseReduction.mode
+ result.set(NOISE_REDUCTION_MODE, request.get(CaptureRequest.NOISE_REDUCTION_MODE));
+
+ return result;
+ }
+
+ private static void mapAe(CameraMetadataNative m,
+ CameraCharacteristics characteristics,
+ CaptureRequest request, Rect activeArray, ZoomData zoomData, /*out*/Parameters p) {
+ // control.aeAntiBandingMode
+ {
+ int antiBandingMode = LegacyMetadataMapper.convertAntiBandingModeOrDefault(
+ p.getAntibanding());
+ m.set(CONTROL_AE_ANTIBANDING_MODE, antiBandingMode);
+ }
+
+ // control.aeExposureCompensation
+ {
+ m.set(CONTROL_AE_EXPOSURE_COMPENSATION, p.getExposureCompensation());
+ }
+
+ // control.aeLock
+ {
+ boolean lock = p.isAutoExposureLockSupported() ? p.getAutoExposureLock() : false;
+ m.set(CONTROL_AE_LOCK, lock);
+ if (DEBUG) {
+ Log.v(TAG,
+ "mapAe - android.control.aeLock = " + lock +
+ ", supported = " + p.isAutoExposureLockSupported());
+ }
+
+ Boolean requestLock = request.get(CaptureRequest.CONTROL_AE_LOCK);
+ if (requestLock != null && requestLock != lock) {
+ Log.w(TAG,
+ "mapAe - android.control.aeLock was requested to " + requestLock +
+ " but resulted in " + lock);
+ }
+ }
+
+ // control.aeMode, flash.mode, flash.state
+ mapAeAndFlashMode(m, characteristics, p);
+
+ // control.aeState
+ if (LegacyMetadataMapper.LIE_ABOUT_AE_STATE) {
+ // Lie to pass CTS temporarily.
+ // TODO: Implement precapture trigger, after which we can report CONVERGED ourselves
+ m.set(CONTROL_AE_STATE, CONTROL_AE_STATE_CONVERGED);
+ }
+
+ // control.aeRegions
+ if (p.getMaxNumMeteringAreas() > 0) {
+ if (DEBUG) {
+ String meteringAreas = p.get("metering-areas");
+ Log.v(TAG, "mapAe - parameter dump; metering-areas: " + meteringAreas);
+ }
+
+ MeteringRectangle[] meteringRectArray = getMeteringRectangles(activeArray,
+ zoomData, p.getMeteringAreas(), "AE");
+
+ m.set(CONTROL_AE_REGIONS, meteringRectArray);
+ }
+
+ }
+
+ private static void mapAf(CameraMetadataNative m,
+ Rect activeArray, ZoomData zoomData, Camera.Parameters p) {
+ // control.afMode
+ m.set(CaptureResult.CONTROL_AF_MODE, convertLegacyAfMode(p.getFocusMode()));
+
+ // control.afRegions
+ if (p.getMaxNumFocusAreas() > 0) {
+ if (DEBUG) {
+ String focusAreas = p.get("focus-areas");
+ Log.v(TAG, "mapAe - parameter dump; focus-areas: " + focusAreas);
+ }
+
+ MeteringRectangle[] meteringRectArray = getMeteringRectangles(activeArray,
+ zoomData, p.getFocusAreas(), "AF");
+
+ m.set(CONTROL_AF_REGIONS, meteringRectArray);
+ }
+ }
+
+ private static void mapAwb(CameraMetadataNative m, Camera.Parameters p) {
+ // control.awbLock
+ {
+ boolean lock = p.isAutoWhiteBalanceLockSupported() ?
+ p.getAutoWhiteBalanceLock() : false;
+ m.set(CONTROL_AWB_LOCK, lock);
+ }
+
+ // control.awbMode
+ {
+ int awbMode = convertLegacyAwbMode(p.getWhiteBalance());
+ m.set(CONTROL_AWB_MODE, awbMode);
+ }
+ }
+
+ private static MeteringRectangle[] getMeteringRectangles(Rect activeArray, ZoomData zoomData,
+ List<Camera.Area> meteringAreaList, String regionName) {
+ List<MeteringRectangle> meteringRectList = new ArrayList<>();
+ if (meteringAreaList != null) {
+ for (Camera.Area area : meteringAreaList) {
+ WeightedRectangle rect =
+ ParameterUtils.convertCameraAreaToActiveArrayRectangle(
+ activeArray, zoomData, area);
+
+ meteringRectList.add(rect.toMetering());
+ }
+ }
+
+ if (DEBUG) {
+ Log.v(TAG,
+ "Metering rectangles for " + regionName + ": "
+ + ListUtils.listToString(meteringRectList));
+ }
+
+ return meteringRectList.toArray(new MeteringRectangle[0]);
+ }
+
+ /** Map results for control.aeMode, flash.mode, flash.state */
+ private static void mapAeAndFlashMode(CameraMetadataNative m,
+ CameraCharacteristics characteristics, Parameters p) {
+ // Default: AE mode on but flash never fires
+ int flashMode = FLASH_MODE_OFF;
+ // If there is no flash on this camera, the state is always unavailable;
+ // otherwise it's only known for TORCH/SINGLE modes
+ Integer flashState = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE)
+ ? null : FLASH_STATE_UNAVAILABLE;
+ int aeMode = CONTROL_AE_MODE_ON;
+
+ String flashModeSetting = p.getFlashMode();
+
+ if (flashModeSetting != null) {
+ switch (flashModeSetting) {
+ case Parameters.FLASH_MODE_OFF:
+ break; // ok, using default
+ case Parameters.FLASH_MODE_AUTO:
+ aeMode = CONTROL_AE_MODE_ON_AUTO_FLASH;
+ break;
+ case Parameters.FLASH_MODE_ON:
+ // flashMode = SINGLE + aeMode = ON is indistinguishable from ON_ALWAYS_FLASH
+ flashMode = FLASH_MODE_SINGLE;
+ aeMode = CONTROL_AE_MODE_ON_ALWAYS_FLASH;
+ flashState = FLASH_STATE_FIRED;
+ break;
+ case Parameters.FLASH_MODE_RED_EYE:
+ aeMode = CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
+ break;
+ case Parameters.FLASH_MODE_TORCH:
+ flashMode = FLASH_MODE_TORCH;
+ flashState = FLASH_STATE_FIRED;
+ break;
+ default:
+ Log.w(TAG,
+ "mapAeAndFlashMode - Ignoring unknown flash mode " + p.getFlashMode());
+ }
+ }
+
+ // flash.state
+ m.set(FLASH_STATE, flashState);
+ // flash.mode
+ m.set(FLASH_MODE, flashMode);
+ // control.aeMode
+ m.set(CONTROL_AE_MODE, aeMode);
+ }
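+
+ // For instance, when params.getFlashMode() is Parameters.FLASH_MODE_TORCH the
+ // result above reports flash.mode == TORCH, flash.state == FIRED and
+ // control.aeMode == ON; FLASH_MODE_ON instead reports flash.mode == SINGLE
+ // with control.aeMode == ON_ALWAYS_FLASH.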
+
+ private static int convertLegacyAfMode(String mode) {
+ if (mode == null) {
+ Log.w(TAG, "convertLegacyAfMode - no AF mode, default to OFF");
+ return CONTROL_AF_MODE_OFF;
+ }
+
+ switch (mode) {
+ case Parameters.FOCUS_MODE_AUTO:
+ return CONTROL_AF_MODE_AUTO;
+ case Parameters.FOCUS_MODE_CONTINUOUS_PICTURE:
+ return CONTROL_AF_MODE_CONTINUOUS_PICTURE;
+ case Parameters.FOCUS_MODE_CONTINUOUS_VIDEO:
+ return CONTROL_AF_MODE_CONTINUOUS_VIDEO;
+ case Parameters.FOCUS_MODE_EDOF:
+ return CONTROL_AF_MODE_EDOF;
+ case Parameters.FOCUS_MODE_MACRO:
+ return CONTROL_AF_MODE_MACRO;
+ case Parameters.FOCUS_MODE_FIXED:
+ return CONTROL_AF_MODE_OFF;
+ case Parameters.FOCUS_MODE_INFINITY:
+ return CONTROL_AF_MODE_OFF;
+ default:
+ Log.w(TAG, "convertLegacyAfMode - unknown mode " + mode + " , ignoring");
+ return CONTROL_AF_MODE_OFF;
+ }
+ }
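+
+ // For instance: convertLegacyAfMode(Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)
+ // returns CONTROL_AF_MODE_CONTINUOUS_PICTURE, while FIXED, INFINITY, null and
+ // any unknown string all collapse to CONTROL_AF_MODE_OFF.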
+
+ private static int convertLegacyAwbMode(String mode) {
+ if (mode == null) {
+ // OK: camera1 api may not support changing WB modes; assume AUTO
+ return CONTROL_AWB_MODE_AUTO;
+ }
+
+ switch (mode) {
+ case Camera.Parameters.WHITE_BALANCE_AUTO:
+ return CONTROL_AWB_MODE_AUTO;
+ case Camera.Parameters.WHITE_BALANCE_INCANDESCENT:
+ return CONTROL_AWB_MODE_INCANDESCENT;
+ case Camera.Parameters.WHITE_BALANCE_FLUORESCENT:
+ return CONTROL_AWB_MODE_FLUORESCENT;
+ case Camera.Parameters.WHITE_BALANCE_WARM_FLUORESCENT:
+ return CONTROL_AWB_MODE_WARM_FLUORESCENT;
+ case Camera.Parameters.WHITE_BALANCE_DAYLIGHT:
+ return CONTROL_AWB_MODE_DAYLIGHT;
+ case Camera.Parameters.WHITE_BALANCE_CLOUDY_DAYLIGHT:
+ return CONTROL_AWB_MODE_CLOUDY_DAYLIGHT;
+ case Camera.Parameters.WHITE_BALANCE_TWILIGHT:
+ return CONTROL_AWB_MODE_TWILIGHT;
+ case Camera.Parameters.WHITE_BALANCE_SHADE:
+ return CONTROL_AWB_MODE_SHADE;
+ default:
+ Log.w(TAG, "convertAwbMode - unrecognized WB mode " + mode);
+ return CONTROL_AWB_MODE_AUTO;
+ }
+ }
+
+ /** Map results for scaler.* */
+ private static void mapScaler(CameraMetadataNative m,
+ ZoomData zoomData,
+ /*out*/Parameters p) {
+ /*
+ * scaler.cropRegion
+ */
+ {
+ m.set(SCALER_CROP_REGION, zoomData.reportedCrop);
+ }
+ }
+}
diff --git a/android/hardware/camera2/legacy/ParameterUtils.java b/android/hardware/camera2/legacy/ParameterUtils.java
new file mode 100644
index 00000000..3cfd020a
--- /dev/null
+++ b/android/hardware/camera2/legacy/ParameterUtils.java
@@ -0,0 +1,1006 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.legacy;
+
+import android.graphics.Matrix;
+import android.graphics.Point;
+import android.graphics.Rect;
+import android.graphics.RectF;
+import android.hardware.Camera;
+import android.hardware.Camera.Area;
+import android.hardware.camera2.params.Face;
+import android.hardware.camera2.params.MeteringRectangle;
+import android.hardware.camera2.utils.ListUtils;
+import android.hardware.camera2.utils.ParamsUtils;
+import android.hardware.camera2.utils.SizeAreaComparator;
+import android.util.Size;
+import android.util.SizeF;
+
+import android.util.Log;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+
+import static com.android.internal.util.Preconditions.*;
+
+/**
+ * Various utilities for dealing with camera API1 parameters.
+ */
+@SuppressWarnings("deprecation")
+public class ParameterUtils {
+ /** Upper/left minimal point of a normalized rectangle */
+ public static final int NORMALIZED_RECTANGLE_MIN = -1000;
+ /** Lower/right maximal point of a normalized rectangle */
+ public static final int NORMALIZED_RECTANGLE_MAX = 1000;
+ /** The default normalized rectangle spans the entire size of the preview viewport */
+ public static final Rect NORMALIZED_RECTANGLE_DEFAULT = new Rect(
+ NORMALIZED_RECTANGLE_MIN,
+ NORMALIZED_RECTANGLE_MIN,
+ NORMALIZED_RECTANGLE_MAX,
+ NORMALIZED_RECTANGLE_MAX);
+ /** The default normalized area uses the default normalized rectangle with a weight=1 */
+ public static final Camera.Area CAMERA_AREA_DEFAULT =
+ new Camera.Area(new Rect(NORMALIZED_RECTANGLE_DEFAULT),
+ /*weight*/1);
+ /** Empty rectangle {@code 0x0+0,0} */
+ public static final Rect RECTANGLE_EMPTY =
+ new Rect(/*left*/0, /*top*/0, /*right*/0, /*bottom*/0);
+
+ private static final double ASPECT_RATIO_TOLERANCE = 0.05;
+
+ /**
+ * Calculate effective/reported zoom data from a user-specified crop region.
+ */
+ public static class ZoomData {
+ /** Zoom index used by {@link Camera.Parameters#setZoom} */
+ public final int zoomIndex;
+ /** Effective crop-region given the zoom index, coordinates relative to active-array */
+ public final Rect previewCrop;
+ /** Reported crop-region given the zoom index, coordinates relative to active-array */
+ public final Rect reportedCrop;
+
+ public ZoomData(int zoomIndex, Rect previewCrop, Rect reportedCrop) {
+ this.zoomIndex = zoomIndex;
+ this.previewCrop = previewCrop;
+ this.reportedCrop = reportedCrop;
+ }
+ }
+
+ /**
+ * Calculate effective/reported metering data from a user-specified metering region.
+ */
+ public static class MeteringData {
+ /**
+ * The metering area scaled to the range of [-1000, 1000].
+ * <p>Values outside of this range are clipped to be within the range.</p>
+ */
+ public final Camera.Area meteringArea;
+ /**
+ * Effective preview metering region, coordinates relative to active-array.
+ *
+ * <p>Clipped to fit inside of the (effective) preview crop region.</p>
+ */
+ public final Rect previewMetering;
+ /**
+ * Reported metering region, coordinates relative to active-array.
+ *
+ * <p>Clipped to fit inside of the (reported) resulting crop region.</p>
+ */
+ public final Rect reportedMetering;
+
+ public MeteringData(Area meteringArea, Rect previewMetering, Rect reportedMetering) {
+ this.meteringArea = meteringArea;
+ this.previewMetering = previewMetering;
+ this.reportedMetering = reportedMetering;
+ }
+ }
+
+ /**
+ * A weighted rectangle is an arbitrary rectangle (the coordinate system is unknown) with an
+ * arbitrary weight.
+ *
+ * <p>The user of this class must know what the coordinate system is ahead of time; it's
+ * then possible to convert to a more concrete type such as a metering rectangle or a face.
+ * </p>
+ *
+ * <p>When converting to a more concrete type, out-of-range values are clipped; this prevents
+ * possible illegal argument exceptions being thrown at runtime.</p>
+ */
+ public static class WeightedRectangle {
+ /** Arbitrary rectangle (the range is user-defined); never {@code null}. */
+ public final Rect rect;
+ /** Arbitrary weight (the range is user-defined). */
+ public final int weight;
+
+ /**
+ * Create a new weighted-rectangle from a non-{@code null} rectangle; the {@code weight}
+ * can be unbounded.
+ */
+ public WeightedRectangle(Rect rect, int weight) {
+ this.rect = checkNotNull(rect, "rect must not be null");
+ this.weight = weight;
+ }
+
+ /**
+ * Convert to a metering rectangle, clipping any of the values to stay within range.
+ *
+ * <p>If values are clipped, a warning is printed to logcat.</p>
+ *
+ * @return a new metering rectangle
+ */
+ public MeteringRectangle toMetering() {
+ int weight = clip(this.weight,
+ MeteringRectangle.METERING_WEIGHT_MIN,
+ MeteringRectangle.METERING_WEIGHT_MAX,
+ rect,
+ "weight");
+
+ int x = clipLower(rect.left, /*lo*/0, rect, "left");
+ int y = clipLower(rect.top, /*lo*/0, rect, "top");
+ int w = clipLower(rect.width(), /*lo*/0, rect, "width");
+ int h = clipLower(rect.height(), /*lo*/0, rect, "height");
+
+ return new MeteringRectangle(x, y, w, h, weight);
+ }
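+
+ // A sketch of the clipping above with a deliberately out-of-range input:
+ //
+ // WeightedRectangle wr = new WeightedRectangle(new Rect(-5, 0, 10, 10), 2000);
+ // MeteringRectangle m = wr.toMetering();
+ // // m == (x:0, y:0, w:15, h:10, weight:1000); the left edge and the weight
+ // // were clipped into range, each with a warning in logcat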
+
+ /**
+ * Convert to a face; the rect is considered to be the bounds, and the weight
+ * is considered to be the score.
+ *
+ * <p>If the score is outside the range [{@value Face#SCORE_MIN}, {@value Face#SCORE_MAX}],
+ * the score is clipped first and a warning is printed to logcat.</p>
+ *
+ * <p>If the id is negative, the id is changed to 0 and a warning is printed to
+ * logcat.</p>
+ *
+ * <p>All other parameters are passed-through as-is.</p>
+ *
+ * @return a new face with the optional features set
+ */
+ public Face toFace(
+ int id, Point leftEyePosition, Point rightEyePosition, Point mouthPosition) {
+ int idSafe = clipLower(id, /*lo*/0, rect, "id");
+ int score = clip(weight,
+ Face.SCORE_MIN,
+ Face.SCORE_MAX,
+ rect,
+ "score");
+
+ return new Face(rect, score, idSafe, leftEyePosition, rightEyePosition, mouthPosition);
+ }
+
+ /**
+ * Convert to a face; the rect is considered to be the bounds, and the weight
+ * is considered to be the score.
+ *
+ * <p>If the score is outside the range [{@value Face#SCORE_MIN}, {@value Face#SCORE_MAX}],
+ * the score is clipped first and a warning is printed to logcat.</p>
+ *
+ * <p>All other parameters are passed-through as-is.</p>
+ *
+ * @return a new face without the optional features
+ */
+ public Face toFace() {
+ int score = clip(weight,
+ Face.SCORE_MIN,
+ Face.SCORE_MAX,
+ rect,
+ "score");
+
+ return new Face(rect, score);
+ }
+
+ private static int clipLower(int value, int lo, Rect rect, String name) {
+ return clip(value, lo, /*hi*/Integer.MAX_VALUE, rect, name);
+ }
+
+ private static int clip(int value, int lo, int hi, Rect rect, String name) {
+ if (value < lo) {
+ Log.w(TAG, "toMetering - Rectangle " + rect + " "
+ + name + " too small, clip to " + lo);
+ value = lo;
+ } else if (value > hi) {
+ Log.w(TAG, "toMetering - Rectangle " + rect + " "
+ + name + " too large, clip to " + hi);
+ value = hi;
+ }
+
+ return value;
+ }
+ }
+
+ private static final String TAG = "ParameterUtils";
+ private static final boolean DEBUG = false;
+
+ /** getZoomRatios stores zoom ratios in 1/100 increments, e.g. a zoom of 3.2 is 320 */
+ private static final int ZOOM_RATIO_MULTIPLIER = 100;
+
+ /**
+ * Convert a camera API1 size into a util size
+ */
+ public static Size convertSize(Camera.Size size) {
+ checkNotNull(size, "size must not be null");
+
+ return new Size(size.width, size.height);
+ }
+
+ /**
+ * Convert a camera API1 list of sizes into a util list of sizes
+ */
+ public static List<Size> convertSizeList(List<Camera.Size> sizeList) {
+ checkNotNull(sizeList, "sizeList must not be null");
+
+ List<Size> sizes = new ArrayList<>(sizeList.size());
+ for (Camera.Size s : sizeList) {
+ sizes.add(new Size(s.width, s.height));
+ }
+ return sizes;
+ }
+
+ /**
+ * Convert a camera API1 list of sizes into an array of sizes
+ */
+ public static Size[] convertSizeListToArray(List<Camera.Size> sizeList) {
+ checkNotNull(sizeList, "sizeList must not be null");
+
+ Size[] array = new Size[sizeList.size()];
+ int ctr = 0;
+ for (Camera.Size s : sizeList) {
+ array[ctr++] = new Size(s.width, s.height);
+ }
+ return array;
+ }
+
+ /**
+ * Check if the camera API1 list of sizes contains a size with the given dimens.
+ */
+ public static boolean containsSize(List<Camera.Size> sizeList, int width, int height) {
+ checkNotNull(sizeList, "sizeList must not be null");
+ for (Camera.Size s : sizeList) {
+ if (s.height == height && s.width == width) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ /**
+ * Returns the largest supported picture size, as compared by its area.
+ */
+ public static Size getLargestSupportedJpegSizeByArea(Camera.Parameters params) {
+ checkNotNull(params, "params must not be null");
+
+ List<Size> supportedJpegSizes = convertSizeList(params.getSupportedPictureSizes());
+ return SizeAreaComparator.findLargestByArea(supportedJpegSizes);
+ }
+
+ /**
+ * Convert a camera area into a human-readable string.
+ */
+ public static String stringFromArea(Camera.Area area) {
+ if (area == null) {
+ return null;
+ } else {
+ StringBuilder sb = new StringBuilder();
+ Rect r = area.rect;
+
+ sb.append("(["); sb.append(r.left); sb.append(',');
+ sb.append(r.top); sb.append("]["); sb.append(r.right);
+ sb.append(','); sb.append(r.bottom); sb.append(']');
+
+ sb.append(',');
+ sb.append(area.weight);
+ sb.append(')');
+
+ return sb.toString();
+ }
+ }
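+
+ // For instance, stringFromArea(CAMERA_AREA_DEFAULT) yields
+ // "([-1000,-1000][1000,1000],1)"; the bounds come first, then the weight.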
+
+ /**
+ * Convert a camera area list into a human-readable string
+ * @param areaList a list of areas (null is ok)
+ */
+ public static String stringFromAreaList(List<Camera.Area> areaList) {
+ StringBuilder sb = new StringBuilder();
+
+ if (areaList == null) {
+ return null;
+ }
+
+ int i = 0;
+ for (Camera.Area area : areaList) {
+ if (area == null) {
+ sb.append("null");
+ } else {
+ sb.append(stringFromArea(area));
+ }
+
+ if (i != areaList.size() - 1) {
+ sb.append(", ");
+ }
+
+ i++;
+ }
+
+ return sb.toString();
+ }
+
+ /**
+ * Calculate the closest zoom index for the user-requested crop region by rounding
+ * up to the closest (largest or equal) possible zoom crop.
+ *
+ * <p>If the requested crop region exceeds the size of the active array, it is
+ * shrunk to fit inside of the active array first.</p>
+ *
+ * <p>Since all api1 camera devices only support a discrete set of zooms, we have
+ * to translate the per-pixel-granularity requested crop region into a per-zoom-index
+ * granularity.</p>
+ *
+ * <p>Furthermore, since the zoom index and zoom levels also depend on the field-of-view
+ * of the preview, the current preview {@code streamSize} is also used.</p>
+ *
+ * <p>The calculated crop regions are then written in-place to {@code reportedCropRegion}
+ * and {@code previewCropRegion}, in coordinates relative to the active array.</p>
+ *
+ * @param params non-{@code null} camera api1 parameters
+ * @param activeArray active array dimensions, in sensor space
+ * @param streamSize stream size dimensions, in pixels
+ * @param cropRegion user-specified crop region, in active array coordinates
+ * @param reportedCropRegion (out parameter) what the result for {@code cropRegion} looks like
+ * @param previewCropRegion (out parameter) what the visual preview crop is
+ * @return
+ * the zoom index inclusively between 0 and {@code Parameters#getMaxZoom},
+ * where 0 means the camera is not zoomed
+ *
+ * @throws NullPointerException if any of the args were {@code null}
+ */
+ public static int getClosestAvailableZoomCrop(
+ Camera.Parameters params, Rect activeArray, Size streamSize, Rect cropRegion,
+ /*out*/
+ Rect reportedCropRegion,
+ Rect previewCropRegion) {
+ checkNotNull(params, "params must not be null");
+ checkNotNull(activeArray, "activeArray must not be null");
+ checkNotNull(streamSize, "streamSize must not be null");
+ checkNotNull(reportedCropRegion, "reportedCropRegion must not be null");
+ checkNotNull(previewCropRegion, "previewCropRegion must not be null");
+
+ Rect actualCrop = new Rect(cropRegion);
+
+ /*
+ * Shrink requested crop region to fit inside of the active array size
+ */
+ if (!actualCrop.intersect(activeArray)) {
+ Log.w(TAG, "getClosestAvailableZoomCrop - Crop region out of range; " +
+ "setting to active array size");
+ actualCrop.set(activeArray);
+ }
+
+ Rect previewCrop = getPreviewCropRectangleUnzoomed(activeArray, streamSize);
+
+ // Make the user-requested crop region the same aspect ratio as the preview stream size
+ Rect cropRegionAsPreview =
+ shrinkToSameAspectRatioCentered(previewCrop, actualCrop);
+
+ if (DEBUG) {
+ Log.v(TAG, "getClosestAvailableZoomCrop - actualCrop = " + actualCrop);
+ Log.v(TAG,
+ "getClosestAvailableZoomCrop - previewCrop = " + previewCrop);
+ Log.v(TAG,
+ "getClosestAvailableZoomCrop - cropRegionAsPreview = " + cropRegionAsPreview);
+ }
+
+ /*
+ * Iterate all available zoom rectangles and find the closest zoom index
+ */
+ Rect bestReportedCropRegion = null;
+ Rect bestPreviewCropRegion = null;
+ int bestZoomIndex = -1;
+
+ List<Rect> availableReportedCropRegions =
+ getAvailableZoomCropRectangles(params, activeArray);
+ List<Rect> availablePreviewCropRegions =
+ getAvailablePreviewZoomCropRectangles(params, activeArray, streamSize);
+
+ if (DEBUG) {
+ Log.v(TAG,
+ "getClosestAvailableZoomCrop - availableReportedCropRegions = " +
+ ListUtils.listToString(availableReportedCropRegions));
+ Log.v(TAG,
+ "getClosestAvailableZoomCrop - availablePreviewCropRegions = " +
+ ListUtils.listToString(availablePreviewCropRegions));
+ }
+
+ if (availableReportedCropRegions.size() != availablePreviewCropRegions.size()) {
+ throw new AssertionError("available reported/preview crop region size mismatch");
+ }
+
+ for (int i = 0; i < availableReportedCropRegions.size(); ++i) {
+ Rect currentPreviewCropRegion = availablePreviewCropRegions.get(i);
+ Rect currentReportedCropRegion = availableReportedCropRegions.get(i);
+
+ boolean isBest;
+ if (bestZoomIndex == -1) {
+ isBest = true;
+ } else if (currentPreviewCropRegion.width() >= cropRegionAsPreview.width() &&
+ currentPreviewCropRegion.height() >= cropRegionAsPreview.height()) {
+ isBest = true;
+ } else {
+ isBest = false;
+ }
+
+ // Sizes are sorted largest-to-smallest, so once the available crop is too small,
+ // the rest are too small as well. Furthermore, this is the final best crop,
+ // since it's the largest crop that still fits the requested crop
+ if (isBest) {
+ bestPreviewCropRegion = currentPreviewCropRegion;
+ bestReportedCropRegion = currentReportedCropRegion;
+ bestZoomIndex = i;
+ } else {
+ break;
+ }
+ }
+
+ if (bestZoomIndex == -1) {
+ // Even in the worst case, we should always at least return 0 here
+ throw new AssertionError("Should've found at least one valid zoom index");
+ }
+
+ // Write the rectangles in-place
+ reportedCropRegion.set(bestReportedCropRegion);
+ previewCropRegion.set(bestPreviewCropRegion);
+
+ return bestZoomIndex;
+ }
+
+ /**
+ * Calculate the effective crop rectangle for this preview viewport;
+ * assumes the preview is centered to the sensor and scaled to fit across one of the dimensions
+ * without skewing.
+ *
+ * <p>The preview size must be a subset of the active array size; the resulting
+ * rectangle will also be a subset of the active array rectangle.</p>
+ *
+ * <p>Only the unzoomed crop rectangle is calculated.</p>
+ *
+ * @param activeArray active array dimensions, in sensor space
+ * @param previewSize size of the preview buffer render target, in pixels (not in sensor space)
+ * @return a rectangle which serves as the preview stream's effective crop region (unzoomed),
+ * in sensor space
+ *
+ * @throws NullPointerException
+ * if any of the args were {@code null}
+ * @throws IllegalArgumentException
+ * if {@code previewSize} is wider or taller than {@code activeArray}
+ */
+ private static Rect getPreviewCropRectangleUnzoomed(Rect activeArray, Size previewSize) {
+ if (previewSize.getWidth() > activeArray.width()) {
+ throw new IllegalArgumentException("previewSize must not be wider than activeArray");
+ } else if (previewSize.getHeight() > activeArray.height()) {
+ throw new IllegalArgumentException("previewSize must not be taller than activeArray");
+ }
+
+ float aspectRatioArray = activeArray.width() * 1.0f / activeArray.height();
+ float aspectRatioPreview = previewSize.getWidth() * 1.0f / previewSize.getHeight();
+
+ float cropH, cropW;
+ if (Math.abs(aspectRatioPreview - aspectRatioArray) < ASPECT_RATIO_TOLERANCE) {
+ cropH = activeArray.height();
+ cropW = activeArray.width();
+ } else if (aspectRatioPreview < aspectRatioArray) {
+ // The preview is relatively narrower than the array: keep the full height and scale the width by the preview AR
+ cropH = activeArray.height();
+ cropW = cropH * aspectRatioPreview;
+ } else {
+ // The preview is relatively wider (or equal): keep the full width and scale the height by the preview AR
+ cropW = activeArray.width();
+ cropH = cropW / aspectRatioPreview;
+ }
+
+ Matrix translateMatrix = new Matrix();
+ RectF cropRect = new RectF(/*left*/0, /*top*/0, cropW, cropH);
+
+ // Now center the crop rectangle so its center is in the center of the active array
+ translateMatrix.setTranslate(activeArray.exactCenterX(), activeArray.exactCenterY());
+ translateMatrix.postTranslate(-cropRect.centerX(), -cropRect.centerY());
+
+ translateMatrix.mapRect(/*inout*/cropRect);
+
+ // Round the rect corners towards the nearest integer values
+ return ParamsUtils.createRect(cropRect);
+ }
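+
+ // A worked example of the letterboxing math above, assuming a hypothetical
+ // 4:3 active array of 4000x3000 and a 16:9 preview of 1920x1080:
+ //
+ // Rect crop = getPreviewCropRectangleUnzoomed(
+ // new Rect(0, 0, 4000, 3000), new Size(1920, 1080));
+ // // crop == Rect(0, 375, 4000, 2625): full width, 2250px tall, centered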
+
+ /**
+ * Shrink the {@code shrinkTarget} rectangle to snugly fit inside of {@code reference};
+ * the aspect ratio of {@code shrinkTarget} will change to be the same aspect ratio as
+ * {@code reference}.
+ *
+ * <p>At most a single dimension will scale (down). Both dimensions will never be scaled.</p>
+ *
+ * @param reference the rectangle whose aspect ratio will be used as the new aspect ratio
+ * @param shrinkTarget the rectangle which will be scaled down to have a new aspect ratio
+ *
+ * @return a new rectangle, a subset of {@code shrinkTarget},
+ * whose aspect ratio will match that of {@code reference}
+ */
+ private static Rect shrinkToSameAspectRatioCentered(Rect reference, Rect shrinkTarget) {
+ float aspectRatioReference = reference.width() * 1.0f / reference.height();
+ float aspectRatioShrinkTarget = shrinkTarget.width() * 1.0f / shrinkTarget.height();
+
+ float cropH, cropW;
+ if (aspectRatioShrinkTarget < aspectRatioReference) {
+ // The target is relatively narrower than the reference: keep the height and scale the width by the target AR
+ cropH = reference.height();
+ cropW = cropH * aspectRatioShrinkTarget;
+ } else {
+ // The target is relatively wider (or equal): keep the width and scale the height by the target AR
+ cropW = reference.width();
+ cropH = cropW / aspectRatioShrinkTarget;
+ }
+
+ Matrix translateMatrix = new Matrix();
+ RectF shrunkRect = new RectF(shrinkTarget);
+
+ // Scale the rectangle down, but keep its center in the same place as before
+ translateMatrix.setScale(cropW / reference.width(), cropH / reference.height(),
+ shrinkTarget.exactCenterX(), shrinkTarget.exactCenterY());
+
+ translateMatrix.mapRect(/*inout*/shrunkRect);
+
+ return ParamsUtils.createRect(shrunkRect);
+ }
+
+ /**
+ * Get the available 'crop' (zoom) rectangles for this camera that will be reported
+ * via a {@code CaptureResult} when a zoom is requested.
+ *
+ * <p>These crops ignore the underlying preview buffer size, and the same values will
+ * always be reported regardless of which output configuration is used.</p>
+ *
+ * <p>When zoom is supported, this will return a list of {@code 1 + #getMaxZoom} size,
+ * where each crop rectangle corresponds to a zoom ratio (and is centered at the middle).</p>
+ *
+ * <p>Each crop rectangle is changed to have the same aspect ratio as {@code streamSize},
+ * by shrinking the rectangle if necessary.</p>
+ *
+ * <p>To get the reported crop region when applying a zoom to the sensor, use {@code streamSize}
+ * = {@code activeArray size}.</p>
+ *
+ * @param params non-{@code null} camera api1 parameters
+ * @param activeArray active array dimensions, in sensor space
+ *
+ * @return a list of available zoom rectangles, sorted from least zoomed to most zoomed
+ */
+ public static List<Rect> getAvailableZoomCropRectangles(
+ Camera.Parameters params, Rect activeArray) {
+ checkNotNull(params, "params must not be null");
+ checkNotNull(activeArray, "activeArray must not be null");
+
+ return getAvailableCropRectangles(params, activeArray, ParamsUtils.createSize(activeArray));
+ }
+
+ /**
+ * Get the available 'crop' (zoom) rectangles for this camera.
+ *
+ * <p>This is the effective (real) crop that is applied by the camera api1 device
+ * when projecting the zoom onto the intermediate preview buffer. Use this when
+ * deciding which zoom ratio to apply.</p>
+ *
+ * <p>When zoom is supported, this will return a list of {@code 1 + #getMaxZoom} size,
+ * where each crop rectangle corresponds to a zoom ratio (and is centered at the middle).</p>
+ *
+ * <p>Each crop rectangle is changed to have the same aspect ratio as {@code streamSize},
+ * by shrinking the rectangle if necessary.</p>
+ *
+ * <p>To get the reported crop region when applying a zoom to the sensor, use {@code streamSize}
+ * = {@code activeArray size}.</p>
+ *
+ * @param params non-{@code null} camera api1 parameters
+ * @param activeArray active array dimensions, in sensor space
+ * @param previewSize preview size dimensions, in pixels
+ *
+ * @return a list of available zoom rectangles, sorted from least zoomed to most zoomed
+ */
+ public static List<Rect> getAvailablePreviewZoomCropRectangles(Camera.Parameters params,
+ Rect activeArray, Size previewSize) {
+ checkNotNull(params, "params must not be null");
+ checkNotNull(activeArray, "activeArray must not be null");
+ checkNotNull(previewSize, "previewSize must not be null");
+
+ return getAvailableCropRectangles(params, activeArray, previewSize);
+ }
+
+ /**
+ * Get the available 'crop' (zoom) rectangles for this camera.
+ *
+ * <p>When zoom is supported, this will return a list of {@code 1 + #getMaxZoom} size,
+ * where each crop rectangle corresponds to a zoom ratio (and is centered at the middle).</p>
+ *
+ * <p>Each crop rectangle is changed to have the same aspect ratio as {@code streamSize},
+ * by shrinking the rectangle if necessary.</p>
+ *
+ * <p>To get the reported crop region when applying a zoom to the sensor, use {@code streamSize}
+ * = {@code activeArray size}.</p>
+ *
+ * @param params non-{@code null} camera api1 parameters
+ * @param activeArray active array dimensions, in sensor space
+ * @param streamSize stream size dimensions, in pixels
+ *
+ * @return a list of available zoom rectangles, sorted from least zoomed to most zoomed
+ */
+ private static List<Rect> getAvailableCropRectangles(Camera.Parameters params,
+ Rect activeArray, Size streamSize) {
+ checkNotNull(params, "params must not be null");
+ checkNotNull(activeArray, "activeArray must not be null");
+ checkNotNull(streamSize, "streamSize must not be null");
+
+ // TODO: change all uses of Rect activeArray to Size activeArray,
+ // since we want the crop to be active-array relative, not pixel-array relative
+
+ Rect unzoomedStreamCrop = getPreviewCropRectangleUnzoomed(activeArray, streamSize);
+
+ if (!params.isZoomSupported()) {
+ // Trivial case: No zoom -> only support the full size as the crop region
+ return new ArrayList<>(Arrays.asList(unzoomedStreamCrop));
+ }
+
+ List<Rect> zoomCropRectangles = new ArrayList<>(params.getMaxZoom() + 1);
+ Matrix scaleMatrix = new Matrix();
+ RectF scaledRect = new RectF();
+
+ for (int zoom : params.getZoomRatios()) {
+ float shrinkRatio = ZOOM_RATIO_MULTIPLIER * 1.0f / zoom; // normalize to 1.0 and smaller
+
+ // set scaledRect to unzoomedStreamCrop
+ ParamsUtils.convertRectF(unzoomedStreamCrop, /*out*/scaledRect);
+
+ scaleMatrix.setScale(
+ shrinkRatio, shrinkRatio,
+ activeArray.exactCenterX(),
+ activeArray.exactCenterY());
+
+ scaleMatrix.mapRect(scaledRect);
+
+ Rect intRect = ParamsUtils.createRect(scaledRect);
+
+ // Round the rect corners towards the nearest integer values
+ zoomCropRectangles.add(intRect);
+ }
+
+ return zoomCropRectangles;
+ }
+
+ /**
+ * Get the largest possible zoom ratio (normalized to {@code 1.0f} and higher)
+ * that the camera can support.
+ *
+ * <p>If the camera does not support zoom, it always returns {@code 1.0f}.</p>
+ *
+ * @param params non-{@code null} camera api1 parameters
+ * @return normalized max zoom ratio, at least {@code 1.0f}
+ */
+ public static float getMaxZoomRatio(Camera.Parameters params) {
+ if (!params.isZoomSupported()) {
+ return 1.0f; // no zoom
+ }
+
+ List<Integer> zoomRatios = params.getZoomRatios(); // sorted smallest->largest
+ int zoom = zoomRatios.get(zoomRatios.size() - 1); // largest zoom ratio
+ float zoomRatio = zoom * 1.0f / ZOOM_RATIO_MULTIPLIER; // normalize to 1.0 and larger
+
+ return zoomRatio;
+ }
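+
+ // For instance, on a hypothetical device whose getZoomRatios() returns
+ // [100, 200, 320], getMaxZoomRatio(params) == 3.2f; without zoom support
+ // the ratio is always 1.0f.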
+
+ /**
+ * Returns the component-wise zoom ratio (each greater than or equal to {@code 1.0});
+ * larger values mean more zoom.
+ *
+ * @param activeArraySize active array size of the sensor (e.g. max jpeg size)
+ * @param cropSize size of the crop/zoom
+ *
+ * @return {@link SizeF} with width/height being the component-wise zoom ratio
+ *
+ * @throws NullPointerException if any of the args were {@code null}
+ * @throws IllegalArgumentException if any component of {@code cropSize} was {@code 0}
+ */
+ private static SizeF getZoomRatio(Size activeArraySize, Size cropSize) {
+ checkNotNull(activeArraySize, "activeArraySize must not be null");
+ checkNotNull(cropSize, "cropSize must not be null");
+ checkArgumentPositive(cropSize.getWidth(), "cropSize.width must be positive");
+ checkArgumentPositive(cropSize.getHeight(), "cropSize.height must be positive");
+
+ float zoomRatioWidth = activeArraySize.getWidth() * 1.0f / cropSize.getWidth();
+ float zoomRatioHeight = activeArraySize.getHeight() * 1.0f / cropSize.getHeight();
+
+ return new SizeF(zoomRatioWidth, zoomRatioHeight);
+ }
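+
+ // A worked example of the component-wise ratio above:
+ //
+ // SizeF zoom = getZoomRatio(new Size(4000, 3000), new Size(2000, 1500));
+ // // zoom == SizeF(2.0f, 2.0f): a 2x zoom in both dimensions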
+
+ /**
+ * Convert the user-specified crop region into zoom data; which can be used
+ * to set the parameters to a specific zoom index, or to report back to the user what the
+ * actual zoom was, or for other calculations requiring the current preview crop region.
+ *
+ * <p>None of the parameters are mutated.</p>
+ *
+ * @param activeArraySize active array size of the sensor (e.g. max jpeg size)
+ * @param cropRegion the user-specified crop region
+ * @param previewSize the current preview size (in pixels)
+ * @param params the current camera parameters (not mutated)
+ *
+ * @return the zoom index, and the effective/reported crop regions (relative to active array)
+ */
+    public static ZoomData convertScalerCropRegion(Rect activeArraySize,
+            Rect cropRegion, Size previewSize, Camera.Parameters params) {
+ Rect activeArraySizeOnly = new Rect(
+ /*left*/0, /*top*/0,
+ activeArraySize.width(), activeArraySize.height());
+
+ Rect userCropRegion = cropRegion;
+
+ if (userCropRegion == null) {
+ userCropRegion = activeArraySizeOnly;
+ }
+
+ if (DEBUG) {
+ Log.v(TAG, "convertScalerCropRegion - user crop region was " + userCropRegion);
+ }
+
+ final Rect reportedCropRegion = new Rect();
+ final Rect previewCropRegion = new Rect();
+ final int zoomIdx = ParameterUtils.getClosestAvailableZoomCrop(params, activeArraySizeOnly,
+ previewSize, userCropRegion,
+ /*out*/reportedCropRegion, /*out*/previewCropRegion);
+
+ if (DEBUG) {
+ Log.v(TAG, "convertScalerCropRegion - zoom calculated to: " +
+ "zoomIndex = " + zoomIdx +
+ ", reported crop region = " + reportedCropRegion +
+ ", preview crop region = " + previewCropRegion);
+ }
+
+ return new ZoomData(zoomIdx, previewCropRegion, reportedCropRegion);
+ }
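+
+    // Hypothetical usage sketch (variable names and the ZoomData.zoomIndex field are
+    // assumptions, not taken verbatim from these sources):
+    //   ZoomData zoomData = ParameterUtils.convertScalerCropRegion(activeArray,
+    //           request.get(CaptureRequest.SCALER_CROP_REGION), previewSize, params);
+    //   if (params.isZoomSupported()) {
+    //       params.setZoom(zoomData.zoomIndex);
+    //   }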
+
+ /**
+ * Calculate the actual/effective/reported normalized rectangle data from a metering
+ * rectangle.
+ *
+ * <p>If any of the rectangles are out-of-range of their intended bounding box,
+ * the {@link #RECTANGLE_EMPTY empty rectangle} is substituted instead
+ * (with a weight of {@code 0}).</p>
+ *
+ * <p>The metering rectangle is bound by the crop region (effective/reported respectively).
+ * The metering {@link Camera.Area area} is bound by {@code [-1000, 1000]}.</p>
+ *
+ * <p>No parameters are mutated; returns the new metering data.</p>
+ *
+     * @param activeArray active array size of the sensor (e.g. max jpeg size)
+ * @param meteringRect the user-specified metering rectangle
+ * @param zoomData the calculated zoom data corresponding to this request
+ *
+ * @return the metering area, the reported/effective metering rectangles
+ */
+ public static MeteringData convertMeteringRectangleToLegacy(
+ Rect activeArray, MeteringRectangle meteringRect, ZoomData zoomData) {
+ Rect previewCrop = zoomData.previewCrop;
+
+ float scaleW = (NORMALIZED_RECTANGLE_MAX - NORMALIZED_RECTANGLE_MIN) * 1.0f /
+ previewCrop.width();
+ float scaleH = (NORMALIZED_RECTANGLE_MAX - NORMALIZED_RECTANGLE_MIN) * 1.0f /
+ previewCrop.height();
+
+ Matrix transform = new Matrix();
+ // Move the preview crop so that top,left is at (0,0), otherwise after scaling
+ // the corner bounds will be outside of [-1000, 1000]
+ transform.setTranslate(-previewCrop.left, -previewCrop.top);
+        // Scale into the [0, 2000] range, relative to the translated preview crop origin
+ transform.postScale(scaleW, scaleH);
+ // Move so that top left of a typical rect is at [-1000, -1000]
+ transform.postTranslate(/*dx*/NORMALIZED_RECTANGLE_MIN, /*dy*/NORMALIZED_RECTANGLE_MIN);
+
+ /*
+ * Calculate the preview metering region (effective), and the camera1 api
+ * normalized metering region.
+ */
+ Rect normalizedRegionUnbounded = ParamsUtils.mapRect(transform, meteringRect.getRect());
+
+ /*
+ * Try to intersect normalized area with [-1000, 1000] rectangle; otherwise
+ * it's completely out of range
+ */
+ Rect normalizedIntersected = new Rect(normalizedRegionUnbounded);
+
+ Camera.Area meteringArea;
+ if (!normalizedIntersected.intersect(NORMALIZED_RECTANGLE_DEFAULT)) {
+ Log.w(TAG,
+ "convertMeteringRectangleToLegacy - metering rectangle too small, " +
+ "no metering will be done");
+ normalizedIntersected.set(RECTANGLE_EMPTY);
+ meteringArea = new Camera.Area(RECTANGLE_EMPTY,
+ MeteringRectangle.METERING_WEIGHT_DONT_CARE);
+ } else {
+ meteringArea = new Camera.Area(normalizedIntersected,
+ meteringRect.getMeteringWeight());
+ }
+
+ /*
+ * Calculate effective preview metering region
+ */
+ Rect previewMetering = meteringRect.getRect();
+ if (!previewMetering.intersect(previewCrop)) {
+ previewMetering.set(RECTANGLE_EMPTY);
+ }
+
+ /*
+ * Calculate effective reported metering region
+ * - Transform the calculated metering area back into active array space
+ * - Clip it to be a subset of the reported crop region
+ */
+ Rect reportedMetering;
+ {
+ Camera.Area normalizedAreaUnbounded = new Camera.Area(
+ normalizedRegionUnbounded, meteringRect.getMeteringWeight());
+ WeightedRectangle reportedMeteringRect = convertCameraAreaToActiveArrayRectangle(
+ activeArray, zoomData, normalizedAreaUnbounded, /*usePreviewCrop*/false);
+ reportedMetering = reportedMeteringRect.rect;
+ }
+
+ if (DEBUG) {
+ Log.v(TAG, String.format(
+ "convertMeteringRectangleToLegacy - activeArray = %s, meteringRect = %s, " +
+ "previewCrop = %s, meteringArea = %s, previewMetering = %s, " +
+ "reportedMetering = %s, normalizedRegionUnbounded = %s",
+ activeArray, meteringRect,
+ previewCrop, stringFromArea(meteringArea), previewMetering,
+ reportedMetering, normalizedRegionUnbounded));
+ }
+
+ return new MeteringData(meteringArea, previewMetering, reportedMetering);
+ }
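+
+    // Worked example (illustrative numbers): with previewCrop == (0, 0, 1000, 750),
+    // scaleW == 2 and scaleH == 8/3, so a metering rect covering the left half of the
+    // crop, (0, 0, 500, 750), maps to the normalized area (-1000, -1000, 0, 1000).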
+
+ /**
+ * Convert the normalized camera area from [-1000, 1000] coordinate space
+ * into the active array-based coordinate space.
+ *
+ * <p>Values out of range are clipped to be within the resulting (reported) crop
+ * region. It is possible to have values larger than the preview crop.</p>
+ *
+ * <p>Weights out of range of [0, 1000] are clipped to be within the range.</p>
+ *
+     * @param activeArray active array size of the sensor (e.g. max jpeg size)
+ * @param zoomData the calculated zoom data corresponding to this request
+ * @param area the normalized camera area
+ *
+     * @return the weighted rectangle in active array coordinate space, with the weight
+ */
+ public static WeightedRectangle convertCameraAreaToActiveArrayRectangle(
+ Rect activeArray, ZoomData zoomData, Camera.Area area) {
+ return convertCameraAreaToActiveArrayRectangle(activeArray, zoomData, area,
+ /*usePreviewCrop*/true);
+ }
+
+ /**
+ * Convert an api1 face into an active-array based api2 face.
+ *
+     * <p>Out-of-range scores and ids will be clipped to be within range (with a warning).</p>
+ *
+ * @param face a non-{@code null} api1 face
+     * @param activeArray active array size of the sensor (e.g. max jpeg size)
+ * @param zoomData the calculated zoom data corresponding to this request
+ *
+ * @return a non-{@code null} api2 face
+ *
+ * @throws NullPointerException if the {@code face} was {@code null}
+ */
+ public static Face convertFaceFromLegacy(Camera.Face face, Rect activeArray,
+ ZoomData zoomData) {
+ checkNotNull(face, "face must not be null");
+
+ Face api2Face;
+
+ Camera.Area fakeArea = new Camera.Area(face.rect, /*weight*/1);
+
+ WeightedRectangle faceRect =
+ convertCameraAreaToActiveArrayRectangle(activeArray, zoomData, fakeArea);
+
+ Point leftEye = face.leftEye, rightEye = face.rightEye, mouth = face.mouth;
+ if (leftEye != null && rightEye != null && mouth != null && leftEye.x != -2000 &&
+ leftEye.y != -2000 && rightEye.x != -2000 && rightEye.y != -2000 &&
+ mouth.x != -2000 && mouth.y != -2000) {
+ leftEye = convertCameraPointToActiveArrayPoint(activeArray, zoomData,
+ leftEye, /*usePreviewCrop*/true);
+            rightEye = convertCameraPointToActiveArrayPoint(activeArray, zoomData,
+                    rightEye, /*usePreviewCrop*/true);
+            mouth = convertCameraPointToActiveArrayPoint(activeArray, zoomData,
+                    mouth, /*usePreviewCrop*/true);
+
+ api2Face = faceRect.toFace(face.id, leftEye, rightEye, mouth);
+ } else {
+ api2Face = faceRect.toFace();
+ }
+
+ return api2Face;
+ }
+
+ private static Point convertCameraPointToActiveArrayPoint(
+ Rect activeArray, ZoomData zoomData, Point point, boolean usePreviewCrop) {
+ Rect pointedRect = new Rect(point.x, point.y, point.x, point.y);
+ Camera.Area pointedArea = new Area(pointedRect, /*weight*/1);
+
+ WeightedRectangle adjustedRect =
+ convertCameraAreaToActiveArrayRectangle(activeArray,
+ zoomData, pointedArea, usePreviewCrop);
+
+ Point transformedPoint = new Point(adjustedRect.rect.left, adjustedRect.rect.top);
+
+ return transformedPoint;
+ }
+
+ private static WeightedRectangle convertCameraAreaToActiveArrayRectangle(
+ Rect activeArray, ZoomData zoomData, Camera.Area area, boolean usePreviewCrop) {
+ Rect previewCrop = zoomData.previewCrop;
+ Rect reportedCrop = zoomData.reportedCrop;
+
+ float scaleW = previewCrop.width() * 1.0f /
+ (NORMALIZED_RECTANGLE_MAX - NORMALIZED_RECTANGLE_MIN);
+ float scaleH = previewCrop.height() * 1.0f /
+ (NORMALIZED_RECTANGLE_MAX - NORMALIZED_RECTANGLE_MIN);
+
+ /*
+ * Calculate the reported metering region from the non-intersected normalized region
+ * by scaling and translating back into active array-relative coordinates.
+ */
+ Matrix transform = new Matrix();
+
+ // Move top left from (-1000, -1000) to (0, 0)
+ transform.setTranslate(/*dx*/NORMALIZED_RECTANGLE_MAX, /*dy*/NORMALIZED_RECTANGLE_MAX);
+
+ // Scale from [0, 2000] back into the preview rectangle
+ transform.postScale(scaleW, scaleH);
+
+ // Move the rect so that the [-1000,-1000] point ends up at the preview [left, top]
+ transform.postTranslate(previewCrop.left, previewCrop.top);
+
+ Rect cropToIntersectAgainst = usePreviewCrop ? previewCrop : reportedCrop;
+
+ // Now apply the transformation backwards to get the reported metering region
+ Rect reportedMetering = ParamsUtils.mapRect(transform, area.rect);
+ // Intersect it with the crop region, to avoid reporting out-of-bounds
+ // metering regions
+ if (!reportedMetering.intersect(cropToIntersectAgainst)) {
+ reportedMetering.set(RECTANGLE_EMPTY);
+ }
+
+ int weight = area.weight;
+ if (weight < MeteringRectangle.METERING_WEIGHT_MIN) {
+            Log.w(TAG,
+                    "convertCameraAreaToActiveArrayRectangle - rectangle "
+                    + stringFromArea(area) + " has too small a weight, clipping to 0");
+ weight = 0;
+ }
+
+        return new WeightedRectangle(reportedMetering, weight);
+ }
+
+ private ParameterUtils() {
+ throw new AssertionError();
+ }
+}
diff --git a/android/hardware/camera2/legacy/PerfMeasurement.java b/android/hardware/camera2/legacy/PerfMeasurement.java
new file mode 100644
index 00000000..53278c7e
--- /dev/null
+++ b/android/hardware/camera2/legacy/PerfMeasurement.java
@@ -0,0 +1,308 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.legacy;
+
+import android.os.SystemClock;
+import android.util.Log;
+
+import java.io.BufferedWriter;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.Queue;
+
+/**
+ * GPU and CPU performance measurement for the legacy implementation.
+ *
+ * <p>Measures CPU and GPU processing duration for a set of operations, and dumps
+ * the results into a file.</p>
+ *
+ * <p>Rough usage:
+ * <pre>
+ * {@code
+ * <set up workload>
+ * <start long-running workload>
+ * mPerfMeasurement.startTimer();
+ * ...render a frame...
+ * mPerfMeasurement.stopTimer();
+ * <end workload>
+ * mPerfMeasurement.dumpPerformanceData("/sdcard/my_data.txt");
+ * }
+ * </pre>
+ * </p>
+ *
+ * <p>All calls to this object must be made within the same thread, and the same GL context.
+ * PerfMeasurement cannot be used outside of a GL context. The only exception is
+ * dumpPerformanceData, which can be called outside of a valid GL context.</p>
+ */
+class PerfMeasurement {
+ private static final String TAG = "PerfMeasurement";
+
+ public static final int DEFAULT_MAX_QUERIES = 3;
+
+ private final long mNativeContext;
+
+ private int mCompletedQueryCount = 0;
+
+ /**
+ * Values for completed measurements
+ */
+ private ArrayList<Long> mCollectedGpuDurations = new ArrayList<>();
+ private ArrayList<Long> mCollectedCpuDurations = new ArrayList<>();
+ private ArrayList<Long> mCollectedTimestamps = new ArrayList<>();
+
+ /**
+ * Values for in-progress measurements (waiting for async GPU results)
+ */
+ private Queue<Long> mTimestampQueue = new LinkedList<>();
+ private Queue<Long> mCpuDurationsQueue = new LinkedList<>();
+
+ private long mStartTimeNs;
+
+ /**
+ * The value returned by {@link #nativeGetNextGlDuration} if no new timing
+ * measurement is available since the last call.
+ */
+    private static final long NO_DURATION_YET = -1L;
+
+ /**
+ * The value returned by {@link #nativeGetNextGlDuration} if timing failed for
+ * the next timing interval
+ */
+    private static final long FAILED_TIMING = -2L;
+
+ /**
+ * Create a performance measurement object with a maximum of {@value #DEFAULT_MAX_QUERIES}
+     * in-progress queries.
+ */
+ public PerfMeasurement() {
+ mNativeContext = nativeCreateContext(DEFAULT_MAX_QUERIES);
+ }
+
+ /**
+ * Create a performance measurement object with maxQueries as the maximum number of
+ * in-progress queries.
+ *
+ * @param maxQueries maximum in-progress queries, must be larger than 0.
+ * @throws IllegalArgumentException if maxQueries is less than 1.
+ */
+ public PerfMeasurement(int maxQueries) {
+ if (maxQueries < 1) throw new IllegalArgumentException("maxQueries is less than 1");
+ mNativeContext = nativeCreateContext(maxQueries);
+ }
+
+ /**
+     * Returns true if the GL timing methods will work, false otherwise.
+ *
+ * <p>Must be called within a valid GL context.</p>
+ */
+ public static boolean isGlTimingSupported() {
+ return nativeQuerySupport();
+ }
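+
+    // Illustrative call pattern (a sketch, not from the original sources; drawFrame()
+    // and frameTimestampNs are assumed):
+    //   if (PerfMeasurement.isGlTimingSupported()) {
+    //       perf.startTimer();
+    //       drawFrame();
+    //       perf.stopTimer();
+    //       perf.addTimestamp(frameTimestampNs);
+    //   }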
+
+ /**
+ * Dump collected data to file, and clear the stored data.
+ *
+ * <p>
+ * Format is a simple csv-like text file with a header,
+ * followed by a 3-column list of values in nanoseconds:
+ * <pre>
+ * timestamp gpu_duration cpu_duration
+ * <long> <long> <long>
+ * <long> <long> <long>
+ * <long> <long> <long>
+ * ....
+ * </pre>
+ * </p>
+     *
+     * @param path the output file path to write the performance data to
+     */
+ public void dumpPerformanceData(String path) {
+ try (BufferedWriter dump = new BufferedWriter(new FileWriter(path))) {
+ dump.write("timestamp gpu_duration cpu_duration\n");
+ for (int i = 0; i < mCollectedGpuDurations.size(); i++) {
+ dump.write(String.format("%d %d %d\n",
+ mCollectedTimestamps.get(i),
+ mCollectedGpuDurations.get(i),
+ mCollectedCpuDurations.get(i)));
+ }
+ mCollectedTimestamps.clear();
+ mCollectedGpuDurations.clear();
+ mCollectedCpuDurations.clear();
+ } catch (IOException e) {
+ Log.e(TAG, "Error writing data dump to " + path + ":" + e);
+ }
+ }
+
+ /**
+ * Start a GPU/CPU timing measurement.
+ *
+ * <p>Call before starting a rendering pass. Only one timing measurement can be active at once,
+ * so {@link #stopTimer} must be called before the next call to this method.</p>
+ *
+ * @throws IllegalStateException if the maximum number of queries are in progress already,
+ * or the method is called multiple times in a row, or there is
+ * a GPU error.
+ */
+ public void startTimer() {
+ nativeStartGlTimer(mNativeContext);
+ mStartTimeNs = SystemClock.elapsedRealtimeNanos();
+ }
+
+ /**
+ * Finish a GPU/CPU timing measurement.
+ *
+ * <p>Call after finishing all the drawing for a rendering pass. Only one timing measurement can
+ * be active at once, so {@link #startTimer} must be called before the next call to this
+ * method.</p>
+ *
+ * @throws IllegalStateException if no GL timer is currently started, or there is a GPU
+ * error.
+ */
+ public void stopTimer() {
+ // Complete CPU timing
+ long endTimeNs = SystemClock.elapsedRealtimeNanos();
+ mCpuDurationsQueue.add(endTimeNs - mStartTimeNs);
+ // Complete GL timing
+ nativeStopGlTimer(mNativeContext);
+
+ // Poll to see if GL timing results have arrived; if so
+ // store the results for a frame
+ long duration = getNextGlDuration();
+ if (duration > 0) {
+ mCollectedGpuDurations.add(duration);
+ mCollectedTimestamps.add(mTimestampQueue.isEmpty() ?
+ NO_DURATION_YET : mTimestampQueue.poll());
+ mCollectedCpuDurations.add(mCpuDurationsQueue.isEmpty() ?
+ NO_DURATION_YET : mCpuDurationsQueue.poll());
+ }
+ if (duration == FAILED_TIMING) {
+ // Discard timestamp and CPU measurement since GPU measurement failed
+ if (!mTimestampQueue.isEmpty()) {
+ mTimestampQueue.poll();
+ }
+ if (!mCpuDurationsQueue.isEmpty()) {
+ mCpuDurationsQueue.poll();
+ }
+ }
+ }
+
+ /**
+ * Add a timestamp to a timing measurement. These are queued up and matched to completed
+ * workload measurements as they become available.
+ */
+ public void addTimestamp(long timestamp) {
+ mTimestampQueue.add(timestamp);
+ }
+
+ /**
+ * Get the next available GPU timing measurement.
+ *
+     * <p>Since the GPU works asynchronously, the results of a single startTimer/stopTimer
+     * measurement will only be available some time after the {@link #stopTimer} call is made.
+     * Poll this method until the result becomes available. If multiple measurements are made in a
+ * row, the results will be available in FIFO order.</p>
+ *
+ * @return The measured duration of the GPU workload for the next pending query, or
+ * {@link #NO_DURATION_YET} if no queries are pending or the next pending query has not
+ * yet finished, or {@link #FAILED_TIMING} if the GPU was unable to complete the
+ * measurement.
+ *
+ * @throws IllegalStateException If there is a GPU error.
+ *
+ */
+ private long getNextGlDuration() {
+ long duration = nativeGetNextGlDuration(mNativeContext);
+ if (duration > 0) {
+ mCompletedQueryCount++;
+ }
+ return duration;
+ }
+
+ /**
+ * Returns the number of measurements so far that returned a valid duration
+ * measurement.
+ */
+ public int getCompletedQueryCount() {
+ return mCompletedQueryCount;
+ }
+
+ @Override
+ protected void finalize() {
+ nativeDeleteContext(mNativeContext);
+ }
+
+ /**
+ * Create a native performance measurement context.
+ *
+ * @param maxQueryCount maximum in-progress queries; must be >= 1.
+ */
+ private static native long nativeCreateContext(int maxQueryCount);
+
+ /**
+ * Delete the native context.
+ *
+ * <p>Not safe to call more than once.</p>
+ */
+ private static native void nativeDeleteContext(long contextHandle);
+
+ /**
+     * Query whether the relevant GL extensions are available for GL timing.
+ */
+ private static native boolean nativeQuerySupport();
+
+ /**
+ * Start a GL timing section.
+ *
+     * <p>All GL commands between this method and the next {@link #nativeStopGlTimer} will be
+ * included in the timing.</p>
+ *
+     * <p>Must be called from the same thread as calls to {@link #nativeStopGlTimer} and
+ * {@link #nativeGetNextGlDuration}.</p>
+ *
+ * @throws IllegalStateException if a GL error occurs or start is called repeatedly.
+ */
+ protected static native void nativeStartGlTimer(long contextHandle);
+
+ /**
+ * Finish a GL timing section.
+ *
+ * <p>Some time after this call returns, the time the GPU took to
+ * execute all work submitted between the latest {@link #nativeStartGlTimer} and
+     * this call will become available from calling {@link #nativeGetNextGlDuration}.</p>
+ *
+ * <p>Must be called from the same thread as calls to {@link #nativeStartGlTimer} and
+ * {@link #nativeGetNextGlDuration}.</p>
+ *
+ * @throws IllegalStateException if a GL error occurs or stop is called before start
+ */
+ protected static native void nativeStopGlTimer(long contextHandle);
+
+ /**
+ * Get the next available GL duration measurement, in nanoseconds.
+ *
+ * <p>Must be called from the same thread as calls to {@link #nativeStartGlTimer} and
+     * {@link #nativeStopGlTimer}.</p>
+ *
+ * @return the next GL duration measurement, or {@link #NO_DURATION_YET} if
+ * no new measurement is available, or {@link #FAILED_TIMING} if timing
+ * failed for the next duration measurement.
+ * @throws IllegalStateException if a GL error occurs
+ */
+ protected static native long nativeGetNextGlDuration(long contextHandle);
+
+}
diff --git a/android/hardware/camera2/legacy/RequestHandlerThread.java b/android/hardware/camera2/legacy/RequestHandlerThread.java
new file mode 100644
index 00000000..e19ebf2d
--- /dev/null
+++ b/android/hardware/camera2/legacy/RequestHandlerThread.java
@@ -0,0 +1,113 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.legacy;
+
+import android.os.ConditionVariable;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.Looper;
+import android.os.MessageQueue;
+
+public class RequestHandlerThread extends HandlerThread {
+
+ /**
+ * Ensure that the MessageQueue's idle handler gets run by poking the message queue;
+ * normally if the message queue is already idle, the idle handler won't get invoked.
+ *
+ * <p>Users of this handler thread should ignore this message.</p>
+ */
+    public static final int MSG_POKE_IDLE_HANDLER = -1;
+
+ private final ConditionVariable mStarted = new ConditionVariable(false);
+ private final ConditionVariable mIdle = new ConditionVariable(true);
+ private Handler.Callback mCallback;
+ private volatile Handler mHandler;
+
+ public RequestHandlerThread(String name, Handler.Callback callback) {
+ super(name, Thread.MAX_PRIORITY);
+ mCallback = callback;
+ }
+
+ @Override
+ protected void onLooperPrepared() {
+ mHandler = new Handler(getLooper(), mCallback);
+ mStarted.open();
+ }
+
+ // Blocks until thread has started
+ public void waitUntilStarted() {
+ mStarted.block();
+ }
+
+ // May return null if the handler is not set up yet.
+ public Handler getHandler() {
+ return mHandler;
+ }
+
+ // Blocks until thread has started
+ public Handler waitAndGetHandler() {
+ waitUntilStarted();
+ return getHandler();
+ }
+
+ // Atomic multi-type message existence check
+ public boolean hasAnyMessages(int[] what) {
+ synchronized (mHandler.getLooper().getQueue()) {
+ for (int i : what) {
+ if (mHandler.hasMessages(i)) {
+ return true;
+ }
+ }
+ }
+ return false;
+ }
+
+ // Atomic multi-type message remove
+ public void removeMessages(int[] what) {
+ synchronized (mHandler.getLooper().getQueue()) {
+ for (int i : what) {
+ mHandler.removeMessages(i);
+ }
+ }
+ }
+
+ private final MessageQueue.IdleHandler mIdleHandler = new MessageQueue.IdleHandler() {
+ @Override
+ public boolean queueIdle() {
+ mIdle.open();
+ return false;
+ }
+ };
+
+ // Blocks until thread is idling
+ public void waitUntilIdle() {
+ Handler handler = waitAndGetHandler();
+ MessageQueue queue = handler.getLooper().getQueue();
+ if (queue.isIdle()) {
+ return;
+ }
+ mIdle.close();
+ queue.addIdleHandler(mIdleHandler);
+ // Ensure that the idle handler gets run even if the looper already went idle
+ handler.sendEmptyMessage(MSG_POKE_IDLE_HANDLER);
+ if (queue.isIdle()) {
+ return;
+ }
+ mIdle.block();
+ }
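+
+    // A minimal usage sketch (MSG_SUBMIT is an assumed constant, not from the
+    // original sources):
+    //   RequestHandlerThread thread = new RequestHandlerThread("RequestThread", callback);
+    //   thread.start();
+    //   Handler handler = thread.waitAndGetHandler();
+    //   handler.sendEmptyMessage(MSG_SUBMIT);
+    //   thread.waitUntilIdle();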
+
+}
diff --git a/android/hardware/camera2/legacy/RequestHolder.java b/android/hardware/camera2/legacy/RequestHolder.java
new file mode 100644
index 00000000..98b761b8
--- /dev/null
+++ b/android/hardware/camera2/legacy/RequestHolder.java
@@ -0,0 +1,283 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.legacy;
+
+import android.hardware.camera2.CaptureRequest;
+import android.util.Log;
+import android.view.Surface;
+
+import java.util.Collection;
+
+import static com.android.internal.util.Preconditions.*;
+
+/**
+ * Semi-immutable container for a single capture request and associated information.
+ * The only mutable characteristic of this container is whether or not it has been
+ * marked as "failed" using {@link #failRequest}.
+ */
+public class RequestHolder {
+ private static final String TAG = "RequestHolder";
+
+ private final boolean mRepeating;
+ private final CaptureRequest mRequest;
+ private final int mRequestId;
+    private final int mSubsequenceId;
+ private final long mFrameNumber;
+ private final int mNumJpegTargets;
+ private final int mNumPreviewTargets;
+ private volatile boolean mFailed = false;
+ private boolean mOutputAbandoned = false;
+
+ private final Collection<Long> mJpegSurfaceIds;
+
+ /**
+ * A builder class for {@link RequestHolder} objects.
+ *
+ * <p>
+ * This allows per-request queries to be cached for repeating {@link CaptureRequest} objects.
+ * </p>
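+     *
+     * <p>Illustrative usage (a sketch with assumed surrounding variables, not from
+     * the original sources):
+     * <pre>{@code
+     * RequestHolder.Builder builder = new RequestHolder.Builder(
+     *         requestId, subsequenceId, request, repeating, jpegSurfaceIds);
+     * RequestHolder holder = builder.build(frameNumber);
+     * }</pre>
+     * </p>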
+ */
+    public static final class Builder {
+ private final int mRequestId;
+ private final int mSubsequenceId;
+ private final CaptureRequest mRequest;
+ private final boolean mRepeating;
+ private final int mNumJpegTargets;
+ private final int mNumPreviewTargets;
+ private final Collection<Long> mJpegSurfaceIds;
+
+ /**
+ * Construct a new {@link Builder} to generate {@link RequestHolder} objects.
+ *
+ * @param requestId the ID to set in {@link RequestHolder} objects.
+ * @param subsequenceId the sequence ID to set in {@link RequestHolder} objects.
+ * @param request the original {@link CaptureRequest} to set in {@link RequestHolder}
+ * objects.
+ * @param repeating {@code true} if the request is repeating.
+         * @param jpegSurfaceIds a collection of IDs for the surfaces that expect jpeg buffers.
+         */
+ public Builder(int requestId, int subsequenceId, CaptureRequest request,
+ boolean repeating, Collection<Long> jpegSurfaceIds) {
+ checkNotNull(request, "request must not be null");
+ mRequestId = requestId;
+ mSubsequenceId = subsequenceId;
+ mRequest = request;
+ mRepeating = repeating;
+ mJpegSurfaceIds = jpegSurfaceIds;
+ mNumJpegTargets = numJpegTargets(mRequest);
+ mNumPreviewTargets = numPreviewTargets(mRequest);
+ }
+
+ /**
+ * Returns true if the given surface requires jpeg buffers.
+ *
+ * @param s a {@link android.view.Surface} to check.
+ * @return true if the surface requires a jpeg buffer.
+ */
+ private boolean jpegType(Surface s)
+ throws LegacyExceptionUtils.BufferQueueAbandonedException {
+ return LegacyCameraDevice.containsSurfaceId(s, mJpegSurfaceIds);
+ }
+
+ /**
+ * Returns true if the given surface requires non-jpeg buffer types.
+ *
+ * <p>
+ * "Jpeg buffer" refers to the buffers returned in the jpeg
+ * {@link android.hardware.Camera.PictureCallback}. Non-jpeg buffers are created using a tee
+ * of the preview stream drawn to the surface
+ * set via {@link android.hardware.Camera#setPreviewDisplay(android.view.SurfaceHolder)} or
+ * equivalent methods.
+ * </p>
+ * @param s a {@link android.view.Surface} to check.
+ * @return true if the surface requires a non-jpeg buffer type.
+ */
+ private boolean previewType(Surface s)
+ throws LegacyExceptionUtils.BufferQueueAbandonedException {
+ return !jpegType(s);
+ }
+
+ /**
+ * Returns the number of surfaces targeted by the request that require jpeg buffers.
+ */
+ private int numJpegTargets(CaptureRequest request) {
+ int count = 0;
+ for (Surface s : request.getTargets()) {
+ try {
+ if (jpegType(s)) {
+ ++count;
+ }
+ } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
+ Log.d(TAG, "Surface abandoned, skipping...", e);
+ }
+ }
+ return count;
+ }
+
+ /**
+ * Returns the number of surfaces targeted by the request that require non-jpeg buffers.
+ */
+ private int numPreviewTargets(CaptureRequest request) {
+ int count = 0;
+ for (Surface s : request.getTargets()) {
+ try {
+ if (previewType(s)) {
+ ++count;
+ }
+ } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
+ Log.d(TAG, "Surface abandoned, skipping...", e);
+ }
+ }
+ return count;
+ }
+
+ /**
+         * Build a new {@link RequestHolder} with parameters generated from this
+ * {@link Builder}.
+ *
+         * @param frameNumber the frame number to set in the {@link RequestHolder}.
+ * @return a {@link RequestHolder} constructed with the {@link Builder}'s parameters.
+ */
+ public RequestHolder build(long frameNumber) {
+ return new RequestHolder(mRequestId, mSubsequenceId, mRequest, mRepeating, frameNumber,
+ mNumJpegTargets, mNumPreviewTargets, mJpegSurfaceIds);
+ }
+ }
+
+ private RequestHolder(int requestId, int subsequenceId, CaptureRequest request,
+ boolean repeating, long frameNumber, int numJpegTargets,
+ int numPreviewTargets, Collection<Long> jpegSurfaceIds) {
+ mRepeating = repeating;
+ mRequest = request;
+ mRequestId = requestId;
+        mSubsequenceId = subsequenceId;
+ mFrameNumber = frameNumber;
+ mNumJpegTargets = numJpegTargets;
+ mNumPreviewTargets = numPreviewTargets;
+ mJpegSurfaceIds = jpegSurfaceIds;
+ }
+
+ /**
+ * Return the request id for the contained {@link CaptureRequest}.
+ */
+ public int getRequestId() {
+ return mRequestId;
+ }
+
+ /**
+ * Returns true if the contained request is repeating.
+ */
+ public boolean isRepeating() {
+ return mRepeating;
+ }
+
+ /**
+ * Return the subsequence id for this request.
+ */
+ public int getSubsequeceId() {
+        return mSubsequenceId;
+ }
+
+ /**
+ * Returns the frame number for this request.
+ */
+ public long getFrameNumber() {
+ return mFrameNumber;
+ }
+
+ /**
+ * Returns the contained request.
+ */
+ public CaptureRequest getRequest() {
+ return mRequest;
+ }
+
+ /**
+ * Returns a read-only collection of the surfaces targeted by the contained request.
+ */
+ public Collection<Surface> getHolderTargets() {
+ return getRequest().getTargets();
+ }
+
+ /**
+ * Returns true if any of the surfaces targeted by the contained request require jpeg buffers.
+ */
+ public boolean hasJpegTargets() {
+ return mNumJpegTargets > 0;
+ }
+
+ /**
+ * Returns true if any of the surfaces targeted by the contained request require a
+ * non-jpeg buffer type.
+ */
+    public boolean hasPreviewTargets() {
+ return mNumPreviewTargets > 0;
+ }
+
+ /**
+ * Return the number of jpeg-type surfaces targeted by this request.
+ */
+ public int numJpegTargets() {
+ return mNumJpegTargets;
+ }
+
+ /**
+ * Return the number of non-jpeg-type surfaces targeted by this request.
+ */
+ public int numPreviewTargets() {
+ return mNumPreviewTargets;
+ }
+
+ /**
+ * Returns true if the given surface requires jpeg buffers.
+ *
+ * @param s a {@link android.view.Surface} to check.
+ * @return true if the surface requires a jpeg buffer.
+ */
+ public boolean jpegType(Surface s)
+ throws LegacyExceptionUtils.BufferQueueAbandonedException {
+ return LegacyCameraDevice.containsSurfaceId(s, mJpegSurfaceIds);
+ }
+
+ /**
+ * Mark this request as failed.
+ */
+ public void failRequest() {
+ Log.w(TAG, "Capture failed for request: " + getRequestId());
+ mFailed = true;
+ }
+
+ /**
+ * Return {@code true} if this request failed.
+ */
+ public boolean requestFailed() {
+ return mFailed;
+ }
+
+ /**
+     * Mark that at least one of this request's output surfaces has been abandoned.
+ */
+ public void setOutputAbandoned() {
+ mOutputAbandoned = true;
+ }
+
+ /**
+     * Return whether any of this request's output surfaces has been abandoned.
+ */
+ public boolean isOutputAbandoned() {
+ return mOutputAbandoned;
+ }
+}
diff --git a/android/hardware/camera2/legacy/RequestQueue.java b/android/hardware/camera2/legacy/RequestQueue.java
new file mode 100644
index 00000000..407e5e63
--- /dev/null
+++ b/android/hardware/camera2/legacy/RequestQueue.java
@@ -0,0 +1,174 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.legacy;
+
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.utils.SubmitInfo;
+import android.util.Log;
+
+import java.util.ArrayDeque;
+import java.util.List;
+
+/**
+ * A queue of bursts of requests.
+ *
+ * <p>This queue maintains the count of frames that have been produced, and is thread safe.</p>
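+ *
+ * <p>Illustrative flow (a sketch with assumed surrounding variables, not from the
+ * original sources):
+ * <pre>{@code
+ * RequestQueue queue = new RequestQueue(jpegSurfaceIds);
+ * SubmitInfo info = queue.submit(requests, false); // false: not a repeating burst
+ * RequestQueue.RequestQueueEntry next = queue.getNext(); // null once drained
+ * }</pre>
+ * </p>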
+ */
+public class RequestQueue {
+ private static final String TAG = "RequestQueue";
+
+ private static final long INVALID_FRAME = -1;
+
+ private BurstHolder mRepeatingRequest = null;
+ private final ArrayDeque<BurstHolder> mRequestQueue = new ArrayDeque<BurstHolder>();
+
+ private long mCurrentFrameNumber = 0;
+ private long mCurrentRepeatingFrameNumber = INVALID_FRAME;
+ private int mCurrentRequestId = 0;
+ private final List<Long> mJpegSurfaceIds;
+
+ public final class RequestQueueEntry {
+ private final BurstHolder mBurstHolder;
+ private final Long mFrameNumber;
+ private final boolean mQueueEmpty;
+
+ public BurstHolder getBurstHolder() {
+ return mBurstHolder;
+ }
+ public Long getFrameNumber() {
+ return mFrameNumber;
+ }
+ public boolean isQueueEmpty() {
+ return mQueueEmpty;
+ }
+
+ public RequestQueueEntry(BurstHolder burstHolder, Long frameNumber, boolean queueEmpty) {
+ mBurstHolder = burstHolder;
+ mFrameNumber = frameNumber;
+ mQueueEmpty = queueEmpty;
+ }
+ }
+
+ public RequestQueue(List<Long> jpegSurfaceIds) {
+ mJpegSurfaceIds = jpegSurfaceIds;
+ }
+
+ /**
+ * Return and remove the next burst on the queue.
+ *
+ * <p>If a repeating burst is returned, it will not be removed.</p>
+ *
+     * @return an entry containing the next burst, the current frame number, and a flag
+     *         indicating whether the request queue became empty; {@code null} if no burst exists.
+ */
+ public synchronized RequestQueueEntry getNext() {
+ BurstHolder next = mRequestQueue.poll();
+ boolean queueEmptied = (next != null && mRequestQueue.size() == 0);
+ if (next == null && mRepeatingRequest != null) {
+ next = mRepeatingRequest;
+ mCurrentRepeatingFrameNumber = mCurrentFrameNumber +
+ next.getNumberOfRequests();
+ }
+
+ if (next == null) {
+ return null;
+ }
+
+ RequestQueueEntry ret = new RequestQueueEntry(next, mCurrentFrameNumber, queueEmptied);
+ mCurrentFrameNumber += next.getNumberOfRequests();
+ return ret;
+ }
+
+ /**
+ * Cancel a repeating request.
+ *
+ * @param requestId the id of the repeating request to cancel.
+ * @return the last frame to be returned from the HAL for the given repeating request, or
+ * {@code INVALID_FRAME} if none exists.
+ */
+ public synchronized long stopRepeating(int requestId) {
+ long ret = INVALID_FRAME;
+ if (mRepeatingRequest != null && mRepeatingRequest.getRequestId() == requestId) {
+ mRepeatingRequest = null;
+ ret = (mCurrentRepeatingFrameNumber == INVALID_FRAME) ? INVALID_FRAME :
+ mCurrentRepeatingFrameNumber - 1;
+ mCurrentRepeatingFrameNumber = INVALID_FRAME;
+ Log.i(TAG, "Repeating capture request cancelled.");
+ } else {
+ Log.e(TAG, "cancel failed: no repeating request exists for request id: " + requestId);
+ }
+ return ret;
+ }
+
+ /**
+     * Cancel the current repeating request, if any.
+ *
+ * @return the last frame to be returned from the HAL for the given repeating request, or
+ * {@code INVALID_FRAME} if none exists.
+ */
+ public synchronized long stopRepeating() {
+ if (mRepeatingRequest == null) {
+ Log.e(TAG, "cancel failed: no repeating request exists.");
+ return INVALID_FRAME;
+ }
+ return stopRepeating(mRepeatingRequest.getRequestId());
+ }
+
+ /**
+     * Add the given burst to the queue.
+ *
+ * <p>If the burst is repeating, replace the current repeating burst.</p>
+ *
+ * @param requests the burst of requests to add to the queue.
+ * @param repeating true if the burst is repeating.
+ * @return the submission info, including the new request id, and the last frame number, which
+ * contains either the frame number of the last frame that will be returned for this request,
+ * or the frame number of the last frame that will be returned for the current repeating
+ * request if this burst is set to be repeating.
+ */
+ public synchronized SubmitInfo submit(CaptureRequest[] requests, boolean repeating) {
+ int requestId = mCurrentRequestId++;
+ BurstHolder burst = new BurstHolder(requestId, repeating, requests, mJpegSurfaceIds);
+ long lastFrame = INVALID_FRAME;
+ if (burst.isRepeating()) {
+ Log.i(TAG, "Repeating capture request set.");
+ if (mRepeatingRequest != null) {
+ lastFrame = (mCurrentRepeatingFrameNumber == INVALID_FRAME) ? INVALID_FRAME :
+ mCurrentRepeatingFrameNumber - 1;
+ }
+ mCurrentRepeatingFrameNumber = INVALID_FRAME;
+ mRepeatingRequest = burst;
+ } else {
+ mRequestQueue.offer(burst);
+ lastFrame = calculateLastFrame(burst.getRequestId());
+ }
+ SubmitInfo info = new SubmitInfo(requestId, lastFrame);
+ return info;
+ }
+
+ private long calculateLastFrame(int requestId) {
+ long total = mCurrentFrameNumber;
+ for (BurstHolder b : mRequestQueue) {
+ total += b.getNumberOfRequests();
+ if (b.getRequestId() == requestId) {
+ return total - 1;
+ }
+ }
+ throw new IllegalStateException(
+ "At least one request must be in the queue to calculate frame number");
+ }
+
+}
diff --git a/android/hardware/camera2/legacy/RequestThreadManager.java b/android/hardware/camera2/legacy/RequestThreadManager.java
new file mode 100644
index 00000000..aaf07e60
--- /dev/null
+++ b/android/hardware/camera2/legacy/RequestThreadManager.java
@@ -0,0 +1,1098 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.legacy;
+
+import android.graphics.SurfaceTexture;
+import android.hardware.Camera;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.impl.CameraDeviceImpl;
+import android.hardware.camera2.utils.SubmitInfo;
+import android.hardware.camera2.utils.SizeAreaComparator;
+import android.hardware.camera2.impl.CameraMetadataNative;
+import android.os.ConditionVariable;
+import android.os.Handler;
+import android.os.Message;
+import android.os.SystemClock;
+import android.util.Log;
+import android.util.MutableLong;
+import android.util.Pair;
+import android.util.Size;
+import android.view.Surface;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import static com.android.internal.util.Preconditions.*;
+
+/**
+ * This class executes requests to the {@link Camera}.
+ *
+ * <p>
+ * The main components of this class are:
+ * <ul>
+ * <li>A message queue of requests to the {@link Camera}.</li>
+ * <li>A thread that consumes requests to the {@link Camera} and executes them.</li>
+ * <li>A {@link GLThreadManager} that draws to the configured output {@link Surface}s.</li>
+ * <li>A {@link CameraDeviceState} state machine that manages the callbacks for various
+ * operations.</li>
+ * </ul>
+ * </p>
+ */
+@SuppressWarnings("deprecation")
+public class RequestThreadManager {
+ private final String TAG;
+ private final int mCameraId;
+ private final RequestHandlerThread mRequestThread;
+
+ private static final boolean DEBUG = false;
+ // For slightly more spammy messages that will get repeated every frame
+ private static final boolean VERBOSE = false;
+ private Camera mCamera;
+ private final CameraCharacteristics mCharacteristics;
+
+ private final CameraDeviceState mDeviceState;
+ private final CaptureCollector mCaptureCollector;
+ private final LegacyFocusStateMapper mFocusStateMapper;
+ private final LegacyFaceDetectMapper mFaceDetectMapper;
+
+ private static final int MSG_CONFIGURE_OUTPUTS = 1;
+ private static final int MSG_SUBMIT_CAPTURE_REQUEST = 2;
+ private static final int MSG_CLEANUP = 3;
+
+ private static final int MAX_IN_FLIGHT_REQUESTS = 2;
+
+ private static final int PREVIEW_FRAME_TIMEOUT = 1000; // ms
+ private static final int JPEG_FRAME_TIMEOUT = 4000; // ms (same as CTS for API2)
+ private static final int REQUEST_COMPLETE_TIMEOUT = JPEG_FRAME_TIMEOUT;
+
+ private static final float ASPECT_RATIO_TOLERANCE = 0.01f;
+ private boolean mPreviewRunning = false;
+
+ private final List<Surface> mPreviewOutputs = new ArrayList<>();
+ private final List<Surface> mCallbackOutputs = new ArrayList<>();
+ private GLThreadManager mGLThreadManager;
+ private SurfaceTexture mPreviewTexture;
+ private Camera.Parameters mParams;
+
+ private final List<Long> mJpegSurfaceIds = new ArrayList<>();
+
+ private Size mIntermediateBufferSize;
+
+ private final RequestQueue mRequestQueue = new RequestQueue(mJpegSurfaceIds);
+ private LegacyRequest mLastRequest = null;
+ private SurfaceTexture mDummyTexture;
+ private Surface mDummySurface;
+
+ private final Object mIdleLock = new Object();
+ private final FpsCounter mPrevCounter = new FpsCounter("Incoming Preview");
+ private final FpsCounter mRequestCounter = new FpsCounter("Incoming Requests");
+
+ private final AtomicBoolean mQuit = new AtomicBoolean(false);
+
+ // Stuff JPEGs into HAL_PIXEL_FORMAT_RGBA_8888 gralloc buffers to get around SW write
+    // limitations (b/17379185).
+ private static final boolean USE_BLOB_FORMAT_OVERRIDE = true;
+
+ /**
+ * Container object for Configure messages.
+ */
+ private static class ConfigureHolder {
+ public final ConditionVariable condition;
+ public final Collection<Pair<Surface, Size>> surfaces;
+
+        public ConfigureHolder(ConditionVariable condition,
+                Collection<Pair<Surface, Size>> surfaces) {
+ this.condition = condition;
+ this.surfaces = surfaces;
+ }
+ }
+
+ /**
+ * Counter class used to calculate and log the current FPS of frame production.
+ */
+ public static class FpsCounter {
+        // TODO: Hook this up to Systrace?
+ private static final String TAG = "FpsCounter";
+ private int mFrameCount = 0;
+ private long mLastTime = 0;
+ private long mLastPrintTime = 0;
+ private double mLastFps = 0;
+ private final String mStreamType;
+ private static final long NANO_PER_SECOND = 1000000000; //ns
+
+ public FpsCounter(String streamType) {
+ mStreamType = streamType;
+ }
+
+ public synchronized void countFrame() {
+ mFrameCount++;
+ long nextTime = SystemClock.elapsedRealtimeNanos();
+ if (mLastTime == 0) {
+ mLastTime = nextTime;
+ }
+ if (nextTime > mLastTime + NANO_PER_SECOND) {
+ long elapsed = nextTime - mLastTime;
+ mLastFps = mFrameCount * (NANO_PER_SECOND / (double) elapsed);
+ mFrameCount = 0;
+ mLastTime = nextTime;
+ }
+ }
+
+ public synchronized double checkFps() {
+ return mLastFps;
+ }
+
+ public synchronized void staggeredLog() {
+ if (mLastTime > mLastPrintTime + 5 * NANO_PER_SECOND) {
+ mLastPrintTime = mLastTime;
+ Log.d(TAG, "FPS for " + mStreamType + " stream: " + mLastFps );
+ }
+ }
+
+ public synchronized void countAndLog() {
+ countFrame();
+ staggeredLog();
+ }
+ }
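+
+    // Typical use of FpsCounter (a sketch, not from the original sources): call
+    // countAndLog() once per produced frame; the FPS estimate is refreshed about once
+    // per second and logged at most once every five seconds.
+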
+ /**
+ * Fake preview for jpeg captures when there is no active preview
+ */
+ private void createDummySurface() {
+ if (mDummyTexture == null || mDummySurface == null) {
+ mDummyTexture = new SurfaceTexture(/*ignored*/0);
+ // TODO: use smallest default sizes
+ mDummyTexture.setDefaultBufferSize(640, 480);
+ mDummySurface = new Surface(mDummyTexture);
+ }
+ }
+
+ private final Camera.ErrorCallback mErrorCallback = new Camera.ErrorCallback() {
+ @Override
+ public void onError(int i, Camera camera) {
+ switch(i) {
+ case Camera.CAMERA_ERROR_EVICTED: {
+ flush();
+ mDeviceState.setError(
+ CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DISCONNECTED);
+ } break;
+ default: {
+ Log.e(TAG, "Received error " + i + " from the Camera1 ErrorCallback");
+ mDeviceState.setError(
+ CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
+ } break;
+ }
+ }
+ };
+
+ private final ConditionVariable mReceivedJpeg = new ConditionVariable(false);
+
+ private final Camera.PictureCallback mJpegCallback = new Camera.PictureCallback() {
+ @Override
+ public void onPictureTaken(byte[] data, Camera camera) {
+ Log.i(TAG, "Received jpeg.");
+ Pair<RequestHolder, Long> captureInfo = mCaptureCollector.jpegProduced();
+ if (captureInfo == null || captureInfo.first == null) {
+ Log.e(TAG, "Dropping jpeg frame.");
+ return;
+ }
+ RequestHolder holder = captureInfo.first;
+ long timestamp = captureInfo.second;
+ for (Surface s : holder.getHolderTargets()) {
+ try {
+ if (LegacyCameraDevice.containsSurfaceId(s, mJpegSurfaceIds)) {
+ Log.i(TAG, "Producing jpeg buffer...");
+
+ int totalSize = data.length + LegacyCameraDevice.nativeGetJpegFooterSize();
+                    totalSize = (totalSize + 3) & ~0x3; // round up to the nearest multiple of 4 bytes
+ LegacyCameraDevice.setNextTimestamp(s, timestamp);
+
+ if (USE_BLOB_FORMAT_OVERRIDE) {
+ // Override to RGBA_8888 format.
+ LegacyCameraDevice.setSurfaceFormat(s,
+ LegacyMetadataMapper.HAL_PIXEL_FORMAT_RGBA_8888);
+
+ int dimen = (int) Math.ceil(Math.sqrt(totalSize));
+ dimen = (dimen + 0xf) & ~0xf; // round up to nearest multiple of 16
+ LegacyCameraDevice.setSurfaceDimens(s, dimen, dimen);
+ LegacyCameraDevice.produceFrame(s, data, dimen, dimen,
+ CameraMetadataNative.NATIVE_JPEG_FORMAT);
+ } else {
+ LegacyCameraDevice.setSurfaceDimens(s, totalSize, /*height*/1);
+ LegacyCameraDevice.produceFrame(s, data, totalSize, /*height*/1,
+ CameraMetadataNative.NATIVE_JPEG_FORMAT);
+ }
+ }
+ } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
+ Log.w(TAG, "Surface abandoned, dropping frame. ", e);
+ }
+ }
+
+ mReceivedJpeg.open();
+ }
+ };
+
+ private final Camera.ShutterCallback mJpegShutterCallback = new Camera.ShutterCallback() {
+ @Override
+ public void onShutter() {
+ mCaptureCollector.jpegCaptured(SystemClock.elapsedRealtimeNanos());
+ }
+ };
+
+ private final SurfaceTexture.OnFrameAvailableListener mPreviewCallback =
+ new SurfaceTexture.OnFrameAvailableListener() {
+ @Override
+ public void onFrameAvailable(SurfaceTexture surfaceTexture) {
+ if (DEBUG) {
+ mPrevCounter.countAndLog();
+ }
+ mGLThreadManager.queueNewFrame();
+ }
+ };
+
+ private void stopPreview() {
+ if (VERBOSE) {
+ Log.v(TAG, "stopPreview - preview running? " + mPreviewRunning);
+ }
+ if (mPreviewRunning) {
+ mCamera.stopPreview();
+ mPreviewRunning = false;
+ }
+ }
+
+ private void startPreview() {
+ if (VERBOSE) {
+ Log.v(TAG, "startPreview - preview running? " + mPreviewRunning);
+ }
+ if (!mPreviewRunning) {
+            // XX: CameraClient::startPreview is not getting called after a stop
+ mCamera.startPreview();
+ mPreviewRunning = true;
+ }
+ }
+
+ private void doJpegCapturePrepare(RequestHolder request) throws IOException {
+ if (DEBUG) Log.d(TAG, "doJpegCapturePrepare - preview running? " + mPreviewRunning);
+
+ if (!mPreviewRunning) {
+ if (DEBUG) Log.d(TAG, "doJpegCapture - create fake surface");
+
+ createDummySurface();
+ mCamera.setPreviewTexture(mDummyTexture);
+ startPreview();
+ }
+ }
+
+ private void doJpegCapture(RequestHolder request) {
+ if (DEBUG) Log.d(TAG, "doJpegCapturePrepare");
+
+ mCamera.takePicture(mJpegShutterCallback, /*raw*/null, mJpegCallback);
+ mPreviewRunning = false;
+ }
+
+ private void doPreviewCapture(RequestHolder request) throws IOException {
+ if (VERBOSE) {
+ Log.v(TAG, "doPreviewCapture - preview running? " + mPreviewRunning);
+ }
+
+ if (mPreviewRunning) {
+ return; // Already running
+ }
+
+ if (mPreviewTexture == null) {
+ throw new IllegalStateException(
+ "Preview capture called with no preview surfaces configured.");
+ }
+
+ mPreviewTexture.setDefaultBufferSize(mIntermediateBufferSize.getWidth(),
+ mIntermediateBufferSize.getHeight());
+ mCamera.setPreviewTexture(mPreviewTexture);
+
+ startPreview();
+ }
+
+ private void configureOutputs(Collection<Pair<Surface, Size>> outputs) {
+ if (DEBUG) {
+ String outputsStr = outputs == null ? "null" : (outputs.size() + " surfaces");
+ Log.d(TAG, "configureOutputs with " + outputsStr);
+ }
+
+ try {
+ stopPreview();
+ } catch (RuntimeException e) {
+ Log.e(TAG, "Received device exception in configure call: ", e);
+ mDeviceState.setError(
+ CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
+ return;
+ }
+
+ /*
+ * Try to release the previous preview's surface texture earlier if we end up
+ * using a different one; this also reduces the likelihood of getting into a deadlock
+ * when disconnecting from the old previous texture at a later time.
+ */
+ try {
+ mCamera.setPreviewTexture(/*surfaceTexture*/null);
+ } catch (IOException e) {
+ Log.w(TAG, "Failed to clear prior SurfaceTexture, may cause GL deadlock: ", e);
+ } catch (RuntimeException e) {
+ Log.e(TAG, "Received device exception in configure call: ", e);
+ mDeviceState.setError(
+ CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
+ return;
+ }
+
+ if (mGLThreadManager != null) {
+ mGLThreadManager.waitUntilStarted();
+ mGLThreadManager.ignoreNewFrames();
+ mGLThreadManager.waitUntilIdle();
+ }
+ resetJpegSurfaceFormats(mCallbackOutputs);
+
+ for (Surface s : mCallbackOutputs) {
+ try {
+ LegacyCameraDevice.disconnectSurface(s);
+ } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
+ Log.w(TAG, "Surface abandoned, skipping...", e);
+ }
+ }
+ mPreviewOutputs.clear();
+ mCallbackOutputs.clear();
+ mJpegSurfaceIds.clear();
+ mPreviewTexture = null;
+
+ List<Size> previewOutputSizes = new ArrayList<>();
+ List<Size> callbackOutputSizes = new ArrayList<>();
+
+ int facing = mCharacteristics.get(CameraCharacteristics.LENS_FACING);
+ int orientation = mCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
+ if (outputs != null) {
+ for (Pair<Surface, Size> outPair : outputs) {
+ Surface s = outPair.first;
+ Size outSize = outPair.second;
+ try {
+ int format = LegacyCameraDevice.detectSurfaceType(s);
+ LegacyCameraDevice.setSurfaceOrientation(s, facing, orientation);
+ switch (format) {
+ case CameraMetadataNative.NATIVE_JPEG_FORMAT:
+ if (USE_BLOB_FORMAT_OVERRIDE) {
+ // Override to RGBA_8888 format.
+ LegacyCameraDevice.setSurfaceFormat(s,
+ LegacyMetadataMapper.HAL_PIXEL_FORMAT_RGBA_8888);
+ }
+ mJpegSurfaceIds.add(LegacyCameraDevice.getSurfaceId(s));
+ mCallbackOutputs.add(s);
+ callbackOutputSizes.add(outSize);
+
+ // LegacyCameraDevice is the producer of JPEG output surfaces
+ // so LegacyCameraDevice needs to connect to the surfaces.
+ LegacyCameraDevice.connectSurface(s);
+ break;
+ default:
+ LegacyCameraDevice.setScalingMode(s, LegacyCameraDevice.
+ NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
+ mPreviewOutputs.add(s);
+ previewOutputSizes.add(outSize);
+ break;
+ }
+ } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
+ Log.w(TAG, "Surface abandoned, skipping...", e);
+ }
+ }
+ }
+ try {
+ mParams = mCamera.getParameters();
+ } catch (RuntimeException e) {
+ Log.e(TAG, "Received device exception: ", e);
+ mDeviceState.setError(
+ CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
+ return;
+ }
+
+ List<int[]> supportedFpsRanges = mParams.getSupportedPreviewFpsRange();
+ int[] bestRange = getPhotoPreviewFpsRange(supportedFpsRanges);
+ if (DEBUG) {
+ Log.d(TAG, "doPreviewCapture - Selected range [" +
+ bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX] + "," +
+ bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX] + "]");
+ }
+ mParams.setPreviewFpsRange(bestRange[Camera.Parameters.PREVIEW_FPS_MIN_INDEX],
+ bestRange[Camera.Parameters.PREVIEW_FPS_MAX_INDEX]);
+
+ Size smallestSupportedJpegSize = calculatePictureSize(mCallbackOutputs,
+ callbackOutputSizes, mParams);
+
+ if (previewOutputSizes.size() > 0) {
+
+ Size largestOutput = SizeAreaComparator.findLargestByArea(previewOutputSizes);
+
+ // Find largest jpeg dimension - assume to have the same aspect ratio as sensor.
+ Size largestJpegDimen = ParameterUtils.getLargestSupportedJpegSizeByArea(mParams);
+
+ Size chosenJpegDimen = (smallestSupportedJpegSize != null) ? smallestSupportedJpegSize
+ : largestJpegDimen;
+
+ List<Size> supportedPreviewSizes = ParameterUtils.convertSizeList(
+ mParams.getSupportedPreviewSizes());
+
+            // Use the smallest preview size with the same aspect ratio as the chosen
+            // jpeg dimension (assumed to match the sensor) that is at least as large as all
+            // of the configured output dimensions. If none exists, fall back to using the
+            // largest supported preview size.
+ long largestOutputArea = largestOutput.getHeight() * (long) largestOutput.getWidth();
+ Size bestPreviewDimen = SizeAreaComparator.findLargestByArea(supportedPreviewSizes);
+ for (Size s : supportedPreviewSizes) {
+ long currArea = s.getWidth() * s.getHeight();
+ long bestArea = bestPreviewDimen.getWidth() * bestPreviewDimen.getHeight();
+ if (checkAspectRatiosMatch(chosenJpegDimen, s) && (currArea < bestArea &&
+ currArea >= largestOutputArea)) {
+ bestPreviewDimen = s;
+ }
+ }
+
+ mIntermediateBufferSize = bestPreviewDimen;
+ mParams.setPreviewSize(mIntermediateBufferSize.getWidth(),
+ mIntermediateBufferSize.getHeight());
+
+ if (DEBUG) {
+ Log.d(TAG, "Intermediate buffer selected with dimens: " +
+ bestPreviewDimen.toString());
+ }
+ } else {
+ mIntermediateBufferSize = null;
+ if (DEBUG) {
+ Log.d(TAG, "No Intermediate buffer selected, no preview outputs were configured");
+ }
+ }
+
+ if (smallestSupportedJpegSize != null) {
+ /*
+ * Set takePicture size to the smallest supported JPEG size large enough
+ * to scale/crop out of for the bounding rectangle of the configured JPEG sizes.
+ */
+
+ Log.i(TAG, "configureOutputs - set take picture size to " + smallestSupportedJpegSize);
+ mParams.setPictureSize(
+ smallestSupportedJpegSize.getWidth(), smallestSupportedJpegSize.getHeight());
+ }
+
+ // TODO: Detect and optimize single-output paths here to skip stream teeing.
+ if (mGLThreadManager == null) {
+ mGLThreadManager = new GLThreadManager(mCameraId, facing, mDeviceState);
+ mGLThreadManager.start();
+ }
+ mGLThreadManager.waitUntilStarted();
+ List<Pair<Surface, Size>> previews = new ArrayList<>();
+ Iterator<Size> previewSizeIter = previewOutputSizes.iterator();
+ for (Surface p : mPreviewOutputs) {
+ previews.add(new Pair<>(p, previewSizeIter.next()));
+ }
+ mGLThreadManager.setConfigurationAndWait(previews, mCaptureCollector);
+
+ for (Surface p : mPreviewOutputs) {
+ try {
+ LegacyCameraDevice.setSurfaceOrientation(p, facing, orientation);
+ } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
+ Log.e(TAG, "Surface abandoned, skipping setSurfaceOrientation()", e);
+ }
+ }
+
+ mGLThreadManager.allowNewFrames();
+ mPreviewTexture = mGLThreadManager.getCurrentSurfaceTexture();
+ if (mPreviewTexture != null) {
+ mPreviewTexture.setOnFrameAvailableListener(mPreviewCallback);
+ }
+
+ try {
+ mCamera.setParameters(mParams);
+ } catch (RuntimeException e) {
+ Log.e(TAG, "Received device exception while configuring: ", e);
+ mDeviceState.setError(
+ CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
+
+ }
+ }
+
+ private void resetJpegSurfaceFormats(Collection<Surface> surfaces) {
+ if (!USE_BLOB_FORMAT_OVERRIDE || surfaces == null) {
+ return;
+ }
+ for(Surface s : surfaces) {
+ if (s == null || !s.isValid()) {
+ Log.w(TAG, "Jpeg surface is invalid, skipping...");
+ continue;
+ }
+ try {
+ LegacyCameraDevice.setSurfaceFormat(s, LegacyMetadataMapper.HAL_PIXEL_FORMAT_BLOB);
+ } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
+ Log.w(TAG, "Surface abandoned, skipping...", e);
+ }
+ }
+ }
+
+ /**
+ * Find a JPEG size (that is supported by the legacy camera device) which is equal to or larger
+ * than all of the configured {@code JPEG} outputs (by both width and height).
+ *
+ * <p>If multiple supported JPEG sizes are larger, select the smallest of them which
+ * still satisfies the above constraint.</p>
+ *
+ * <p>As a result, the returned size is guaranteed to be usable without needing
+ * to upscale any of the outputs. If only one {@code JPEG} surface is used,
+ * then no scaling/cropping is necessary between the taken picture and
+ * the {@code JPEG} output surface.</p>
+ *
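+     * <p>Worked example (illustrative sizes): with configured {@code JPEG} outputs of
+     * 1280x720 and 800x1000, the bounding rectangle is 1280x1000, and the smallest
+     * supported picture size at least 1280 wide and 1000 tall is returned.</p>
+     *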
+     * @param callbackOutputs a non-{@code null} list of {@code Surface}s with any image formats
+     * @param callbackSizes the sizes corresponding, in order, to each surface in
+     *                      {@code callbackOutputs}
+     * @param params api1 parameters (used for reading only)
+ *
+ * @return a size large enough to fit all of the configured {@code JPEG} outputs, or
+ * {@code null} if the {@code callbackOutputs} did not have any {@code JPEG}
+ * surfaces.
+ */
+    private Size calculatePictureSize(List<Surface> callbackOutputs,
+ List<Size> callbackSizes, Camera.Parameters params) {
+ /*
+ * Find the largest JPEG size (if any), from the configured outputs:
+ * - the api1 picture size should be set to the smallest legal size that's at least as large
+ * as the largest configured JPEG size
+ */
+ if (callbackOutputs.size() != callbackSizes.size()) {
+ throw new IllegalStateException("Input collections must be same length");
+ }
+ List<Size> configuredJpegSizes = new ArrayList<>();
+ Iterator<Size> sizeIterator = callbackSizes.iterator();
+ for (Surface callbackSurface : callbackOutputs) {
+ Size jpegSize = sizeIterator.next();
+ if (!LegacyCameraDevice.containsSurfaceId(callbackSurface, mJpegSurfaceIds)) {
+ continue; // Ignore non-JPEG callback formats
+ }
+
+ configuredJpegSizes.add(jpegSize);
+ }
+ if (!configuredJpegSizes.isEmpty()) {
+ /*
+ * Find the largest configured JPEG width, and height, independently
+ * of the rest.
+ *
+ * The rest of the JPEG streams can be cropped out of this smallest bounding
+ * rectangle.
+ */
+ int maxConfiguredJpegWidth = -1;
+ int maxConfiguredJpegHeight = -1;
+ for (Size jpegSize : configuredJpegSizes) {
+ maxConfiguredJpegWidth = jpegSize.getWidth() > maxConfiguredJpegWidth ?
+ jpegSize.getWidth() : maxConfiguredJpegWidth;
+ maxConfiguredJpegHeight = jpegSize.getHeight() > maxConfiguredJpegHeight ?
+ jpegSize.getHeight() : maxConfiguredJpegHeight;
+ }
+ Size smallestBoundJpegSize = new Size(maxConfiguredJpegWidth, maxConfiguredJpegHeight);
+
+ List<Size> supportedJpegSizes = ParameterUtils.convertSizeList(
+ params.getSupportedPictureSizes());
+
+ /*
+ * Find the smallest supported JPEG size that can fit the smallest bounding
+ * rectangle for the configured JPEG sizes.
+ */
+ List<Size> candidateSupportedJpegSizes = new ArrayList<>();
+ for (Size supportedJpegSize : supportedJpegSizes) {
+ if (supportedJpegSize.getWidth() >= maxConfiguredJpegWidth &&
+ supportedJpegSize.getHeight() >= maxConfiguredJpegHeight) {
+ candidateSupportedJpegSizes.add(supportedJpegSize);
+ }
+ }
+
+ if (candidateSupportedJpegSizes.isEmpty()) {
+ throw new AssertionError(
+ "Could not find any supported JPEG sizes large enough to fit " +
+ smallestBoundJpegSize);
+ }
+
+ Size smallestSupportedJpegSize = Collections.min(candidateSupportedJpegSizes,
+ new SizeAreaComparator());
+
+ if (!smallestSupportedJpegSize.equals(smallestBoundJpegSize)) {
+ Log.w(TAG,
+ String.format(
+ "configureOutputs - Will need to crop picture %s into "
+ + "smallest bound size %s",
+ smallestSupportedJpegSize, smallestBoundJpegSize));
+ }
+
+ return smallestSupportedJpegSize;
+ }
+
+ return null;
+ }
+
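+    /**
+     * Check whether two sizes share the same aspect ratio, to within
+     * {@code ASPECT_RATIO_TOLERANCE} (e.g. 1280x720 and 1920x1080 both have an
+     * aspect of ~1.78 and match).
+     */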
+ private static boolean checkAspectRatiosMatch(Size a, Size b) {
+ float aAspect = a.getWidth() / (float) a.getHeight();
+ float bAspect = b.getWidth() / (float) b.getHeight();
+
+ return Math.abs(aAspect - bAspect) < ASPECT_RATIO_TOLERANCE;
+ }
+
+    // Find the supported FPS range with the highest max FPS; ties are broken by the
+    // higher min FPS.
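+    // Example (values are fps x 1000): given {[15000, 30000], [30000, 30000]}, both
+    // share max 30000, so the higher-min range [30000, 30000] is selected.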
+ private int[] getPhotoPreviewFpsRange(List<int[]> frameRates) {
+ if (frameRates.size() == 0) {
+ Log.e(TAG, "No supported frame rates returned!");
+ return null;
+ }
+
+ int bestMin = 0;
+ int bestMax = 0;
+ int bestIndex = 0;
+ int index = 0;
+ for (int[] rate : frameRates) {
+ int minFps = rate[Camera.Parameters.PREVIEW_FPS_MIN_INDEX];
+ int maxFps = rate[Camera.Parameters.PREVIEW_FPS_MAX_INDEX];
+ if (maxFps > bestMax || (maxFps == bestMax && minFps > bestMin)) {
+ bestMin = minFps;
+ bestMax = maxFps;
+ bestIndex = index;
+ }
+ index++;
+ }
+
+ return frameRates.get(bestIndex);
+ }
+
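+    /**
+     * Handler callback for the request thread: configures outputs, drains queued
+     * capture bursts, and performs cleanup in response to the {@code MSG_*} messages.
+     */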
+ private final Handler.Callback mRequestHandlerCb = new Handler.Callback() {
+ private boolean mCleanup = false;
+ private final LegacyResultMapper mMapper = new LegacyResultMapper();
+
+ @Override
+ public boolean handleMessage(Message msg) {
+ if (mCleanup) {
+ return true;
+ }
+
+ if (DEBUG) {
+ Log.d(TAG, "Request thread handling message:" + msg.what);
+ }
+ long startTime = 0;
+ if (DEBUG) {
+ startTime = SystemClock.elapsedRealtimeNanos();
+ }
+ switch (msg.what) {
+ case MSG_CONFIGURE_OUTPUTS:
+ ConfigureHolder config = (ConfigureHolder) msg.obj;
+ int sizes = config.surfaces != null ? config.surfaces.size() : 0;
+ Log.i(TAG, "Configure outputs: " + sizes + " surfaces configured.");
+
+ try {
+ boolean success = mCaptureCollector.waitForEmpty(JPEG_FRAME_TIMEOUT,
+ TimeUnit.MILLISECONDS);
+ if (!success) {
+ Log.e(TAG, "Timed out while queueing configure request.");
+ mCaptureCollector.failAll();
+ }
+ } catch (InterruptedException e) {
+ Log.e(TAG, "Interrupted while waiting for requests to complete.");
+ mDeviceState.setError(
+ CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
+ break;
+ }
+
+ configureOutputs(config.surfaces);
+ config.condition.open();
+ if (DEBUG) {
+ long totalTime = SystemClock.elapsedRealtimeNanos() - startTime;
+ Log.d(TAG, "Configure took " + totalTime + " ns");
+ }
+ break;
+ case MSG_SUBMIT_CAPTURE_REQUEST:
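+                    // Summary: drain one burst from the request queue, update Camera1
+                    // parameters if the request changed, kick off preview and/or JPEG
+                    // captures, then map the legacy output into capture results.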
+ Handler handler = RequestThreadManager.this.mRequestThread.getHandler();
+ boolean anyRequestOutputAbandoned = false;
+
+ // Get the next burst from the request queue.
+ RequestQueue.RequestQueueEntry nextBurst = mRequestQueue.getNext();
+
+ if (nextBurst == null) {
+ // If there are no further requests queued, wait for any currently executing
+ // requests to complete, then switch to idle state.
+ try {
+ boolean success = mCaptureCollector.waitForEmpty(JPEG_FRAME_TIMEOUT,
+ TimeUnit.MILLISECONDS);
+ if (!success) {
+ Log.e(TAG,
+ "Timed out while waiting for prior requests to complete.");
+ mCaptureCollector.failAll();
+ }
+ } catch (InterruptedException e) {
+ Log.e(TAG, "Interrupted while waiting for requests to complete: ", e);
+ mDeviceState.setError(
+ CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
+ break;
+ }
+
+ synchronized (mIdleLock) {
+                            // Retry the request queue.
+ nextBurst = mRequestQueue.getNext();
+
+ // If we still have no queued requests, go idle.
+ if (nextBurst == null) {
+ mDeviceState.setIdle();
+ break;
+ }
+ }
+ }
+
+ if (nextBurst != null) {
+ // Queue another capture if we did not get the last burst.
+ handler.sendEmptyMessage(MSG_SUBMIT_CAPTURE_REQUEST);
+
+ // Check whether capture queue becomes empty
+ if (nextBurst.isQueueEmpty()) {
+ mDeviceState.setRequestQueueEmpty();
+ }
+ }
+
+ // Complete each request in the burst
+ BurstHolder burstHolder = nextBurst.getBurstHolder();
+ List<RequestHolder> requests =
+ burstHolder.produceRequestHolders(nextBurst.getFrameNumber());
+ for (RequestHolder holder : requests) {
+ CaptureRequest request = holder.getRequest();
+
+ boolean paramsChanged = false;
+
+ // Only update parameters if the request has changed
+ if (mLastRequest == null || mLastRequest.captureRequest != request) {
+
+ // The intermediate buffer is sometimes null, but we always need
+ // the Camera1 API configured preview size
+ Size previewSize = ParameterUtils.convertSize(mParams.getPreviewSize());
+
+ LegacyRequest legacyRequest = new LegacyRequest(mCharacteristics,
+ request, previewSize, mParams); // params are copied
+
+ // Parameters are mutated as a side-effect
+ LegacyMetadataMapper.convertRequestMetadata(/*inout*/legacyRequest);
+
+ // If the parameters have changed, set them in the Camera1 API.
+ if (!mParams.same(legacyRequest.parameters)) {
+ try {
+ mCamera.setParameters(legacyRequest.parameters);
+ } catch (RuntimeException e) {
+ // If setting the parameters failed, report a request error to
+ // the camera client, and skip any further work for this request
+ Log.e(TAG, "Exception while setting camera parameters: ", e);
+ holder.failRequest();
+ mDeviceState.setCaptureStart(holder, /*timestamp*/0,
+ CameraDeviceImpl.CameraDeviceCallbacks.
+ ERROR_CAMERA_REQUEST);
+ continue;
+ }
+ paramsChanged = true;
+ mParams = legacyRequest.parameters;
+ }
+
+ mLastRequest = legacyRequest;
+ }
+
+ try {
+ boolean success = mCaptureCollector.queueRequest(holder,
+ mLastRequest, JPEG_FRAME_TIMEOUT, TimeUnit.MILLISECONDS);
+
+ if (!success) {
+ // Report a request error if we timed out while queuing this.
+ Log.e(TAG, "Timed out while queueing capture request.");
+ holder.failRequest();
+ mDeviceState.setCaptureStart(holder, /*timestamp*/0,
+ CameraDeviceImpl.CameraDeviceCallbacks.
+ ERROR_CAMERA_REQUEST);
+ continue;
+ }
+
+ // Starting the preview needs to happen before enabling
+ // face detection or auto focus
+ if (holder.hasPreviewTargets()) {
+ doPreviewCapture(holder);
+ }
+ if (holder.hasJpegTargets()) {
+                                while (!mCaptureCollector.waitForPreviewsEmpty(PREVIEW_FRAME_TIMEOUT,
+ TimeUnit.MILLISECONDS)) {
+ // Fail preview requests until the queue is empty.
+ Log.e(TAG, "Timed out while waiting for preview requests to " +
+ "complete.");
+ mCaptureCollector.failNextPreview();
+ }
+ mReceivedJpeg.close();
+ doJpegCapturePrepare(holder);
+ }
+
+ /*
+ * Do all the actions that require a preview to have been started
+ */
+
+ // Toggle face detection on/off
+ // - do this before AF to give AF a chance to use faces
+ mFaceDetectMapper.processFaceDetectMode(request, /*in*/mParams);
+
+ // Unconditionally process AF triggers, since they're non-idempotent
+ // - must be done after setting the most-up-to-date AF mode
+ mFocusStateMapper.processRequestTriggers(request, mParams);
+
+ if (holder.hasJpegTargets()) {
+ doJpegCapture(holder);
+ if (!mReceivedJpeg.block(JPEG_FRAME_TIMEOUT)) {
+ Log.e(TAG, "Hit timeout for jpeg callback!");
+ mCaptureCollector.failNextJpeg();
+ }
+ }
+
+ } catch (IOException e) {
+ Log.e(TAG, "Received device exception during capture call: ", e);
+ mDeviceState.setError(
+ CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
+ break;
+ } catch (InterruptedException e) {
+ Log.e(TAG, "Interrupted during capture: ", e);
+ mDeviceState.setError(
+ CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
+ break;
+ } catch (RuntimeException e) {
+ Log.e(TAG, "Received device exception during capture call: ", e);
+ mDeviceState.setError(
+ CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
+ break;
+ }
+
+ if (paramsChanged) {
+ if (DEBUG) {
+ Log.d(TAG, "Params changed -- getting new Parameters from HAL.");
+ }
+ try {
+ mParams = mCamera.getParameters();
+ } catch (RuntimeException e) {
+ Log.e(TAG, "Received device exception: ", e);
+ mDeviceState.setError(
+ CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
+ break;
+ }
+
+ // Update parameters to the latest that we think the camera is using
+ mLastRequest.setParameters(mParams);
+ }
+
+ MutableLong timestampMutable = new MutableLong(/*value*/0L);
+ try {
+ boolean success = mCaptureCollector.waitForRequestCompleted(holder,
+ REQUEST_COMPLETE_TIMEOUT, TimeUnit.MILLISECONDS,
+ /*out*/timestampMutable);
+
+ if (!success) {
+ Log.e(TAG, "Timed out while waiting for request to complete.");
+ mCaptureCollector.failAll();
+ }
+ } catch (InterruptedException e) {
+ Log.e(TAG, "Interrupted waiting for request completion: ", e);
+ mDeviceState.setError(
+ CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
+ break;
+ }
+
+ CameraMetadataNative result = mMapper.cachedConvertResultMetadata(
+ mLastRequest, timestampMutable.value);
+ /*
+ * Order matters: The default result mapper is state-less; the
+ * other mappers carry state and may override keys set by the default
+ * mapper with their own values.
+ */
+
+ // Update AF state
+ mFocusStateMapper.mapResultTriggers(result);
+ // Update face-related results
+ mFaceDetectMapper.mapResultFaces(result, mLastRequest);
+
+ if (!holder.requestFailed()) {
+ mDeviceState.setCaptureResult(holder, result);
+ }
+
+ if (holder.isOutputAbandoned()) {
+ anyRequestOutputAbandoned = true;
+ }
+ }
+
+ // Stop the repeating request if any of its output surfaces is abandoned.
+ if (anyRequestOutputAbandoned && burstHolder.isRepeating()) {
+ long lastFrameNumber = cancelRepeating(burstHolder.getRequestId());
+ if (DEBUG) {
+ Log.d(TAG, "Stopped repeating request. Last frame number is " +
+ lastFrameNumber);
+ }
+ mDeviceState.setRepeatingRequestError(lastFrameNumber,
+ burstHolder.getRequestId());
+ }
+
+ if (DEBUG) {
+ long totalTime = SystemClock.elapsedRealtimeNanos() - startTime;
+ Log.d(TAG, "Capture request took " + totalTime + " ns");
+ mRequestCounter.countAndLog();
+ }
+ break;
+ case MSG_CLEANUP:
+ mCleanup = true;
+ try {
+ boolean success = mCaptureCollector.waitForEmpty(JPEG_FRAME_TIMEOUT,
+ TimeUnit.MILLISECONDS);
+ if (!success) {
+ Log.e(TAG, "Timed out while queueing cleanup request.");
+ mCaptureCollector.failAll();
+ }
+ } catch (InterruptedException e) {
+ Log.e(TAG, "Interrupted while waiting for requests to complete: ", e);
+ mDeviceState.setError(
+ CameraDeviceImpl.CameraDeviceCallbacks.ERROR_CAMERA_DEVICE);
+ }
+ if (mGLThreadManager != null) {
+ mGLThreadManager.quit();
+ mGLThreadManager = null;
+ }
+ if (mCamera != null) {
+ mCamera.release();
+ mCamera = null;
+ }
+ resetJpegSurfaceFormats(mCallbackOutputs);
+ break;
+ case RequestHandlerThread.MSG_POKE_IDLE_HANDLER:
+ // OK: Ignore message.
+ break;
+ default:
+ throw new AssertionError("Unhandled message " + msg.what +
+ " on RequestThread.");
+ }
+ return true;
+ }
+ };
+
+ /**
+ * Create a new RequestThreadManager.
+ *
+ * @param cameraId the id of the camera to use.
+ * @param camera an open camera object. The RequestThreadManager takes ownership of this camera
+ * object, and is responsible for closing it.
+ * @param characteristics the static camera characteristics corresponding to this camera device
+ * @param deviceState a {@link CameraDeviceState} state machine.
+ */
+ public RequestThreadManager(int cameraId, Camera camera, CameraCharacteristics characteristics,
+ CameraDeviceState deviceState) {
+ mCamera = checkNotNull(camera, "camera must not be null");
+ mCameraId = cameraId;
+ mCharacteristics = checkNotNull(characteristics, "characteristics must not be null");
+ String name = String.format("RequestThread-%d", cameraId);
+ TAG = name;
+ mDeviceState = checkNotNull(deviceState, "deviceState must not be null");
+ mFocusStateMapper = new LegacyFocusStateMapper(mCamera);
+ mFaceDetectMapper = new LegacyFaceDetectMapper(mCamera, mCharacteristics);
+ mCaptureCollector = new CaptureCollector(MAX_IN_FLIGHT_REQUESTS, mDeviceState);
+ mRequestThread = new RequestHandlerThread(name, mRequestHandlerCb);
+ mCamera.setErrorCallback(mErrorCallback);
+ }
+
+ /**
+ * Start the request thread.
+ */
+ public void start() {
+ mRequestThread.start();
+ }
+
+ /**
+ * Flush any pending requests.
+ *
+ * @return the last frame number.
+ */
+ public long flush() {
+ Log.i(TAG, "Flushing all pending requests.");
+ long lastFrame = mRequestQueue.stopRepeating();
+ mCaptureCollector.failAll();
+ return lastFrame;
+ }
+
+ /**
+ * Quit the request thread, and clean up everything.
+ */
+ public void quit() {
+ if (!mQuit.getAndSet(true)) { // Avoid sending messages on dead thread's handler.
+ Handler handler = mRequestThread.waitAndGetHandler();
+ handler.sendMessageAtFrontOfQueue(handler.obtainMessage(MSG_CLEANUP));
+ mRequestThread.quitSafely();
+ try {
+ mRequestThread.join();
+ } catch (InterruptedException e) {
+ Log.e(TAG, String.format("Thread %s (%d) interrupted while quitting.",
+ mRequestThread.getName(), mRequestThread.getId()));
+ }
+ }
+ }
+
+ /**
+ * Submit the given burst of requests to be captured.
+ *
+ * <p>If the burst is repeating, replace the current repeating burst.</p>
+ *
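+     * <p>Illustrative sketch ({@code manager} and request construction elided;
+     * {@code SubmitInfo} accessors assumed):
+     * <pre>{@code
+     * SubmitInfo info = manager.submitCaptureRequests(requests, true); // repeating
+     * // ... later, stop the repeating burst:
+     * manager.cancelRepeating(info.getRequestId());
+     * }</pre></p>
+     *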
+ * @param requests the burst of requests to add to the queue.
+ * @param repeating true if the burst is repeating.
+ * @return the submission info, including the new request id, and the last frame number, which
+ * contains either the frame number of the last frame that will be returned for this request,
+ * or the frame number of the last frame that will be returned for the current repeating
+ * request if this burst is set to be repeating.
+ */
+ public SubmitInfo submitCaptureRequests(CaptureRequest[] requests, boolean repeating) {
+ Handler handler = mRequestThread.waitAndGetHandler();
+ SubmitInfo info;
+ synchronized (mIdleLock) {
+ info = mRequestQueue.submit(requests, repeating);
+ handler.sendEmptyMessage(MSG_SUBMIT_CAPTURE_REQUEST);
+ }
+ return info;
+ }
+
+ /**
+ * Cancel a repeating request.
+ *
+ * @param requestId the id of the repeating request to cancel.
+ * @return the last frame to be returned from the HAL for the given repeating request, or
+ * {@code INVALID_FRAME} if none exists.
+ */
+ public long cancelRepeating(int requestId) {
+ return mRequestQueue.stopRepeating(requestId);
+ }
+
+ /**
+ * Configure with the current list of output Surfaces.
+ *
+ * <p>
+ * This operation blocks until the configuration is complete.
+ * </p>
+ *
+ * <p>Using a {@code null} or empty {@code outputs} list is the equivalent of unconfiguring.</p>
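+     *
+     * <p>Illustrative sketch (caller and output construction elided):
+     * <pre>{@code
+     * List<Pair<Surface, Size>> outputs = buildOutputs(); // hypothetical helper
+     * requestThreadManager.configure(outputs); // blocks until configured
+     * requestThreadManager.configure(null);    // later: unconfigure
+     * }</pre></p>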
+ *
+ * @param outputs a {@link java.util.Collection} of outputs to configure.
+ */
+ public void configure(Collection<Pair<Surface, Size>> outputs) {
+ Handler handler = mRequestThread.waitAndGetHandler();
+ final ConditionVariable condition = new ConditionVariable(/*closed*/false);
+ ConfigureHolder holder = new ConfigureHolder(condition, outputs);
+ handler.sendMessage(handler.obtainMessage(MSG_CONFIGURE_OUTPUTS, 0, 0, holder));
+ condition.block();
+ }
+}
diff --git a/android/hardware/camera2/legacy/SizeAreaComparator.java b/android/hardware/camera2/legacy/SizeAreaComparator.java
new file mode 100644
index 00000000..75a5bab9
--- /dev/null
+++ b/android/hardware/camera2/legacy/SizeAreaComparator.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License
+ */
+
+package android.hardware.camera2.legacy;
+
+import android.hardware.Camera;
+
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+
+import static com.android.internal.util.Preconditions.*;
+
+/**
+ * Comparator for api1 {@link Camera.Size} objects, ordered by area.
+ *
+ * <p>This comparator totally orders by rectangle area. Tie-breaks on width.</p>
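+ *
+ * <p>For example, 640x480 (area 307200) orders before 800x600 (area 480000); two
+ * sizes with equal area are ordered by the larger width.</p>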
+ */
+@SuppressWarnings("deprecation")
+public class SizeAreaComparator implements Comparator<Camera.Size> {
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public int compare(Camera.Size size, Camera.Size size2) {
+ checkNotNull(size, "size must not be null");
+ checkNotNull(size2, "size2 must not be null");
+
+ if (size.equals(size2)) {
+ return 0;
+ }
+
+ long width = size.width;
+ long width2 = size2.width;
+ long area = width * size.height;
+ long area2 = width2 * size2.height;
+
+ if (area == area2) {
+ return (width > width2) ? 1 : -1;
+ }
+
+ return (area > area2) ? 1 : -1;
+ }
+
+ /**
+     * Get the largest api1 {@code Camera.Size} from the list, comparing sizes by area
+     * with {@link SizeAreaComparator}.
+ *
+ * @param sizes a non-{@code null} list of non-{@code null} sizes
+ * @return a non-{@code null} size
+ *
+ * @throws NullPointerException if {@code sizes} or any elements in it were {@code null}
+ */
+ public static Camera.Size findLargestByArea(List<Camera.Size> sizes) {
+ checkNotNull(sizes, "sizes must not be null");
+
+ return Collections.max(sizes, new SizeAreaComparator());
+ }
+}
diff --git a/android/hardware/camera2/legacy/SurfaceTextureRenderer.java b/android/hardware/camera2/legacy/SurfaceTextureRenderer.java
new file mode 100644
index 00000000..a05a8ec0
--- /dev/null
+++ b/android/hardware/camera2/legacy/SurfaceTextureRenderer.java
@@ -0,0 +1,832 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.legacy;
+
+import android.graphics.ImageFormat;
+import android.graphics.RectF;
+import android.graphics.SurfaceTexture;
+import android.hardware.camera2.CameraCharacteristics;
+import android.os.Environment;
+import android.opengl.EGL14;
+import android.opengl.EGLConfig;
+import android.opengl.EGLContext;
+import android.opengl.EGLDisplay;
+import android.opengl.EGLSurface;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.opengl.Matrix;
+import android.text.format.Time;
+import android.util.Log;
+import android.util.Pair;
+import android.util.Size;
+import android.view.Surface;
+import android.os.SystemProperties;
+
+import java.io.File;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.List;
+
+/**
+ * A renderer class that manages the GL state, and can draw a frame into a set of output
+ * {@link Surface}s.
+ */
+public class SurfaceTextureRenderer {
+ private static final String TAG = SurfaceTextureRenderer.class.getSimpleName();
+ private static final boolean DEBUG = false;
+ private static final int EGL_RECORDABLE_ANDROID = 0x3142; // from EGL/eglext.h
+ private static final int GL_MATRIX_SIZE = 16;
+ private static final int VERTEX_POS_SIZE = 3;
+ private static final int VERTEX_UV_SIZE = 2;
+ private static final int EGL_COLOR_BITLENGTH = 8;
+ private static final int GLES_VERSION = 2;
+ private static final int PBUFFER_PIXEL_BYTES = 4;
+
+ private static final int FLIP_TYPE_NONE = 0;
+ private static final int FLIP_TYPE_HORIZONTAL = 1;
+ private static final int FLIP_TYPE_VERTICAL = 2;
+ private static final int FLIP_TYPE_BOTH = FLIP_TYPE_HORIZONTAL | FLIP_TYPE_VERTICAL;
+
+ private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY;
+ private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT;
+ private EGLConfig mConfigs;
+
+ private class EGLSurfaceHolder {
+ Surface surface;
+ EGLSurface eglSurface;
+ int width;
+ int height;
+ }
+
+ private List<EGLSurfaceHolder> mSurfaces = new ArrayList<EGLSurfaceHolder>();
+ private List<EGLSurfaceHolder> mConversionSurfaces = new ArrayList<EGLSurfaceHolder>();
+
+ private ByteBuffer mPBufferPixels;
+
+ // Hold this to avoid GC
+ private volatile SurfaceTexture mSurfaceTexture;
+
+ private static final int FLOAT_SIZE_BYTES = 4;
+ private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
+ private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
+ private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
+
+ // Sampling is mirrored across the horizontal axis
+ private static final float[] sHorizontalFlipTriangleVertices = {
+ // X, Y, Z, U, V
+ -1.0f, -1.0f, 0, 1.f, 0.f,
+ 1.0f, -1.0f, 0, 0.f, 0.f,
+ -1.0f, 1.0f, 0, 1.f, 1.f,
+ 1.0f, 1.0f, 0, 0.f, 1.f,
+ };
+
+ // Sampling is mirrored across the vertical axis
+ private static final float[] sVerticalFlipTriangleVertices = {
+ // X, Y, Z, U, V
+ -1.0f, -1.0f, 0, 0.f, 1.f,
+ 1.0f, -1.0f, 0, 1.f, 1.f,
+ -1.0f, 1.0f, 0, 0.f, 0.f,
+ 1.0f, 1.0f, 0, 1.f, 0.f,
+ };
+
+    // Sampling is mirrored across both axes
+ private static final float[] sBothFlipTriangleVertices = {
+ // X, Y, Z, U, V
+ -1.0f, -1.0f, 0, 1.f, 1.f,
+ 1.0f, -1.0f, 0, 0.f, 1.f,
+ -1.0f, 1.0f, 0, 1.f, 0.f,
+ 1.0f, 1.0f, 0, 0.f, 0.f,
+ };
+
+ // Sampling is 1:1 for a straight copy for the back camera
+ private static final float[] sRegularTriangleVertices = {
+ // X, Y, Z, U, V
+ -1.0f, -1.0f, 0, 0.f, 0.f,
+ 1.0f, -1.0f, 0, 1.f, 0.f,
+ -1.0f, 1.0f, 0, 0.f, 1.f,
+ 1.0f, 1.0f, 0, 1.f, 1.f,
+ };
+
+ private FloatBuffer mRegularTriangleVertices;
+ private FloatBuffer mHorizontalFlipTriangleVertices;
+ private FloatBuffer mVerticalFlipTriangleVertices;
+ private FloatBuffer mBothFlipTriangleVertices;
+ private final int mFacing;
+
+ /**
+ * As used in this file, this vertex shader maps a unit square to the view, and
+ * tells the fragment shader to interpolate over it. Each surface pixel position
+ * is mapped to a 2D homogeneous texture coordinate of the form (s, t, 0, 1) with
+ * s and t in the inclusive range [0, 1], and the matrix from
+ * {@link SurfaceTexture#getTransformMatrix(float[])} is used to map this
+ * coordinate to a texture location.
+ */
+ private static final String VERTEX_SHADER =
+ "uniform mat4 uMVPMatrix;\n" +
+ "uniform mat4 uSTMatrix;\n" +
+ "attribute vec4 aPosition;\n" +
+ "attribute vec4 aTextureCoord;\n" +
+ "varying vec2 vTextureCoord;\n" +
+ "void main() {\n" +
+ " gl_Position = uMVPMatrix * aPosition;\n" +
+ " vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
+ "}\n";
+
+ /**
+ * This fragment shader simply draws the color in the 2D texture at
+ * the location from the {@code VERTEX_SHADER}.
+ */
+ private static final String FRAGMENT_SHADER =
+ "#extension GL_OES_EGL_image_external : require\n" +
+ "precision mediump float;\n" +
+ "varying vec2 vTextureCoord;\n" +
+ "uniform samplerExternalOES sTexture;\n" +
+ "void main() {\n" +
+ " gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
+ "}\n";
+
+ private float[] mMVPMatrix = new float[GL_MATRIX_SIZE];
+ private float[] mSTMatrix = new float[GL_MATRIX_SIZE];
+
+ private int mProgram;
+ private int mTextureID = 0;
+ private int muMVPMatrixHandle;
+ private int muSTMatrixHandle;
+ private int maPositionHandle;
+ private int maTextureHandle;
+
+ private PerfMeasurement mPerfMeasurer = null;
+ private static final String LEGACY_PERF_PROPERTY = "persist.camera.legacy_perf";
+
+ public SurfaceTextureRenderer(int facing) {
+ mFacing = facing;
+
+ mRegularTriangleVertices = ByteBuffer.allocateDirect(sRegularTriangleVertices.length *
+ FLOAT_SIZE_BYTES).order(ByteOrder.nativeOrder()).asFloatBuffer();
+ mRegularTriangleVertices.put(sRegularTriangleVertices).position(0);
+
+ mHorizontalFlipTriangleVertices = ByteBuffer.allocateDirect(
+ sHorizontalFlipTriangleVertices.length * FLOAT_SIZE_BYTES).
+ order(ByteOrder.nativeOrder()).asFloatBuffer();
+ mHorizontalFlipTriangleVertices.put(sHorizontalFlipTriangleVertices).position(0);
+
+ mVerticalFlipTriangleVertices = ByteBuffer.allocateDirect(
+ sVerticalFlipTriangleVertices.length * FLOAT_SIZE_BYTES).
+ order(ByteOrder.nativeOrder()).asFloatBuffer();
+ mVerticalFlipTriangleVertices.put(sVerticalFlipTriangleVertices).position(0);
+
+ mBothFlipTriangleVertices = ByteBuffer.allocateDirect(
+ sBothFlipTriangleVertices.length * FLOAT_SIZE_BYTES).
+ order(ByteOrder.nativeOrder()).asFloatBuffer();
+ mBothFlipTriangleVertices.put(sBothFlipTriangleVertices).position(0);
+
+ Matrix.setIdentityM(mSTMatrix, 0);
+ }
+
+ private int loadShader(int shaderType, String source) {
+ int shader = GLES20.glCreateShader(shaderType);
+ checkGlError("glCreateShader type=" + shaderType);
+ GLES20.glShaderSource(shader, source);
+ GLES20.glCompileShader(shader);
+ int[] compiled = new int[1];
+ GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
+ if (compiled[0] == 0) {
+ Log.e(TAG, "Could not compile shader " + shaderType + ":");
+ Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
+ GLES20.glDeleteShader(shader);
+ // TODO: handle this more gracefully
+ throw new IllegalStateException("Could not compile shader " + shaderType);
+ }
+ return shader;
+ }
+
+ private int createProgram(String vertexSource, String fragmentSource) {
+ int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
+ if (vertexShader == 0) {
+ return 0;
+ }
+ int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
+ if (pixelShader == 0) {
+ return 0;
+ }
+
+ int program = GLES20.glCreateProgram();
+ checkGlError("glCreateProgram");
+        if (program == 0) {
+            Log.e(TAG, "Could not create program");
+            return 0;
+        }
+ GLES20.glAttachShader(program, vertexShader);
+ checkGlError("glAttachShader");
+ GLES20.glAttachShader(program, pixelShader);
+ checkGlError("glAttachShader");
+ GLES20.glLinkProgram(program);
+ int[] linkStatus = new int[1];
+ GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
+ if (linkStatus[0] != GLES20.GL_TRUE) {
+ Log.e(TAG, "Could not link program: ");
+ Log.e(TAG, GLES20.glGetProgramInfoLog(program));
+ GLES20.glDeleteProgram(program);
+ // TODO: handle this more gracefully
+ throw new IllegalStateException("Could not link program");
+ }
+ return program;
+ }
+
+ private void drawFrame(SurfaceTexture st, int width, int height, int flipType)
+ throws LegacyExceptionUtils.BufferQueueAbandonedException {
+ checkGlError("onDrawFrame start");
+ st.getTransformMatrix(mSTMatrix);
+
+ Matrix.setIdentityM(mMVPMatrix, /*smOffset*/0);
+
+ // Find intermediate buffer dimensions
+ Size dimens;
+ try {
+ dimens = LegacyCameraDevice.getTextureSize(st);
+ } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
+ // Should never hit this.
+ throw new IllegalStateException("Surface abandoned, skipping drawFrame...", e);
+ }
+ float texWidth = dimens.getWidth();
+ float texHeight = dimens.getHeight();
+
+ if (texWidth <= 0 || texHeight <= 0) {
+ throw new IllegalStateException("Illegal intermediate texture with dimension of 0");
+ }
+
+ // Letterbox or pillar-box output dimensions into intermediate dimensions.
+ RectF intermediate = new RectF(/*left*/0, /*top*/0, /*right*/texWidth, /*bottom*/texHeight);
+ RectF output = new RectF(/*left*/0, /*top*/0, /*right*/width, /*bottom*/height);
+ android.graphics.Matrix boxingXform = new android.graphics.Matrix();
+ boxingXform.setRectToRect(output, intermediate, android.graphics.Matrix.ScaleToFit.CENTER);
+ boxingXform.mapRect(output);
+
+ // Find scaling factor from pillar-boxed/letter-boxed output dimensions to intermediate
+ // buffer dimensions.
+ float scaleX = intermediate.width() / output.width();
+ float scaleY = intermediate.height() / output.height();
+
+ // Intermediate texture is implicitly scaled to 'fill' the output dimensions in clip space
+ // coordinates in the shader. To avoid stretching, we need to scale the larger dimension
+ // of the intermediate buffer so that the output buffer is actually letter-boxed
+ // or pillar-boxed into the intermediate buffer after clipping.
+ Matrix.scaleM(mMVPMatrix, /*offset*/0, /*x*/scaleX, /*y*/scaleY, /*z*/1);
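+        // Illustrative numbers: drawing into a 1280x960 output from a 1280x720
+        // intermediate maps the output box to 960x720 inside the intermediate,
+        // giving scaleX = 1280/960 ~= 1.33 and scaleY = 720/720 = 1; the quad is
+        // widened so the intermediate's left/right edges are clipped away.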
+
+ if (DEBUG) {
+ Log.d(TAG, "Scaling factors (S_x = " + scaleX + ",S_y = " + scaleY + ") used for " +
+ width + "x" + height + " surface, intermediate buffer size is " + texWidth +
+ "x" + texHeight);
+ }
+
+ // Set viewport to be output buffer dimensions
+ GLES20.glViewport(0, 0, width, height);
+
+ if (DEBUG) {
+ GLES20.glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
+ GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);
+ }
+
+ GLES20.glUseProgram(mProgram);
+ checkGlError("glUseProgram");
+
+ GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
+
+ FloatBuffer triangleVertices;
+        switch (flipType) {
+ case FLIP_TYPE_HORIZONTAL:
+ triangleVertices = mHorizontalFlipTriangleVertices;
+ break;
+ case FLIP_TYPE_VERTICAL:
+ triangleVertices = mVerticalFlipTriangleVertices;
+ break;
+ case FLIP_TYPE_BOTH:
+ triangleVertices = mBothFlipTriangleVertices;
+ break;
+ default:
+ triangleVertices = mRegularTriangleVertices;
+ break;
+ }
+
+ triangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
+ GLES20.glVertexAttribPointer(maPositionHandle, VERTEX_POS_SIZE, GLES20.GL_FLOAT,
+ /*normalized*/ false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, triangleVertices);
+ checkGlError("glVertexAttribPointer maPosition");
+ GLES20.glEnableVertexAttribArray(maPositionHandle);
+ checkGlError("glEnableVertexAttribArray maPositionHandle");
+
+ triangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
+ GLES20.glVertexAttribPointer(maTextureHandle, VERTEX_UV_SIZE, GLES20.GL_FLOAT,
+ /*normalized*/ false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, triangleVertices);
+ checkGlError("glVertexAttribPointer maTextureHandle");
+ GLES20.glEnableVertexAttribArray(maTextureHandle);
+ checkGlError("glEnableVertexAttribArray maTextureHandle");
+
+ GLES20.glUniformMatrix4fv(muMVPMatrixHandle, /*count*/ 1, /*transpose*/ false, mMVPMatrix,
+ /*offset*/ 0);
+ GLES20.glUniformMatrix4fv(muSTMatrixHandle, /*count*/ 1, /*transpose*/ false, mSTMatrix,
+ /*offset*/ 0);
+
+ GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, /*offset*/ 0, /*count*/ 4);
+ checkGlDrawError("glDrawArrays");
+ }
+
+ /**
+ * Initializes GL state. Call this after the EGL surface has been created and made current.
+ */
+ private void initializeGLState() {
+ mProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
+ if (mProgram == 0) {
+ throw new IllegalStateException("failed creating program");
+ }
+ maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
+ checkGlError("glGetAttribLocation aPosition");
+ if (maPositionHandle == -1) {
+ throw new IllegalStateException("Could not get attrib location for aPosition");
+ }
+ maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
+ checkGlError("glGetAttribLocation aTextureCoord");
+ if (maTextureHandle == -1) {
+ throw new IllegalStateException("Could not get attrib location for aTextureCoord");
+ }
+
+ muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
+ checkGlError("glGetUniformLocation uMVPMatrix");
+ if (muMVPMatrixHandle == -1) {
+ throw new IllegalStateException("Could not get attrib location for uMVPMatrix");
+ }
+
+ muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
+ checkGlError("glGetUniformLocation uSTMatrix");
+ if (muSTMatrixHandle == -1) {
+ throw new IllegalStateException("Could not get attrib location for uSTMatrix");
+ }
+
+ int[] textures = new int[1];
+ GLES20.glGenTextures(/*n*/ 1, textures, /*offset*/ 0);
+
+ mTextureID = textures[0];
+ GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
+ checkGlError("glBindTexture mTextureID");
+
+ GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
+ GLES20.GL_NEAREST);
+ GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
+ GLES20.GL_LINEAR);
+ GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
+ GLES20.GL_CLAMP_TO_EDGE);
+ GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
+ GLES20.GL_CLAMP_TO_EDGE);
+ checkGlError("glTexParameter");
+ }
+
+ private int getTextureId() {
+ return mTextureID;
+ }
+
+ private void clearState() {
+ mSurfaces.clear();
+ for (EGLSurfaceHolder holder : mConversionSurfaces) {
+ try {
+ LegacyCameraDevice.disconnectSurface(holder.surface);
+ } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
+ Log.w(TAG, "Surface abandoned, skipping...", e);
+ }
+ }
+ mConversionSurfaces.clear();
+ mPBufferPixels = null;
+ if (mSurfaceTexture != null) {
+ mSurfaceTexture.release();
+ }
+ mSurfaceTexture = null;
+ }
+
+ private void configureEGLContext() {
+ mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
+ if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
+ throw new IllegalStateException("No EGL14 display");
+ }
+ int[] version = new int[2];
+ if (!EGL14.eglInitialize(mEGLDisplay, version, /*offset*/ 0, version, /*offset*/ 1)) {
+ throw new IllegalStateException("Cannot initialize EGL14");
+ }
+
+ int[] attribList = {
+ EGL14.EGL_RED_SIZE, EGL_COLOR_BITLENGTH,
+ EGL14.EGL_GREEN_SIZE, EGL_COLOR_BITLENGTH,
+ EGL14.EGL_BLUE_SIZE, EGL_COLOR_BITLENGTH,
+ EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
+ EGL_RECORDABLE_ANDROID, 1,
+ EGL14.EGL_SURFACE_TYPE, EGL14.EGL_PBUFFER_BIT | EGL14.EGL_WINDOW_BIT,
+ EGL14.EGL_NONE
+ };
+ EGLConfig[] configs = new EGLConfig[1];
+ int[] numConfigs = new int[1];
+ EGL14.eglChooseConfig(mEGLDisplay, attribList, /*offset*/ 0, configs, /*offset*/ 0,
+ configs.length, numConfigs, /*offset*/ 0);
+ checkEglError("eglCreateContext RGB888+recordable ES2");
+ mConfigs = configs[0];
+ int[] attrib_list = {
+ EGL14.EGL_CONTEXT_CLIENT_VERSION, GLES_VERSION,
+ EGL14.EGL_NONE
+ };
+ mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT,
+ attrib_list, /*offset*/ 0);
+ checkEglError("eglCreateContext");
+        if (mEGLContext == EGL14.EGL_NO_CONTEXT) {
+ throw new IllegalStateException("No EGLContext could be made");
+ }
+ }
+
+ private void configureEGLOutputSurfaces(Collection<EGLSurfaceHolder> surfaces) {
+ if (surfaces == null || surfaces.size() == 0) {
+ throw new IllegalStateException("No Surfaces were provided to draw to");
+ }
+ int[] surfaceAttribs = {
+ EGL14.EGL_NONE
+ };
+ for (EGLSurfaceHolder holder : surfaces) {
+ holder.eglSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, mConfigs,
+ holder.surface, surfaceAttribs, /*offset*/ 0);
+ checkEglError("eglCreateWindowSurface");
+ }
+ }
+
+ private void configureEGLPbufferSurfaces(Collection<EGLSurfaceHolder> surfaces) {
+ if (surfaces == null || surfaces.size() == 0) {
+ throw new IllegalStateException("No Surfaces were provided to draw to");
+ }
+
+ int maxLength = 0;
+ for (EGLSurfaceHolder holder : surfaces) {
+ int length = holder.width * holder.height;
+ // Find max surface size, ensure PBuffer can hold this many pixels
+ maxLength = (length > maxLength) ? length : maxLength;
+ int[] surfaceAttribs = {
+ EGL14.EGL_WIDTH, holder.width,
+ EGL14.EGL_HEIGHT, holder.height,
+ EGL14.EGL_NONE
+ };
+ holder.eglSurface =
+ EGL14.eglCreatePbufferSurface(mEGLDisplay, mConfigs, surfaceAttribs, 0);
+ checkEglError("eglCreatePbufferSurface");
+ }
+ mPBufferPixels = ByteBuffer.allocateDirect(maxLength * PBUFFER_PIXEL_BYTES)
+ .order(ByteOrder.nativeOrder());
+ }
+
+ private void releaseEGLContext() {
+ if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) {
+ EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
+ EGL14.EGL_NO_CONTEXT);
+ dumpGlTiming();
+ if (mSurfaces != null) {
+ for (EGLSurfaceHolder holder : mSurfaces) {
+ if (holder.eglSurface != null) {
+ EGL14.eglDestroySurface(mEGLDisplay, holder.eglSurface);
+ }
+ }
+ }
+ if (mConversionSurfaces != null) {
+ for (EGLSurfaceHolder holder : mConversionSurfaces) {
+ if (holder.eglSurface != null) {
+ EGL14.eglDestroySurface(mEGLDisplay, holder.eglSurface);
+ }
+ }
+ }
+ EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
+ EGL14.eglReleaseThread();
+ EGL14.eglTerminate(mEGLDisplay);
+ }
+
+ mConfigs = null;
+ mEGLDisplay = EGL14.EGL_NO_DISPLAY;
+ mEGLContext = EGL14.EGL_NO_CONTEXT;
+ clearState();
+ }
+
+ private void makeCurrent(EGLSurface surface) {
+ EGL14.eglMakeCurrent(mEGLDisplay, surface, surface, mEGLContext);
+ checkEglError("makeCurrent");
+ }
+
+ private boolean swapBuffers(EGLSurface surface)
+ throws LegacyExceptionUtils.BufferQueueAbandonedException {
+ boolean result = EGL14.eglSwapBuffers(mEGLDisplay, surface);
+ int error = EGL14.eglGetError();
+ if (error == EGL14.EGL_BAD_SURFACE) {
+ throw new LegacyExceptionUtils.BufferQueueAbandonedException();
+ } else if (error != EGL14.EGL_SUCCESS) {
+ throw new IllegalStateException("swapBuffers: EGL error: 0x" +
+ Integer.toHexString(error));
+ }
+ return result;
+ }
+
+ private void checkEglError(String msg) {
+ int error;
+ if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
+ throw new IllegalStateException(msg + ": EGL error: 0x" + Integer.toHexString(error));
+ }
+ }
+
+ private void checkGlError(String msg) {
+ int error;
+ while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
+ throw new IllegalStateException(
+ msg + ": GLES20 error: 0x" + Integer.toHexString(error));
+ }
+ }
+
+    private void checkGlDrawError(String msg)
+            throws LegacyExceptionUtils.BufferQueueAbandonedException {
+        int error;
+        int lastError = GLES20.GL_NO_ERROR;
+        boolean surfaceAbandoned = false;
+        boolean glError = false;
+        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
+            if (error == GLES20.GL_OUT_OF_MEMORY) {
+                surfaceAbandoned = true;
+            } else {
+                glError = true;
+                lastError = error;
+            }
+        }
+        if (glError) {
+            // Report the last real GL error; 'error' is GL_NO_ERROR once the loop exits.
+            throw new IllegalStateException(
+                    msg + ": GLES20 error: 0x" + Integer.toHexString(lastError));
+        }
+        if (surfaceAbandoned) {
+            throw new LegacyExceptionUtils.BufferQueueAbandonedException();
+        }
+    }
+
+ /**
+ * Save a measurement dump to disk, in
+     * {@code /sdcard/CameraLegacy/durations_<time>_S_<w1>_<h1>..._C_<w1>_<h1>....txt},
+     * where the {@code S} section lists output surface dimensions and the {@code C}
+     * section lists conversion surface dimensions.
+ */
+ private void dumpGlTiming() {
+ if (mPerfMeasurer == null) return;
+
+ File legacyStorageDir = new File(Environment.getExternalStorageDirectory(), "CameraLegacy");
+        if (!legacyStorageDir.exists()) {
+            if (!legacyStorageDir.mkdirs()) {
+ Log.e(TAG, "Failed to create directory for data dump");
+ return;
+ }
+ }
+
+ StringBuilder path = new StringBuilder(legacyStorageDir.getPath());
+ path.append(File.separator);
+ path.append("durations_");
+
+ Time now = new Time();
+ now.setToNow();
+ path.append(now.format2445());
+ path.append("_S");
+ for (EGLSurfaceHolder surface : mSurfaces) {
+ path.append(String.format("_%d_%d", surface.width, surface.height));
+ }
+ path.append("_C");
+ for (EGLSurfaceHolder surface : mConversionSurfaces) {
+ path.append(String.format("_%d_%d", surface.width, surface.height));
+ }
+ path.append(".txt");
+ mPerfMeasurer.dumpPerformanceData(path.toString());
+ }
+
+ private void setupGlTiming() {
+ if (PerfMeasurement.isGlTimingSupported()) {
+ Log.d(TAG, "Enabling GL performance measurement");
+ mPerfMeasurer = new PerfMeasurement();
+ } else {
+ Log.d(TAG, "GL performance measurement not supported on this device");
+ mPerfMeasurer = null;
+ }
+ }
+
+ private void beginGlTiming() {
+ if (mPerfMeasurer == null) return;
+ mPerfMeasurer.startTimer();
+ }
+
+ private void addGlTimestamp(long timestamp) {
+ if (mPerfMeasurer == null) return;
+ mPerfMeasurer.addTimestamp(timestamp);
+ }
+
+ private void endGlTiming() {
+ if (mPerfMeasurer == null) return;
+ mPerfMeasurer.stopTimer();
+ }
+
+ /**
+     * Return the surface texture to draw to - this is the texture used when producing output
+ * surface buffers.
+ *
+ * @return a {@link SurfaceTexture}.
+ */
+ public SurfaceTexture getSurfaceTexture() {
+ return mSurfaceTexture;
+ }
+
+ /**
+ * Set a collection of output {@link Surface}s that can be drawn to.
+ *
+ * @param surfaces a {@link Collection} of surfaces.
+ */
+ public void configureSurfaces(Collection<Pair<Surface, Size>> surfaces) {
+ releaseEGLContext();
+
+ if (surfaces == null || surfaces.size() == 0) {
+ Log.w(TAG, "No output surfaces configured for GL drawing.");
+ return;
+ }
+
+ for (Pair<Surface, Size> p : surfaces) {
+ Surface s = p.first;
+ Size surfaceSize = p.second;
+ // If pixel conversions aren't handled by egl, use a pbuffer
+ try {
+ EGLSurfaceHolder holder = new EGLSurfaceHolder();
+ holder.surface = s;
+ holder.width = surfaceSize.getWidth();
+ holder.height = surfaceSize.getHeight();
+ if (LegacyCameraDevice.needsConversion(s)) {
+ mConversionSurfaces.add(holder);
+ // LegacyCameraDevice is the producer of surfaces if it's not handled by EGL,
+ // so LegacyCameraDevice needs to connect to the surfaces.
+ LegacyCameraDevice.connectSurface(s);
+ } else {
+ mSurfaces.add(holder);
+ }
+ } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
+ Log.w(TAG, "Surface abandoned, skipping configuration... ", e);
+ }
+ }
+
+ // Set up egl display
+ configureEGLContext();
+
+ // Set up regular egl surfaces if needed
+ if (mSurfaces.size() > 0) {
+ configureEGLOutputSurfaces(mSurfaces);
+ }
+
+ // Set up pbuffer surface if needed
+ if (mConversionSurfaces.size() > 0) {
+ configureEGLPbufferSurfaces(mConversionSurfaces);
+ }
+ makeCurrent((mSurfaces.size() > 0) ? mSurfaces.get(0).eglSurface :
+ mConversionSurfaces.get(0).eglSurface);
+ initializeGLState();
+ mSurfaceTexture = new SurfaceTexture(getTextureId());
+
+ // Set up performance tracking if enabled
+ if (SystemProperties.getBoolean(LEGACY_PERF_PROPERTY, false)) {
+ setupGlTiming();
+ }
+ }
+
+ /**
+ * Draw the current buffer in the {@link SurfaceTexture} returned from
+ * {@link #getSurfaceTexture()} into the set of target {@link Surface}s
+ * in the next request from the given {@link CaptureCollector}, or drop
+ * the frame if none is available.
+ *
+ * <p>
+ * Any {@link Surface}s targeted must be a subset of the {@link Surface}s
+ * set in the last {@link #configureSurfaces(java.util.Collection)} call.
+ * </p>
+ *
+ * @param targetCollector the surfaces to draw to.
+ */
+ public void drawIntoSurfaces(CaptureCollector targetCollector) {
+ if ((mSurfaces == null || mSurfaces.size() == 0)
+ && (mConversionSurfaces == null || mConversionSurfaces.size() == 0)) {
+ return;
+ }
+
+ boolean doTiming = targetCollector.hasPendingPreviewCaptures();
+ checkGlError("before updateTexImage");
+
+ if (doTiming) {
+ beginGlTiming();
+ }
+
+ mSurfaceTexture.updateTexImage();
+
+ long timestamp = mSurfaceTexture.getTimestamp();
+
+ Pair<RequestHolder, Long> captureHolder = targetCollector.previewCaptured(timestamp);
+
+ // No preview request queued, drop frame.
+ if (captureHolder == null) {
+ if (DEBUG) {
+ Log.d(TAG, "Dropping preview frame.");
+ }
+ if (doTiming) {
+ endGlTiming();
+ }
+ return;
+ }
+
+ RequestHolder request = captureHolder.first;
+
+ Collection<Surface> targetSurfaces = request.getHolderTargets();
+ if (doTiming) {
+ addGlTimestamp(timestamp);
+ }
+
+        List<Long> targetSurfaceIds = new ArrayList<>();
+ try {
+ targetSurfaceIds = LegacyCameraDevice.getSurfaceIds(targetSurfaces);
+ } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
+ Log.w(TAG, "Surface abandoned, dropping frame. ", e);
+ request.setOutputAbandoned();
+ }
+
+ for (EGLSurfaceHolder holder : mSurfaces) {
+ if (LegacyCameraDevice.containsSurfaceId(holder.surface, targetSurfaceIds)) {
+                try {
+ LegacyCameraDevice.setSurfaceDimens(holder.surface, holder.width,
+ holder.height);
+ makeCurrent(holder.eglSurface);
+
+ LegacyCameraDevice.setNextTimestamp(holder.surface, captureHolder.second);
+ drawFrame(mSurfaceTexture, holder.width, holder.height,
+ (mFacing == CameraCharacteristics.LENS_FACING_FRONT) ?
+ FLIP_TYPE_HORIZONTAL : FLIP_TYPE_NONE);
+ swapBuffers(holder.eglSurface);
+ } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
+ Log.w(TAG, "Surface abandoned, dropping frame. ", e);
+ request.setOutputAbandoned();
+ }
+ }
+ }
+ for (EGLSurfaceHolder holder : mConversionSurfaces) {
+ if (LegacyCameraDevice.containsSurfaceId(holder.surface, targetSurfaceIds)) {
+ makeCurrent(holder.eglSurface);
+ // glReadPixels reads from the bottom of the buffer, so add an extra vertical flip
+ try {
+ drawFrame(mSurfaceTexture, holder.width, holder.height,
+ (mFacing == CameraCharacteristics.LENS_FACING_FRONT) ?
+ FLIP_TYPE_BOTH : FLIP_TYPE_VERTICAL);
+ } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
+ // Should never hit this.
+ throw new IllegalStateException("Surface abandoned, skipping drawFrame...", e);
+ }
+ mPBufferPixels.clear();
+ GLES20.glReadPixels(/*x*/ 0, /*y*/ 0, holder.width, holder.height,
+ GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mPBufferPixels);
+ checkGlError("glReadPixels");
+
+ try {
+ int format = LegacyCameraDevice.detectSurfaceType(holder.surface);
+ LegacyCameraDevice.setSurfaceDimens(holder.surface, holder.width,
+ holder.height);
+ LegacyCameraDevice.setNextTimestamp(holder.surface, captureHolder.second);
+ LegacyCameraDevice.produceFrame(holder.surface, mPBufferPixels.array(),
+ holder.width, holder.height, format);
+ } catch (LegacyExceptionUtils.BufferQueueAbandonedException e) {
+ Log.w(TAG, "Surface abandoned, dropping frame. ", e);
+ request.setOutputAbandoned();
+ }
+ }
+ }
+ targetCollector.previewProduced();
+
+ if (doTiming) {
+ endGlTiming();
+ }
+ }
+
+ /**
+ * Clean up the current GL context.
+ */
+ public void cleanupEGLContext() {
+ releaseEGLContext();
+ }
+
+ /**
+ * Drop all current GL operations on the floor.
+ */
+ public void flush() {
+ // TODO: implement flush
+ Log.e(TAG, "Flush not yet implemented.");
+ }
+}
diff --git a/android/hardware/camera2/marshal/MarshalHelpers.java b/android/hardware/camera2/marshal/MarshalHelpers.java
new file mode 100644
index 00000000..35ecc2a0
--- /dev/null
+++ b/android/hardware/camera2/marshal/MarshalHelpers.java
@@ -0,0 +1,243 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.marshal;
+
+import static android.hardware.camera2.impl.CameraMetadataNative.*;
+import static com.android.internal.util.Preconditions.*;
+
+import android.hardware.camera2.impl.CameraMetadataNative;
+import android.util.Rational;
+
+/**
+ * Static functions in order to help implementing various marshaler functionality.
+ *
+ * <p>The intention is to statically import everything from this file into another file when
+ * implementing a new marshaler (or marshal queryable).</p>
+ *
+ * <p>The helpers are centered around providing primitive knowledge of the native types,
+ * such as the native size, the managed class wrappers, and various precondition checks.</p>
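+ *
+ * <p>Illustrative use (static imports of this class and {@code CameraMetadataNative}
+ * assumed):
+ * <pre>{@code
+ * int size = getPrimitiveTypeSize(TYPE_INT32);              // 4 bytes
+ * Class<Integer> wrapped = wrapClassIfPrimitive(int.class); // Integer.class
+ * }</pre></p>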
+ */
+public final class MarshalHelpers {
+
+ public static final int SIZEOF_BYTE = 1;
+ public static final int SIZEOF_INT32 = Integer.SIZE / Byte.SIZE;
+ public static final int SIZEOF_INT64 = Long.SIZE / Byte.SIZE;
+ public static final int SIZEOF_FLOAT = Float.SIZE / Byte.SIZE;
+ public static final int SIZEOF_DOUBLE = Double.SIZE / Byte.SIZE;
+ public static final int SIZEOF_RATIONAL = SIZEOF_INT32 * 2;
+
+ /**
+ * Get the size in bytes for the native camera metadata type.
+ *
+     * <p>This is used to determine how many bytes it would take to encode/decode a single value
+     * of that {@code nativeType}.</p>
+ *
+ * @param nativeType the native type, e.g.
+ * {@link android.hardware.camera2.impl.CameraMetadataNative#TYPE_BYTE TYPE_BYTE}.
+ * @return size in bytes >= 1
+ *
+ * @throws UnsupportedOperationException if nativeType was not one of the built-in types
+ */
+ public static int getPrimitiveTypeSize(int nativeType) {
+ switch (nativeType) {
+ case TYPE_BYTE:
+ return SIZEOF_BYTE;
+ case TYPE_INT32:
+ return SIZEOF_INT32;
+ case TYPE_FLOAT:
+ return SIZEOF_FLOAT;
+ case TYPE_INT64:
+ return SIZEOF_INT64;
+ case TYPE_DOUBLE:
+ return SIZEOF_DOUBLE;
+ case TYPE_RATIONAL:
+ return SIZEOF_RATIONAL;
+ }
+
+ throw new UnsupportedOperationException("Unknown type, can't get size for "
+ + nativeType);
+ }
+
+
+ /**
+ * Ensure that the {@code klass} is one of the metadata-primitive classes.
+ *
+ * @param klass a non-{@code null} reference
+ * @return {@code klass} instance
+ *
+ * @throws UnsupportedOperationException if klass was not one of the built-in classes
+ * @throws NullPointerException if klass was null
+ *
+ * @see #isPrimitiveClass
+ */
+ public static <T> Class<T> checkPrimitiveClass(Class<T> klass) {
+ checkNotNull(klass, "klass must not be null");
+
+ if (isPrimitiveClass(klass)) {
+ return klass;
+ }
+
+ throw new UnsupportedOperationException("Unsupported class '" + klass +
+ "'; expected a metadata primitive class");
+ }
+
+ /**
+ * Checks whether or not {@code klass} is one of the metadata-primitive classes.
+ *
+ * <p>The following types (whether boxed or unboxed) are considered primitive:
+ * <ul>
+ * <li>byte
+ * <li>int
+ * <li>float
+ * <li>double
+ * <li>Rational
+ * </ul>
+ * </p>
+ *
+ * <p>This doesn't strictly follow the java understanding of primitive since
+ * boxed objects are included, Rational is included, and other types such as char and
+ * short are not included.</p>
+ *
+ * @param klass a {@link Class} instance; using {@code null} will return {@code false}
+ * @return {@code true} if primitive, {@code false} otherwise
+ */
+ public static <T> boolean isPrimitiveClass(Class<T> klass) {
+ if (klass == null) {
+ return false;
+ }
+
+ if (klass == byte.class || klass == Byte.class) {
+ return true;
+ } else if (klass == int.class || klass == Integer.class) {
+ return true;
+ } else if (klass == float.class || klass == Float.class) {
+ return true;
+ } else if (klass == long.class || klass == Long.class) {
+ return true;
+ } else if (klass == double.class || klass == Double.class) {
+ return true;
+ } else if (klass == Rational.class) {
+ return true;
+ }
+
+ return false;
+ }
+
+ /**
+ * Wrap {@code klass} with its wrapper variant if it was a {@code Class} corresponding
+ * to a Java primitive.
+ *
+ * <p>Non-primitive classes are passed through as-is.</p>
+ *
+ * <p>For example, for a primitive {@code int.class => Integer.class},
+ * but for a non-primitive {@code Rational.class => Rational.class}.</p>
+ *
+ * @param klass a {@code Class} reference
+ *
+ * @return wrapped class object, or same class object if non-primitive
+ */
+ @SuppressWarnings("unchecked")
+ public static <T> Class<T> wrapClassIfPrimitive(Class<T> klass) {
+ if (klass == byte.class) {
+ return (Class<T>)Byte.class;
+ } else if (klass == int.class) {
+ return (Class<T>)Integer.class;
+ } else if (klass == float.class) {
+ return (Class<T>)Float.class;
+ } else if (klass == long.class) {
+ return (Class<T>)Long.class;
+ } else if (klass == double.class) {
+ return (Class<T>)Double.class;
+ }
+
+ return klass;
+ }
+
+ /**
+ * Return a human-readable representation of the {@code nativeType}, e.g. "TYPE_INT32"
+ *
+ * <p>Out-of-range values return a string with "UNKNOWN" as the prefix.</p>
+ *
+ * @param nativeType the native type
+ *
+ * @return human readable type name
+ */
+ public static String toStringNativeType(int nativeType) {
+ switch (nativeType) {
+ case TYPE_BYTE:
+ return "TYPE_BYTE";
+ case TYPE_INT32:
+ return "TYPE_INT32";
+ case TYPE_FLOAT:
+ return "TYPE_FLOAT";
+ case TYPE_INT64:
+ return "TYPE_INT64";
+ case TYPE_DOUBLE:
+ return "TYPE_DOUBLE";
+ case TYPE_RATIONAL:
+ return "TYPE_RATIONAL";
+ }
+
+ return "UNKNOWN(" + nativeType + ")";
+ }
+
+ /**
+ * Ensure that the {@code nativeType} is one of the native types supported
+ * by {@link CameraMetadataNative}.
+ *
+ * @param nativeType the native type
+ *
+ * @return the native type
+ *
+ * @throws UnsupportedOperationException if the native type was invalid
+ */
+ public static int checkNativeType(int nativeType) {
+ switch (nativeType) {
+ case TYPE_BYTE:
+ case TYPE_INT32:
+ case TYPE_FLOAT:
+ case TYPE_INT64:
+ case TYPE_DOUBLE:
+ case TYPE_RATIONAL:
+ return nativeType;
+ }
+
+ throw new UnsupportedOperationException("Unknown nativeType " + nativeType);
+ }
+
+ /**
+ * Ensure that the expected and actual native types are equal.
+ *
+ * @param expectedNativeType the expected native type
+ * @param actualNativeType the actual native type
+ * @return the actual native type
+ *
+ * @throws UnsupportedOperationException if the types are not equal
+ */
+ public static int checkNativeTypeEquals(int expectedNativeType, int actualNativeType) {
+ if (expectedNativeType != actualNativeType) {
+ throw new UnsupportedOperationException(
+ String.format("Expected native type %d, but got %d",
+ expectedNativeType, actualNativeType));
+ }
+
+ return actualNativeType;
+ }
+
+ private MarshalHelpers() {
+ throw new AssertionError();
+ }
+}
diff --git a/android/hardware/camera2/marshal/MarshalQueryable.java b/android/hardware/camera2/marshal/MarshalQueryable.java
new file mode 100644
index 00000000..35fed1f1
--- /dev/null
+++ b/android/hardware/camera2/marshal/MarshalQueryable.java
@@ -0,0 +1,63 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.marshal;
+
+import android.hardware.camera2.utils.TypeReference;
+
+/**
+ * Query if a marshaler can marshal to/from a particular native and managed type; if it supports
+ * the combination, allow creating a marshaler instance to do the serialization.
+ *
+ * <p>Not all queryable instances will support exactly one combination. Some, such as the
+ * primitive queryable, will support all primitive to/from managed mappings (as long as they
+ * are 1:1). Others, such as the rectangle queryable, will only support integer-to-rectangle
+ * mappings.</p>
+ *
+ * <p>Still others are codependent on other queryables; e.g. array queryables might only
+ * support a type map for {@code T[]} if another queryable exists with support for the
+ * component type {@code T}.</p>
+ */
+public interface MarshalQueryable<T> {
+ /**
+ * Create a marshaler between the selected managed and native type.
+ *
+ * <p>This marshaler instance is only good for that specific type mapping, and will refuse
+ * to map other managed types, other native types, or any combination that isn't
+ * this exact one.</p>
+ *
+ * @param managedType a managed type reference
+ * @param nativeType the native type, e.g.
+ * {@link android.hardware.camera2.impl.CameraMetadataNative#TYPE_BYTE TYPE_BYTE}
+ * @return a marshaler for the exact combination of {@code managedType} and {@code nativeType}
+ *
+ * @throws UnsupportedOperationException
+ * if {@link #isTypeMappingSupported} returns {@code false}
+ */
+ public Marshaler<T> createMarshaler(
+ TypeReference<T> managedType, int nativeType);
+
+ /**
+ * Determine whether or not this query marshal is able to create a marshaler that will
+ * support the managed type and native type mapping.
+ *
+ * <p>If this returns {@code true}, then a marshaler can be instantiated by
+ * {@link #createMarshaler} that will marshal data to/from the native type
+ * from/to the managed type.</p>
+ *
+ * <p>Most marshalers are likely to only support one type map.</p>
+ */
+ public boolean isTypeMappingSupported(TypeReference<T> managedType, int nativeType);
+}
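
To make the contract concrete, here is a hedged sketch of a queryable for a single fixed-size mapping; android.util.Size and the two-int32 layout are assumptions for illustration, not something this interface prescribes:

import android.hardware.camera2.marshal.Marshaler;
import android.hardware.camera2.marshal.MarshalQueryable;
import android.hardware.camera2.utils.TypeReference;
import android.util.Size;

import java.nio.ByteBuffer;

import static android.hardware.camera2.impl.CameraMetadataNative.TYPE_INT32;
import static android.hardware.camera2.marshal.MarshalHelpers.SIZEOF_INT32;

/** Hypothetical queryable: Size <-> TYPE_INT32 x 2, mirroring the pattern of this patch. */
public class MarshalQueryableSizeSketch implements MarshalQueryable<Size> {

    private class MarshalerSize extends Marshaler<Size> {
        protected MarshalerSize(TypeReference<Size> typeReference, int nativeType) {
            // The base class re-checks isTypeMappingSupported and rejects bad combos
            super(MarshalQueryableSizeSketch.this, typeReference, nativeType);
        }

        @Override
        public void marshal(Size value, ByteBuffer buffer) {
            buffer.putInt(value.getWidth());
            buffer.putInt(value.getHeight());
        }

        @Override
        public Size unmarshal(ByteBuffer buffer) {
            return new Size(buffer.getInt(), buffer.getInt());
        }

        @Override
        public int getNativeSize() {
            return SIZEOF_INT32 * 2; // fixed size: two int32 values
        }
    }

    @Override
    public Marshaler<Size> createMarshaler(TypeReference<Size> managedType, int nativeType) {
        return new MarshalerSize(managedType, nativeType);
    }

    @Override
    public boolean isTypeMappingSupported(TypeReference<Size> managedType, int nativeType) {
        // Exactly one combination is supported, as with most queryables
        return nativeType == TYPE_INT32 && Size.class.equals(managedType.getType());
    }
}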
diff --git a/android/hardware/camera2/marshal/MarshalRegistry.java b/android/hardware/camera2/marshal/MarshalRegistry.java
new file mode 100644
index 00000000..15650879
--- /dev/null
+++ b/android/hardware/camera2/marshal/MarshalRegistry.java
@@ -0,0 +1,144 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.marshal;
+
+import android.hardware.camera2.impl.CameraMetadataNative;
+import android.hardware.camera2.utils.TypeReference;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+/**
+ * <p>Registry of supported marshalers; add new queryable marshalers or look up existing ones.</p>
+ */
+public class MarshalRegistry {
+
+ /**
+ * Register a marshal queryable for the managed type {@code T}.
+ *
+ * <p>Multiple marshal queryables for the same managed type {@code T} may be registered;
+ * this is desirable if they support different native types (e.g. marshaler 1 supports
+ * {@code Integer <-> TYPE_INT32}, marshaler 2 supports {@code Integer <-> TYPE_BYTE}).</p>
+ *
+ * @param queryable a non-{@code null} marshal queryable that supports marshaling {@code T}
+ */
+ public static <T> void registerMarshalQueryable(MarshalQueryable<T> queryable) {
+ synchronized(sMarshalLock) {
+ sRegisteredMarshalQueryables.add(queryable);
+ }
+ }
+
+ /**
+ * Lookup a marshaler between {@code T} and {@code nativeType}.
+ *
+ * <p>Marshalers are looked up in the order they were registered; earlier-registered
+ * marshal queryables take priority.</p>
+ *
+ * @param typeToken The compile-time type reference for {@code T}
+ * @param nativeType The native type, e.g. {@link CameraMetadataNative#TYPE_BYTE TYPE_BYTE}
+ * @return a non-{@code null} marshaler that supports marshaling the type combo
+ *
+ * @throws UnsupportedOperationException If no marshaler matching the args could be found
+ */
+ @SuppressWarnings("unchecked")
+ public static <T> Marshaler<T> getMarshaler(TypeReference<T> typeToken, int nativeType) {
+ synchronized(sMarshalLock) {
+ // TODO: can avoid making a new token each time by code-genning
+ // the list of type tokens and native types from the keys (at the call sites)
+ MarshalToken<T> marshalToken = new MarshalToken<T>(typeToken, nativeType);
+
+ /*
+ * Marshalers are instantiated lazily once they are looked up; successive lookups
+ * will not instantiate new marshalers.
+ */
+ Marshaler<T> marshaler =
+ (Marshaler<T>) sMarshalerMap.get(marshalToken);
+
+ if (marshaler == null) {
+
+ if (sRegisteredMarshalQueryables.isEmpty()) {
+ throw new AssertionError("No available query marshalers registered");
+ }
+
+ // Query each marshaler to see if they support the native/managed type combination
+ for (MarshalQueryable<?> potentialMarshaler : sRegisteredMarshalQueryables) {
+
+ MarshalQueryable<T> castedPotential =
+ (MarshalQueryable<T>)potentialMarshaler;
+
+ if (castedPotential.isTypeMappingSupported(typeToken, nativeType)) {
+ marshaler = castedPotential.createMarshaler(typeToken, nativeType);
+ break;
+ }
+ }
+
+ if (marshaler == null) {
+ throw new UnsupportedOperationException(
+ "Could not find marshaler that matches the requested " +
+ "combination of type reference " +
+ typeToken + " and native type " +
+ MarshalHelpers.toStringNativeType(nativeType));
+ }
+
+ // Cache the marshaler on first lookup; re-querying on every call would cost ~0.5ms each.
+ sMarshalerMap.put(marshalToken, marshaler);
+ }
+
+ return marshaler;
+ }
+ }
+
+ private static class MarshalToken<T> {
+ public MarshalToken(TypeReference<T> typeReference, int nativeType) {
+ this.typeReference = typeReference;
+ this.nativeType = nativeType;
+ this.hash = typeReference.hashCode() ^ nativeType;
+ }
+
+ final TypeReference<T> typeReference;
+ final int nativeType;
+ private final int hash;
+
+ @Override
+ public boolean equals(Object other) {
+ if (other instanceof MarshalToken<?>) {
+ MarshalToken<?> otherToken = (MarshalToken<?>)other;
+ return typeReference.equals(otherToken.typeReference) &&
+ nativeType == otherToken.nativeType;
+ }
+
+ return false;
+ }
+
+ @Override
+ public int hashCode() {
+ return hash;
+ }
+ }
+
+ // Control access to the static data structures below
+ private static final Object sMarshalLock = new Object();
+
+ private static final List<MarshalQueryable<?>> sRegisteredMarshalQueryables =
+ new ArrayList<MarshalQueryable<?>>();
+ private static final HashMap<MarshalToken<?>, Marshaler<?>> sMarshalerMap =
+ new HashMap<MarshalToken<?>, Marshaler<?>>();
+
+ private MarshalRegistry() {
+ throw new AssertionError();
+ }
+}
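
End to end, registration and lookup behave like the sketch below. MarshalQueryablePrimitive is the queryable added later in this patch; the unchecked cast through TypeReference.createSpecializedTypeReference mirrors its use in MarshalQueryablePair and is an assumption about its return type:

import android.hardware.camera2.impl.CameraMetadataNative;
import android.hardware.camera2.marshal.MarshalRegistry;
import android.hardware.camera2.marshal.Marshaler;
import android.hardware.camera2.marshal.impl.MarshalQueryablePrimitive;
import android.hardware.camera2.utils.TypeReference;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class RegistrySketch {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        // Register once, typically at static-init time
        MarshalRegistry.registerMarshalQueryable(new MarshalQueryablePrimitive<Integer>());

        // Look up a marshaler; the result is cached, so repeat lookups are cheap
        TypeReference<Integer> token = (TypeReference<Integer>)
                TypeReference.createSpecializedTypeReference(Integer.class);
        Marshaler<Integer> marshaler =
                MarshalRegistry.getMarshaler(token, CameraMetadataNative.TYPE_INT32);

        // Roundtrip one value through a native-order buffer
        ByteBuffer buffer = ByteBuffer.allocate(marshaler.getNativeSize())
                .order(ByteOrder.nativeOrder());
        marshaler.marshal(42, buffer);
        buffer.rewind();
        System.out.println(marshaler.unmarshal(buffer)); // 42
    }
}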
diff --git a/android/hardware/camera2/marshal/Marshaler.java b/android/hardware/camera2/marshal/Marshaler.java
new file mode 100644
index 00000000..eb0ad156
--- /dev/null
+++ b/android/hardware/camera2/marshal/Marshaler.java
@@ -0,0 +1,148 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.marshal;
+
+import android.hardware.camera2.utils.TypeReference;
+
+import java.nio.ByteBuffer;
+
+import static android.hardware.camera2.marshal.MarshalHelpers.*;
+import static com.android.internal.util.Preconditions.*;
+
+/**
+ * Base class to marshal data to/from managed/native metadata byte buffers.
+ *
+ * <p>This class should not be created directly; an instance of it can be obtained
+ * using {@link MarshalQueryable#createMarshaler} for the same type {@code T} if the native type
+ * mapping for {@code T} is {@link MarshalQueryable#isTypeMappingSupported supported}.</p>
+ *
+ * @param <T> the compile-time managed type
+ */
+public abstract class Marshaler<T> {
+
+ protected final TypeReference<T> mTypeReference;
+ protected final int mNativeType;
+
+ /**
+ * Instantiate a marshaler between a single managed/native type combination.
+ *
+ * <p>This particular managed/native type combination must be supported by
+ * {@link MarshalQueryable#isTypeMappingSupported}.</p>
+ *
+ * @param query an instance of {@link MarshalQueryable}
+ * @param typeReference the managed type reference.
+ * Must be one for which {@link MarshalQueryable#isTypeMappingSupported} returns {@code true}.
+ * @param nativeType the native type, e.g.
+ * {@link android.hardware.camera2.impl.CameraMetadataNative#TYPE_BYTE TYPE_BYTE}.
+ * Must be one for which {@link MarshalQueryable#isTypeMappingSupported} returns {@code true}.
+ *
+ * @throws NullPointerException if any args were {@code null}
+ * @throws UnsupportedOperationException if the type mapping was not supported
+ */
+ protected Marshaler(
+ MarshalQueryable<T> query, TypeReference<T> typeReference, int nativeType) {
+ mTypeReference = checkNotNull(typeReference, "typeReference must not be null");
+ mNativeType = checkNativeType(nativeType);
+
+ if (!query.isTypeMappingSupported(typeReference, nativeType)) {
+ throw new UnsupportedOperationException(
+ "Unsupported type marshaling for managed type "
+ + typeReference + " and native type "
+ + MarshalHelpers.toStringNativeType(nativeType));
+ }
+ }
+
+ /**
+ * Marshal the specified object instance (value) into a byte buffer.
+ *
+ * <p>Upon completion, the {@link ByteBuffer#position()} will have advanced by
+ * the {@link #calculateMarshalSize marshal size} of {@code value}.</p>
+ *
+ * @param value the value of type T that we wish to write into the byte buffer
+ * @param buffer the byte buffer into which the marshaled object will be written
+ */
+ public abstract void marshal(T value, ByteBuffer buffer);
+
+ /**
+ * Get the size in bytes for how much space would be required to write this {@code value}
+ * into a byte buffer using the given {@code nativeType}.
+ *
+ * <p>If the size of this {@code T} instance when serialized into a buffer is always constant,
+ * then this method will always return the same value (and in particular, it will return
+ * a value equivalent to {@link #getNativeSize()}).</p>
+ *
+ * <p>Overriding this method is mandatory when the size is {@link #NATIVE_SIZE_DYNAMIC dynamic}.</p>
+ *
+ * @param value the value of type T that we wish to write into the byte buffer
+ * @return the size that would need to be written to the byte buffer
+ */
+ public int calculateMarshalSize(T value) {
+ int nativeSize = getNativeSize();
+
+ if (nativeSize == NATIVE_SIZE_DYNAMIC) {
+ throw new AssertionError("Override this function for dynamically-sized objects");
+ }
+
+ return nativeSize;
+ }
+
+ /**
+ * Unmarshal a new object instance from the byte buffer into its managed type.
+ *
+ * <p>Upon completion, the {@link ByteBuffer#position()} will have advanced by
+ * the {@link #calculateMarshalSize marshal size} of the returned {@code T} instance.</p>
+ *
+ * @param buffer the byte buffer, from which we will read the object
+ * @return a new instance of type T read from the byte buffer
+ */
+ public abstract T unmarshal(ByteBuffer buffer);
+
+ /**
+ * Used to denote variable-length data structures.
+ *
+ * <p>If the size is dynamic, then we can't know ahead of time how large a data structure
+ * (e.g. an array) to preallocate, so each object must be unmarshaled one at a time.</p>
+ */
+ public static final int NATIVE_SIZE_DYNAMIC = -1;
+
+ /**
+ * How many bytes a single instance of {@code T} will take up if marshaled to/from
+ * {@code nativeType}.
+ *
+ * <p>When unmarshaling data from native to managed, the instance {@code T} is not yet
+ * available. If the native size is always a fixed mapping regardless of the instance of
+ * {@code T} (e.g. if the type is not a container of some sort), it can be used to preallocate
+ * containers for {@code T} to avoid resizing them.</p>
+ *
+ * <p>In particular, the array marshaler takes advantage of this (when size is not dynamic)
+ * to preallocate arrays of the right length when unmarshaling an array {@code T[]}.</p>
+ *
+ * @return a size in bytes, or {@link #NATIVE_SIZE_DYNAMIC} if the size is dynamic
+ */
+ public abstract int getNativeSize();
+
+ /**
+ * The type reference for {@code T} for the managed type side of this marshaler.
+ */
+ public TypeReference<T> getTypeReference() {
+ return mTypeReference;
+ }
+
+ /** The native type corresponding to the native side of this marshaler. */
+ public int getNativeType() {
+ return mNativeType;
+ }
+}
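
The sizing contract above can be summarized by how a caller drives it; a minimal sketch (the helper name is hypothetical):

import android.hardware.camera2.marshal.Marshaler;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class MarshalerContractSketch {
    // Hypothetical helper: size the buffer from calculateMarshalSize, which
    // fixed-size marshalers forward to getNativeSize() and dynamically-sized
    // marshalers (getNativeSize() == NATIVE_SIZE_DYNAMIC) must override.
    static <T> ByteBuffer marshalToNewBuffer(Marshaler<T> marshaler, T value) {
        int size = marshaler.calculateMarshalSize(value);
        ByteBuffer buffer = ByteBuffer.allocate(size).order(ByteOrder.nativeOrder());
        marshaler.marshal(value, buffer);
        // Per the contract above, position() has advanced by exactly `size`.
        return buffer;
    }
}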
diff --git a/android/hardware/camera2/marshal/impl/MarshalQueryableArray.java b/android/hardware/camera2/marshal/impl/MarshalQueryableArray.java
new file mode 100644
index 00000000..ebc74f09
--- /dev/null
+++ b/android/hardware/camera2/marshal/impl/MarshalQueryableArray.java
@@ -0,0 +1,179 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.marshal.impl;
+
+import android.hardware.camera2.marshal.Marshaler;
+import android.hardware.camera2.marshal.MarshalQueryable;
+import android.hardware.camera2.marshal.MarshalRegistry;
+import android.hardware.camera2.utils.TypeReference;
+import android.util.Log;
+
+import java.lang.reflect.Array;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+
+/**
+ * Marshal any array {@code T}.
+ *
+ * <p>To marshal any {@code T} to/from a native type, a marshaler for {@code T} to/from that
+ * native type must also exist.</p>
+ *
+ * <p>{@code T} can be either a T2[] where T2 is an object type, or a P[] where P is a
+ * built-in primitive (e.g. int[], float[], etc.).</p>
+ *
+ * @param <T> the type of the array (e.g. T = int[], or T = Rational[])
+ */
+public class MarshalQueryableArray<T> implements MarshalQueryable<T> {
+
+ private static final String TAG = MarshalQueryableArray.class.getSimpleName();
+ private static final boolean DEBUG = false;
+
+ private class MarshalerArray extends Marshaler<T> {
+ private final Class<T> mClass;
+ private final Marshaler<?> mComponentMarshaler;
+ private final Class<?> mComponentClass;
+
+ @SuppressWarnings("unchecked")
+ protected MarshalerArray(TypeReference<T> typeReference, int nativeType) {
+ super(MarshalQueryableArray.this, typeReference, nativeType);
+
+ mClass = (Class<T>)typeReference.getRawType();
+
+ TypeReference<?> componentToken = typeReference.getComponentType();
+ mComponentMarshaler = MarshalRegistry.getMarshaler(componentToken, mNativeType);
+ mComponentClass = componentToken.getRawType();
+ }
+
+ @Override
+ public void marshal(T value, ByteBuffer buffer) {
+ int length = Array.getLength(value);
+ for (int i = 0; i < length; ++i) {
+ marshalArrayElement(mComponentMarshaler, buffer, value, i);
+ }
+ }
+
+ @Override
+ public T unmarshal(ByteBuffer buffer) {
+ Object array;
+
+ int elementSize = mComponentMarshaler.getNativeSize();
+
+ if (elementSize != Marshaler.NATIVE_SIZE_DYNAMIC) {
+ int remaining = buffer.remaining();
+ int arraySize = remaining / elementSize;
+
+ if (remaining % elementSize != 0) {
+ throw new UnsupportedOperationException("Arrays for " + mTypeReference
+ + " must be packed tighly into a multiple of " + elementSize
+ + "; but there are " + (remaining % elementSize) + " left over bytes");
+ }
+
+ if (DEBUG) {
+ Log.v(TAG, String.format(
+ "Attempting to unpack array (count = %d, element size = %d, bytes "
+ + "remaining = %d) for type %s",
+ arraySize, elementSize, remaining, mClass));
+ }
+
+ array = Array.newInstance(mComponentClass, arraySize);
+ for (int i = 0; i < arraySize; ++i) {
+ Object elem = mComponentMarshaler.unmarshal(buffer);
+ Array.set(array, i, elem);
+ }
+ } else {
+ // Dynamic size, use an array list.
+ ArrayList<Object> arrayList = new ArrayList<Object>();
+
+ // Assumes array is packed tightly; no unused bytes allowed
+ while (buffer.hasRemaining()) {
+ Object elem = mComponentMarshaler.unmarshal(buffer);
+ arrayList.add(elem);
+ }
+
+ int arraySize = arrayList.size();
+ array = copyListToArray(arrayList, Array.newInstance(mComponentClass, arraySize));
+ }
+
+ if (buffer.remaining() != 0) {
+ Log.e(TAG, "Trailing bytes (" + buffer.remaining() + ") left over after unpacking "
+ + mClass);
+ }
+
+ return mClass.cast(array);
+ }
+
+ @Override
+ public int getNativeSize() {
+ return NATIVE_SIZE_DYNAMIC;
+ }
+
+ @Override
+ public int calculateMarshalSize(T value) {
+ int elementSize = mComponentMarshaler.getNativeSize();
+ int arrayLength = Array.getLength(value);
+
+ if (elementSize != Marshaler.NATIVE_SIZE_DYNAMIC) {
+ // The fast way. Every element size is uniform.
+ return elementSize * arrayLength;
+ } else {
+ // The slow way. Accumulate size for each element.
+ int size = 0;
+ for (int i = 0; i < arrayLength; ++i) {
+ size += calculateElementMarshalSize(mComponentMarshaler, value, i);
+ }
+
+ return size;
+ }
+ }
+
+ /*
+ * Helpers to avoid compiler errors regarding types with wildcards (?)
+ */
+
+ @SuppressWarnings("unchecked")
+ private <TElem> void marshalArrayElement(Marshaler<TElem> marshaler,
+ ByteBuffer buffer, Object array, int index) {
+ marshaler.marshal((TElem)Array.get(array, index), buffer);
+ }
+
+ @SuppressWarnings("unchecked")
+ private Object copyListToArray(ArrayList<?> arrayList, Object arrayDest) {
+ return arrayList.toArray((T[]) arrayDest);
+ }
+
+ @SuppressWarnings("unchecked")
+ private <TElem> int calculateElementMarshalSize(Marshaler<TElem> marshaler,
+ Object array, int index) {
+ Object elem = Array.get(array, index);
+
+ return marshaler.calculateMarshalSize((TElem) elem);
+ }
+ }
+
+ @Override
+ public Marshaler<T> createMarshaler(TypeReference<T> managedType, int nativeType) {
+ return new MarshalerArray(managedType, nativeType);
+ }
+
+ @Override
+ public boolean isTypeMappingSupported(TypeReference<T> managedType, int nativeType) {
+ // support both ConcreteType[] and GenericType<ConcreteType>[]
+ return managedType.getRawType().isArray();
+
+ // TODO: Should this recurse deeper and check that there is
+ // a valid marshaler for the ConcreteType as well?
+ }
+}
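
The element-count arithmetic in the fixed-size branch of unmarshal is worth a worked example; the numbers below are illustrative (8 bytes matches a rational: two int32s):

import java.nio.ByteBuffer;

public class ArrayCountSketch {
    public static void main(String[] args) {
        int elementSize = 8;                     // e.g. SIZEOF_RATIONAL
        ByteBuffer buffer = ByteBuffer.allocate(24);

        int remaining = buffer.remaining();      // 24
        int arraySize = remaining / elementSize; // 3 elements
        int leftover = remaining % elementSize;  // 0 -> tightly packed, OK

        System.out.println(arraySize + " elements, " + leftover + " leftover bytes");
        // A 25-byte buffer would leave 1 byte over, and the marshaler would throw.
    }
}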
diff --git a/android/hardware/camera2/marshal/impl/MarshalQueryableBlackLevelPattern.java b/android/hardware/camera2/marshal/impl/MarshalQueryableBlackLevelPattern.java
new file mode 100644
index 00000000..bcb035ea
--- /dev/null
+++ b/android/hardware/camera2/marshal/impl/MarshalQueryableBlackLevelPattern.java
@@ -0,0 +1,76 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.marshal.impl;
+
+import android.hardware.camera2.marshal.MarshalQueryable;
+import android.hardware.camera2.marshal.Marshaler;
+import android.hardware.camera2.params.BlackLevelPattern;
+import android.hardware.camera2.utils.TypeReference;
+
+import java.nio.ByteBuffer;
+
+import static android.hardware.camera2.impl.CameraMetadataNative.TYPE_INT32;
+import static android.hardware.camera2.marshal.MarshalHelpers.SIZEOF_INT32;
+
+/**
+ * Marshal {@link BlackLevelPattern} to/from {@link #TYPE_INT32} {@code x 4}
+ */
+public class MarshalQueryableBlackLevelPattern implements MarshalQueryable<BlackLevelPattern> {
+ private static final int SIZE = SIZEOF_INT32 * BlackLevelPattern.COUNT;
+
+ private class MarshalerBlackLevelPattern extends Marshaler<BlackLevelPattern> {
+ protected MarshalerBlackLevelPattern(TypeReference<BlackLevelPattern> typeReference,
+ int nativeType) {
+ super(MarshalQueryableBlackLevelPattern.this, typeReference, nativeType);
+ }
+
+ @Override
+ public void marshal(BlackLevelPattern value, ByteBuffer buffer) {
+ for (int i = 0; i < BlackLevelPattern.COUNT / 2; ++i) {
+ for (int j = 0; j < BlackLevelPattern.COUNT / 2; ++j) {
+ buffer.putInt(value.getOffsetForIndex(j, i));
+ }
+ }
+ }
+
+ @Override
+ public BlackLevelPattern unmarshal(ByteBuffer buffer) {
+ int[] channelOffsets = new int[BlackLevelPattern.COUNT];
+ for (int i = 0; i < BlackLevelPattern.COUNT; ++i) {
+ channelOffsets[i] = buffer.getInt();
+ }
+ return new BlackLevelPattern(channelOffsets);
+ }
+
+ @Override
+ public int getNativeSize() {
+ return SIZE;
+ }
+ }
+
+ @Override
+ public Marshaler<BlackLevelPattern> createMarshaler(
+ TypeReference<BlackLevelPattern> managedType, int nativeType) {
+ return new MarshalerBlackLevelPattern(managedType, nativeType);
+ }
+
+ @Override
+ public boolean isTypeMappingSupported(
+ TypeReference<BlackLevelPattern> managedType, int nativeType) {
+ return nativeType == TYPE_INT32 &&
+ (BlackLevelPattern.class.equals(managedType.getType()));
+ }
+}
diff --git a/android/hardware/camera2/marshal/impl/MarshalQueryableBoolean.java b/android/hardware/camera2/marshal/impl/MarshalQueryableBoolean.java
new file mode 100644
index 00000000..4aa4b4ad
--- /dev/null
+++ b/android/hardware/camera2/marshal/impl/MarshalQueryableBoolean.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.marshal.impl;
+
+import static android.hardware.camera2.impl.CameraMetadataNative.*;
+import static android.hardware.camera2.marshal.MarshalHelpers.*;
+
+import android.hardware.camera2.marshal.Marshaler;
+import android.hardware.camera2.marshal.MarshalQueryable;
+import android.hardware.camera2.utils.TypeReference;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Marshal booleans: TYPE_BYTE <-> boolean/Boolean
+ */
+public class MarshalQueryableBoolean implements MarshalQueryable<Boolean> {
+
+ private class MarshalerBoolean extends Marshaler<Boolean> {
+ protected MarshalerBoolean(TypeReference<Boolean> typeReference, int nativeType) {
+ super(MarshalQueryableBoolean.this, typeReference, nativeType);
+ }
+
+ @Override
+ public void marshal(Boolean value, ByteBuffer buffer) {
+ boolean unboxValue = value;
+ buffer.put((byte)(unboxValue ? 1 : 0));
+ }
+
+ @Override
+ public Boolean unmarshal(ByteBuffer buffer) {
+ return buffer.get() != 0;
+ }
+
+ @Override
+ public int getNativeSize() {
+ return SIZEOF_BYTE;
+ }
+ }
+
+ @Override
+ public Marshaler<Boolean> createMarshaler(TypeReference<Boolean> managedType,
+ int nativeType) {
+ return new MarshalerBoolean(managedType, nativeType);
+ }
+
+ @Override
+ public boolean isTypeMappingSupported(TypeReference<Boolean> managedType, int nativeType) {
+ return (Boolean.class.equals(managedType.getType())
+ || boolean.class.equals(managedType.getType())) && nativeType == TYPE_BYTE;
+ }
+
+
+}
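
A tiny roundtrip of the wire convention above: booleans are written as 1/0, and any non-zero byte decodes as true:

import java.nio.ByteBuffer;

public class BooleanWireSketch {
    public static void main(String[] args) {
        ByteBuffer buffer = ByteBuffer.allocate(1);
        buffer.put((byte) 1);                  // same encoding as MarshalerBoolean
        buffer.rewind();
        System.out.println(buffer.get() != 0); // true
    }
}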
diff --git a/android/hardware/camera2/marshal/impl/MarshalQueryableColorSpaceTransform.java b/android/hardware/camera2/marshal/impl/MarshalQueryableColorSpaceTransform.java
new file mode 100644
index 00000000..47f79bfc
--- /dev/null
+++ b/android/hardware/camera2/marshal/impl/MarshalQueryableColorSpaceTransform.java
@@ -0,0 +1,84 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.marshal.impl;
+
+import android.hardware.camera2.marshal.Marshaler;
+import android.hardware.camera2.marshal.MarshalQueryable;
+import android.hardware.camera2.params.ColorSpaceTransform;
+import android.hardware.camera2.utils.TypeReference;
+
+import java.nio.ByteBuffer;
+
+import static android.hardware.camera2.impl.CameraMetadataNative.*;
+import static android.hardware.camera2.marshal.MarshalHelpers.*;
+
+/**
+ * Marshal {@link ColorSpaceTransform} to/from {@link #TYPE_RATIONAL}
+ */
+public class MarshalQueryableColorSpaceTransform implements
+ MarshalQueryable<ColorSpaceTransform> {
+
+ private static final int ELEMENTS_INT32 = 3 * 3 * (SIZEOF_RATIONAL / SIZEOF_INT32);
+ private static final int SIZE = SIZEOF_INT32 * ELEMENTS_INT32;
+
+ /** rational x 3 x 3 */
+ private class MarshalerColorSpaceTransform extends Marshaler<ColorSpaceTransform> {
+ protected MarshalerColorSpaceTransform(TypeReference<ColorSpaceTransform> typeReference,
+ int nativeType) {
+ super(MarshalQueryableColorSpaceTransform.this, typeReference, nativeType);
+ }
+
+ @Override
+ public void marshal(ColorSpaceTransform value, ByteBuffer buffer) {
+ int[] transformAsArray = new int[ELEMENTS_INT32];
+ value.copyElements(transformAsArray, /*offset*/0);
+
+ for (int i = 0; i < ELEMENTS_INT32; ++i) {
+ buffer.putInt(transformAsArray[i]);
+ }
+ }
+
+ @Override
+ public ColorSpaceTransform unmarshal(ByteBuffer buffer) {
+ int[] transformAsArray = new int[ELEMENTS_INT32];
+
+ for (int i = 0; i < ELEMENTS_INT32; ++i) {
+ transformAsArray[i] = buffer.getInt();
+ }
+
+ return new ColorSpaceTransform(transformAsArray);
+ }
+
+ @Override
+ public int getNativeSize() {
+ return SIZE;
+ }
+ }
+
+ @Override
+ public Marshaler<ColorSpaceTransform> createMarshaler(
+ TypeReference<ColorSpaceTransform> managedType, int nativeType) {
+ return new MarshalerColorSpaceTransform(managedType, nativeType);
+ }
+
+ @Override
+ public boolean isTypeMappingSupported(
+ TypeReference<ColorSpaceTransform> managedType, int nativeType) {
+ return nativeType == TYPE_RATIONAL &&
+ ColorSpaceTransform.class.equals(managedType.getType());
+ }
+
+}
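
A quick check of the size constants above: each rational is two int32s (numerator, denominator), so the 3x3 matrix flattens to 18 ints, or 72 bytes:

public class ColorSpaceTransformSizeSketch {
    public static void main(String[] args) {
        int sizeofInt32 = 4;                    // SIZEOF_INT32 in MarshalHelpers
        int sizeofRational = 8;                 // numerator + denominator
        int elementsInt32 = 3 * 3 * (sizeofRational / sizeofInt32); // 18 ints
        int size = sizeofInt32 * elementsInt32;                     // 72 bytes
        System.out.println(elementsInt32 + " ints, " + size + " bytes");
    }
}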
diff --git a/android/hardware/camera2/marshal/impl/MarshalQueryableEnum.java b/android/hardware/camera2/marshal/impl/MarshalQueryableEnum.java
new file mode 100644
index 00000000..621a418f
--- /dev/null
+++ b/android/hardware/camera2/marshal/impl/MarshalQueryableEnum.java
@@ -0,0 +1,220 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.marshal.impl;
+
+import android.hardware.camera2.marshal.Marshaler;
+import android.hardware.camera2.marshal.MarshalQueryable;
+import android.hardware.camera2.utils.TypeReference;
+import android.util.Log;
+
+import java.nio.ByteBuffer;
+import java.util.HashMap;
+
+import static android.hardware.camera2.impl.CameraMetadataNative.*;
+import static android.hardware.camera2.marshal.MarshalHelpers.*;
+
+/**
+ * Marshal any simple enum (0-arg constructors only) into/from either
+ * {@code TYPE_BYTE} or {@code TYPE_INT32}.
+ *
+ * <p>By default, each enum value is mapped to its ordinal; this can be overridden
+ * by providing explicit values with {@link #registerEnumValues}.</p>
+ *
+ * @param <T> the type of {@code Enum}
+ */
+public class MarshalQueryableEnum<T extends Enum<T>> implements MarshalQueryable<T> {
+
+ private static final String TAG = MarshalQueryableEnum.class.getSimpleName();
+ private static final boolean DEBUG = false;
+
+ private static final int UINT8_MIN = 0x0;
+ private static final int UINT8_MAX = (1 << Byte.SIZE) - 1;
+ private static final int UINT8_MASK = UINT8_MAX;
+
+ private class MarshalerEnum extends Marshaler<T> {
+
+ private final Class<T> mClass;
+
+ @SuppressWarnings("unchecked")
+ protected MarshalerEnum(TypeReference<T> typeReference, int nativeType) {
+ super(MarshalQueryableEnum.this, typeReference, nativeType);
+
+ mClass = (Class<T>)typeReference.getRawType();
+ }
+
+ @Override
+ public void marshal(T value, ByteBuffer buffer) {
+ int enumValue = getEnumValue(value);
+
+ if (mNativeType == TYPE_INT32) {
+ buffer.putInt(enumValue);
+ } else if (mNativeType == TYPE_BYTE) {
+ if (enumValue < UINT8_MIN || enumValue > UINT8_MAX) {
+ throw new UnsupportedOperationException(String.format(
+ "Enum value %x too large to fit into unsigned byte", enumValue));
+ }
+ buffer.put((byte)enumValue);
+ } else {
+ throw new AssertionError();
+ }
+ }
+
+ @Override
+ public T unmarshal(ByteBuffer buffer) {
+ int enumValue;
+
+ switch (mNativeType) {
+ case TYPE_INT32:
+ enumValue = buffer.getInt();
+ break;
+ case TYPE_BYTE:
+ // get the unsigned byte value; avoid sign extension
+ enumValue = buffer.get() & UINT8_MASK;
+ break;
+ default:
+ throw new AssertionError(
+ "Unexpected native type; impossible since its not supported");
+ }
+
+ return getEnumFromValue(mClass, enumValue);
+ }
+
+ @Override
+ public int getNativeSize() {
+ return getPrimitiveTypeSize(mNativeType);
+ }
+ }
+
+ @Override
+ public Marshaler<T> createMarshaler(TypeReference<T> managedType, int nativeType) {
+ return new MarshalerEnum(managedType, nativeType);
+ }
+
+ @Override
+ public boolean isTypeMappingSupported(TypeReference<T> managedType, int nativeType) {
+ if (nativeType == TYPE_INT32 || nativeType == TYPE_BYTE) {
+ if (managedType.getType() instanceof Class<?>) {
+ Class<?> typeClass = (Class<?>)managedType.getType();
+
+ if (typeClass.isEnum()) {
+ if (DEBUG) {
+ Log.v(TAG, "possible enum detected for " + typeClass);
+ }
+
+ // The enum must not take extra arguments
+ try {
+ // match a class like: "public enum Fruits { Apple, Orange; }"
+ typeClass.getDeclaredConstructor(String.class, int.class);
+ return true;
+ } catch (NoSuchMethodException e) {
+ // Skip: custom enum with a special constructor e.g. Foo(T), but need Foo()
+ Log.e(TAG, "Can't marshal class " + typeClass + "; no default constructor");
+ } catch (SecurityException e) {
+ // Skip: wouldn't be able to touch the enum anyway
+ Log.e(TAG, "Can't marshal class " + typeClass + "; not accessible");
+ }
+ }
+ }
+ }
+
+ return false;
+ }
+
+ @SuppressWarnings("rawtypes")
+ private static final HashMap<Class<? extends Enum>, int[]> sEnumValues =
+ new HashMap<Class<? extends Enum>, int[]>();
+
+ /**
+ * Register a non-sequential set of values to be used with the marshal/unmarshal functions.
+ *
+ * <p>This enables get/set to correctly marshal the enum into a value that is C-compatible.</p>
+ *
+ * @param enumType The class for an enum
+ * @param values A list of values mapping to the ordinals of the enum
+ */
+ public static <T extends Enum<T>> void registerEnumValues(Class<T> enumType, int[] values) {
+ if (enumType.getEnumConstants().length != values.length) {
+ throw new IllegalArgumentException(
+ "Expected values array (length " + values.length + ") to match the number "
+ + "of enum constants for type " + enumType);
+ }
+ if (DEBUG) {
+ Log.v(TAG, "Registered enum values for type " + enumType + " values");
+ }
+
+ sEnumValues.put(enumType, values);
+ }
+
+ /**
+ * Get the numeric value from an enum.
+ *
+ * <p>This is usually the same as the ordinal value for
+ * enums that have fully sequential values, although for C-style enums the range of values
+ * may not map 1:1.</p>
+ *
+ * @param enumValue Enum instance
+ * @return Int guaranteed to be ABI-compatible with the C enum equivalent
+ */
+ private static <T extends Enum<T>> int getEnumValue(T enumValue) {
+ int[] values;
+ values = sEnumValues.get(enumValue.getClass());
+
+ int ordinal = enumValue.ordinal();
+ if (values != null) {
+ return values[ordinal];
+ }
+
+ return ordinal;
+ }
+
+ /**
+ * Finds the enum corresponding to its numeric value. Opposite of {@link #getEnumValue}.
+ *
+ * @param enumType Class of the enum we want to find
+ * @param value The numeric value of the enum
+ * @return An instance of the enum
+ */
+ private static <T extends Enum<T>> T getEnumFromValue(Class<T> enumType, int value) {
+ int ordinal;
+
+ int[] registeredValues = sEnumValues.get(enumType);
+ if (registeredValues != null) {
+ ordinal = -1;
+
+ for (int i = 0; i < registeredValues.length; ++i) {
+ if (registeredValues[i] == value) {
+ ordinal = i;
+ break;
+ }
+ }
+ } else {
+ ordinal = value;
+ }
+
+ T[] values = enumType.getEnumConstants();
+
+ if (ordinal < 0 || ordinal >= values.length) {
+ throw new IllegalArgumentException(
+ String.format(
+ "Argument 'value' (%d) was not a valid enum value for type %s "
+ + "(registered? %b)",
+ value,
+ enumType, (registeredValues != null)));
+ }
+
+ return values[ordinal];
+ }
+}
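
A hedged sketch of the override mechanism; the enum and its C-style values are hypothetical, chosen to show a non-sequential mapping:

import android.hardware.camera2.marshal.impl.MarshalQueryableEnum;

public class EnumValuesSketch {
    // Hypothetical enum whose C equivalent skips values (0, 1, 100)
    enum DemoMode { OFF, ON, EXPERIMENTAL }

    public static void main(String[] args) {
        // Without this call, ordinals (0, 1, 2) would be marshaled;
        // with it, EXPERIMENTAL marshals as 100 to stay ABI-compatible.
        MarshalQueryableEnum.registerEnumValues(DemoMode.class,
                new int[] {0, 1, 100});
    }
}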
diff --git a/android/hardware/camera2/marshal/impl/MarshalQueryableHighSpeedVideoConfiguration.java b/android/hardware/camera2/marshal/impl/MarshalQueryableHighSpeedVideoConfiguration.java
new file mode 100644
index 00000000..2449abed
--- /dev/null
+++ b/android/hardware/camera2/marshal/impl/MarshalQueryableHighSpeedVideoConfiguration.java
@@ -0,0 +1,85 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.marshal.impl;
+
+import android.hardware.camera2.marshal.Marshaler;
+import android.hardware.camera2.marshal.MarshalQueryable;
+import android.hardware.camera2.params.HighSpeedVideoConfiguration;
+import android.hardware.camera2.utils.TypeReference;
+
+import static android.hardware.camera2.impl.CameraMetadataNative.*;
+import static android.hardware.camera2.marshal.MarshalHelpers.*;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Marshaler for the custom class {@link HighSpeedVideoConfiguration}, used by
+ * {@code android.control.availableHighSpeedVideoConfigurations}.
+ *
+ * <p>Data is stored as {@code (width, height, fpsMin, fpsMax, batchSizeMax)} tuples (int32).</p>
+ */
+public class MarshalQueryableHighSpeedVideoConfiguration
+ implements MarshalQueryable<HighSpeedVideoConfiguration> {
+ private static final int SIZE = SIZEOF_INT32 * 5;
+
+ private class MarshalerHighSpeedVideoConfiguration
+ extends Marshaler<HighSpeedVideoConfiguration> {
+ protected MarshalerHighSpeedVideoConfiguration(
+ TypeReference<HighSpeedVideoConfiguration> typeReference,
+ int nativeType) {
+ super(MarshalQueryableHighSpeedVideoConfiguration.this, typeReference, nativeType);
+ }
+
+ @Override
+ public void marshal(HighSpeedVideoConfiguration value, ByteBuffer buffer) {
+ buffer.putInt(value.getWidth());
+ buffer.putInt(value.getHeight());
+ buffer.putInt(value.getFpsMin());
+ buffer.putInt(value.getFpsMax());
+ buffer.putInt(value.getBatchSizeMax());
+ }
+
+ @Override
+ public HighSpeedVideoConfiguration unmarshal(ByteBuffer buffer) {
+ int width = buffer.getInt();
+ int height = buffer.getInt();
+ int fpsMin = buffer.getInt();
+ int fpsMax = buffer.getInt();
+ int batchSizeMax = buffer.getInt();
+
+ return new HighSpeedVideoConfiguration(width, height, fpsMin, fpsMax, batchSizeMax);
+ }
+
+ @Override
+ public int getNativeSize() {
+ return SIZE;
+ }
+
+ }
+
+ @Override
+ public Marshaler<HighSpeedVideoConfiguration> createMarshaler(
+ TypeReference<HighSpeedVideoConfiguration> managedType, int nativeType) {
+ return new MarshalerHighSpeedVideoConfiguration(managedType, nativeType);
+ }
+
+ @Override
+ public boolean isTypeMappingSupported(TypeReference<HighSpeedVideoConfiguration> managedType,
+ int nativeType) {
+ return nativeType == TYPE_INT32 &&
+ managedType.getType().equals(HighSpeedVideoConfiguration.class);
+ }
+}
diff --git a/android/hardware/camera2/marshal/impl/MarshalQueryableMeteringRectangle.java b/android/hardware/camera2/marshal/impl/MarshalQueryableMeteringRectangle.java
new file mode 100644
index 00000000..01780db4
--- /dev/null
+++ b/android/hardware/camera2/marshal/impl/MarshalQueryableMeteringRectangle.java
@@ -0,0 +1,88 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.marshal.impl;
+
+import android.hardware.camera2.marshal.Marshaler;
+import android.hardware.camera2.marshal.MarshalQueryable;
+import android.hardware.camera2.params.MeteringRectangle;
+import android.hardware.camera2.utils.TypeReference;
+
+import java.nio.ByteBuffer;
+
+import static android.hardware.camera2.impl.CameraMetadataNative.*;
+import static android.hardware.camera2.marshal.MarshalHelpers.*;
+
+/**
+ * Marshal {@link MeteringRectangle} to/from {@link #TYPE_INT32}
+ */
+public class MarshalQueryableMeteringRectangle implements MarshalQueryable<MeteringRectangle> {
+ private static final int SIZE = SIZEOF_INT32 * 5;
+
+ /** (xmin, ymin, xmax, ymax, weight) */
+ private class MarshalerMeteringRectangle extends Marshaler<MeteringRectangle> {
+ protected MarshalerMeteringRectangle(TypeReference<MeteringRectangle> typeReference,
+ int nativeType) {
+ super(MarshalQueryableMeteringRectangle.this, typeReference, nativeType);
+ }
+
+ @Override
+ public void marshal(MeteringRectangle value, ByteBuffer buffer) {
+ int xMin = value.getX();
+ int yMin = value.getY();
+ int xMax = xMin + value.getWidth();
+ int yMax = yMin + value.getHeight();
+ int weight = value.getMeteringWeight();
+
+ buffer.putInt(xMin);
+ buffer.putInt(yMin);
+ buffer.putInt(xMax);
+ buffer.putInt(yMax);
+ buffer.putInt(weight);
+ }
+
+ @Override
+ public MeteringRectangle unmarshal(ByteBuffer buffer) {
+ int xMin = buffer.getInt();
+ int yMin = buffer.getInt();
+ int xMax = buffer.getInt();
+ int yMax = buffer.getInt();
+ int weight = buffer.getInt();
+
+ int width = xMax - xMin;
+ int height = yMax - yMin;
+
+ return new MeteringRectangle(xMin, yMin, width, height, weight);
+ }
+
+ @Override
+ public int getNativeSize() {
+ return SIZE;
+ }
+ }
+
+ @Override
+ public Marshaler<MeteringRectangle> createMarshaler(
+ TypeReference<MeteringRectangle> managedType, int nativeType) {
+ return new MarshalerMeteringRectangle(managedType, nativeType);
+ }
+
+ @Override
+ public boolean isTypeMappingSupported(
+ TypeReference<MeteringRectangle> managedType, int nativeType) {
+ return nativeType == TYPE_INT32 && MeteringRectangle.class.equals(managedType.getType());
+ }
+
+}
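
The wire format stores corners while the managed class stores origin plus size; the conversion is the arithmetic below (values illustrative):

public class MeteringRectWireSketch {
    public static void main(String[] args) {
        // Managed side: origin + size
        int x = 10, y = 20, width = 100, height = 50, weight = 1;

        // Wire side, as written by marshal(): corners instead of size
        int xMax = x + width;   // 110
        int yMax = y + height;  // 70

        // unmarshal() reverses it exactly
        System.out.println("width=" + (xMax - x) + " height=" + (yMax - y)
                + " weight=" + weight);
    }
}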
diff --git a/android/hardware/camera2/marshal/impl/MarshalQueryableNativeByteToInteger.java b/android/hardware/camera2/marshal/impl/MarshalQueryableNativeByteToInteger.java
new file mode 100644
index 00000000..3b89c829
--- /dev/null
+++ b/android/hardware/camera2/marshal/impl/MarshalQueryableNativeByteToInteger.java
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.marshal.impl;
+
+import static android.hardware.camera2.impl.CameraMetadataNative.*;
+import static android.hardware.camera2.marshal.MarshalHelpers.*;
+
+import android.hardware.camera2.marshal.Marshaler;
+import android.hardware.camera2.marshal.MarshalQueryable;
+import android.hardware.camera2.utils.TypeReference;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Marshal fake native enums (ints): TYPE_BYTE <-> int/Integer
+ */
+public class MarshalQueryableNativeByteToInteger implements MarshalQueryable<Integer> {
+
+ private static final int UINT8_MASK = (1 << Byte.SIZE) - 1;
+
+ private class MarshalerNativeByteToInteger extends Marshaler<Integer> {
+ protected MarshalerNativeByteToInteger(TypeReference<Integer> typeReference,
+ int nativeType) {
+ super(MarshalQueryableNativeByteToInteger.this, typeReference, nativeType);
+ }
+
+ @Override
+ public void marshal(Integer value, ByteBuffer buffer) {
+ buffer.put((byte)(int)value); // truncate down to byte
+ }
+
+ @Override
+ public Integer unmarshal(ByteBuffer buffer) {
+ // expand unsigned byte to int; avoid sign extension
+ return buffer.get() & UINT8_MASK;
+ }
+
+ @Override
+ public int getNativeSize() {
+ return SIZEOF_BYTE;
+ }
+ }
+
+ @Override
+ public Marshaler<Integer> createMarshaler(TypeReference<Integer> managedType,
+ int nativeType) {
+ return new MarshalerNativeByteToInteger(managedType, nativeType);
+ }
+
+ @Override
+ public boolean isTypeMappingSupported(TypeReference<Integer> managedType, int nativeType) {
+ return (Integer.class.equals(managedType.getType())
+ || int.class.equals(managedType.getType())) && nativeType == TYPE_BYTE;
+ }
+
+
+}
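
Why the mask matters, in a self-contained sketch: without it, byte values 128-255 would sign-extend to negative ints when widened:

public class UnsignedByteSketch {
    public static void main(String[] args) {
        final int UINT8_MASK = (1 << Byte.SIZE) - 1; // 0xFF, as defined above
        byte raw = (byte) 0xFE;                      // stored as -2 in a signed byte
        System.out.println((int) raw);               // -2 (sign extension)
        System.out.println(raw & UINT8_MASK);        // 254, the intended unsigned value
    }
}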
diff --git a/android/hardware/camera2/marshal/impl/MarshalQueryablePair.java b/android/hardware/camera2/marshal/impl/MarshalQueryablePair.java
new file mode 100644
index 00000000..0a9935dc
--- /dev/null
+++ b/android/hardware/camera2/marshal/impl/MarshalQueryablePair.java
@@ -0,0 +1,158 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.marshal.impl;
+
+import android.hardware.camera2.marshal.Marshaler;
+import android.hardware.camera2.marshal.MarshalQueryable;
+import android.hardware.camera2.marshal.MarshalRegistry;
+import android.hardware.camera2.utils.TypeReference;
+import android.util.Pair;
+
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.ParameterizedType;
+import java.lang.reflect.Type;
+import java.nio.ByteBuffer;
+
+/**
+ * Marshal {@link Pair} to/from any native type
+ */
+public class MarshalQueryablePair<T1, T2>
+ implements MarshalQueryable<Pair<T1, T2>> {
+
+ private class MarshalerPair extends Marshaler<Pair<T1, T2>> {
+ private final Class<? super Pair<T1, T2>> mClass;
+ private final Constructor<Pair<T1, T2>> mConstructor;
+ /** Marshal the {@code T1} inside of {@code Pair<T1, T2>} */
+ private final Marshaler<T1> mNestedTypeMarshalerFirst;
+ /** Marshal the {@code T2} inside of {@code Pair<T1, T2>} */
+ private final Marshaler<T2> mNestedTypeMarshalerSecond;
+
+ @SuppressWarnings("unchecked")
+ protected MarshalerPair(TypeReference<Pair<T1, T2>> typeReference,
+ int nativeType) {
+ super(MarshalQueryablePair.this, typeReference, nativeType);
+
+ mClass = typeReference.getRawType();
+
+ /*
+ * Lookup the actual type arguments, e.g. Pair<Integer, Float> --> [Integer, Float]
+ * and then get the marshalers for that managed type.
+ */
+ ParameterizedType paramType;
+ try {
+ paramType = (ParameterizedType) typeReference.getType();
+ } catch (ClassCastException e) {
+ throw new AssertionError("Raw use of Pair is not supported", e);
+ }
+
+ // Get type marshaler for T1
+ {
+ Type actualTypeArgument = paramType.getActualTypeArguments()[0];
+
+ TypeReference<?> actualTypeArgToken =
+ TypeReference.createSpecializedTypeReference(actualTypeArgument);
+
+ mNestedTypeMarshalerFirst = (Marshaler<T1>)MarshalRegistry.getMarshaler(
+ actualTypeArgToken, mNativeType);
+ }
+ // Get type marshaler for T2
+ {
+ Type actualTypeArgument = paramType.getActualTypeArguments()[1];
+
+ TypeReference<?> actualTypeArgToken =
+ TypeReference.createSpecializedTypeReference(actualTypeArgument);
+
+ mNestedTypeMarshalerSecond = (Marshaler<T2>)MarshalRegistry.getMarshaler(
+ actualTypeArgToken, mNativeType);
+ }
+ try {
+ mConstructor = (Constructor<Pair<T1, T2>>)mClass.getConstructor(
+ Object.class, Object.class);
+ } catch (NoSuchMethodException e) {
+ throw new AssertionError(e);
+ }
+ }
+
+ @Override
+ public void marshal(Pair<T1, T2> value, ByteBuffer buffer) {
+ if (value.first == null) {
+ throw new UnsupportedOperationException("Pair#first must not be null");
+ } else if (value.second == null) {
+ throw new UnsupportedOperationException("Pair#second must not be null");
+ }
+
+ mNestedTypeMarshalerFirst.marshal(value.first, buffer);
+ mNestedTypeMarshalerSecond.marshal(value.second, buffer);
+ }
+
+ @Override
+ public Pair<T1, T2> unmarshal(ByteBuffer buffer) {
+ T1 first = mNestedTypeMarshalerFirst.unmarshal(buffer);
+ T2 second = mNestedTypeMarshalerSecond.unmarshal(buffer);
+
+ try {
+ return mConstructor.newInstance(first, second);
+ } catch (InstantiationException e) {
+ throw new AssertionError(e);
+ } catch (IllegalAccessException e) {
+ throw new AssertionError(e);
+ } catch (IllegalArgumentException e) {
+ throw new AssertionError(e);
+ } catch (InvocationTargetException e) {
+ throw new AssertionError(e);
+ }
+ }
+
+ @Override
+ public int getNativeSize() {
+ int firstSize = mNestedTypeMarshalerFirst.getNativeSize();
+ int secondSize = mNestedTypeMarshalerSecond.getNativeSize();
+
+ if (firstSize != NATIVE_SIZE_DYNAMIC && secondSize != NATIVE_SIZE_DYNAMIC) {
+ return firstSize + secondSize;
+ } else {
+ return NATIVE_SIZE_DYNAMIC;
+ }
+ }
+
+ @Override
+ public int calculateMarshalSize(Pair<T1, T2> value) {
+ int nativeSize = getNativeSize();
+
+ if (nativeSize != NATIVE_SIZE_DYNAMIC) {
+ return nativeSize;
+ } else {
+ int firstSize = mNestedTypeMarshalerFirst.calculateMarshalSize(value.first);
+ int secondSize = mNestedTypeMarshalerSecond.calculateMarshalSize(value.second);
+
+ return firstSize + secondSize;
+ }
+ }
+ }
+
+ @Override
+ public Marshaler<Pair<T1, T2>> createMarshaler(TypeReference<Pair<T1, T2>> managedType,
+ int nativeType) {
+ return new MarshalerPair(managedType, nativeType);
+ }
+
+ @Override
+ public boolean isTypeMappingSupported(TypeReference<Pair<T1, T2>> managedType, int nativeType) {
+ return (Pair.class.equals(managedType.getRawType()));
+ }
+
+}
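
The nested-marshaler lookup hinges on reading the actual type arguments off a reified generic type; a minimal reflection sketch (the field trick here only illustrates what TypeReference captures internally):

import android.util.Pair;

import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;

public class PairTypeArgsSketch {
    // A field with a reified generic type we can inspect at runtime
    static Pair<Integer, Float> sample;

    public static void main(String[] argv) throws Exception {
        ParameterizedType paramType = (ParameterizedType)
                PairTypeArgsSketch.class.getDeclaredField("sample").getGenericType();

        // Same call MarshalerPair uses to find T1 and T2
        Type[] typeArgs = paramType.getActualTypeArguments();
        System.out.println(typeArgs[0]); // class java.lang.Integer
        System.out.println(typeArgs[1]); // class java.lang.Float
    }
}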
diff --git a/android/hardware/camera2/marshal/impl/MarshalQueryableParcelable.java b/android/hardware/camera2/marshal/impl/MarshalQueryableParcelable.java
new file mode 100644
index 00000000..fdde2057
--- /dev/null
+++ b/android/hardware/camera2/marshal/impl/MarshalQueryableParcelable.java
@@ -0,0 +1,190 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.marshal.impl;
+
+import android.hardware.camera2.marshal.Marshaler;
+import android.hardware.camera2.marshal.MarshalQueryable;
+import android.hardware.camera2.utils.TypeReference;
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.util.Log;
+
+import java.lang.reflect.Field;
+import java.nio.ByteBuffer;
+
+/**
+ * Marshal any {@code T extends Parcelable} to/from any native type
+ *
+ * <p>Use with extreme caution! File descriptors and binders will not be marshaled across.</p>
+ */
+public class MarshalQueryableParcelable<T extends Parcelable>
+ implements MarshalQueryable<T> {
+
+ private static final String TAG = "MarshalParcelable";
+ private static final boolean DEBUG = false;
+
+ private static final String FIELD_CREATOR = "CREATOR";
+
+ private class MarshalerParcelable extends Marshaler<T> {
+
+ private final Class<T> mClass;
+ private final Parcelable.Creator<T> mCreator;
+
+ @SuppressWarnings("unchecked")
+ protected MarshalerParcelable(TypeReference<T> typeReference,
+ int nativeType) {
+ super(MarshalQueryableParcelable.this, typeReference, nativeType);
+
+ mClass = (Class<T>)typeReference.getRawType();
+ Field creatorField;
+ try {
+ creatorField = mClass.getDeclaredField(FIELD_CREATOR);
+ } catch (NoSuchFieldException e) {
+ // Impossible. All Parcelable implementations must have a 'CREATOR' static field
+ throw new AssertionError(e);
+ }
+
+ try {
+ mCreator = (Parcelable.Creator<T>)creatorField.get(null);
+ } catch (IllegalAccessException e) {
+ // Impossible: All 'CREATOR' static fields must be public
+ throw new AssertionError(e);
+ } catch (IllegalArgumentException e) {
+ // Impossible: This is a static field, so null must be ok
+ throw new AssertionError(e);
+ }
+ }
+
+ @Override
+ public void marshal(T value, ByteBuffer buffer) {
+ if (DEBUG) {
+ Log.v(TAG, "marshal " + value);
+ }
+
+ Parcel parcel = Parcel.obtain();
+ byte[] parcelContents;
+
+ try {
+ value.writeToParcel(parcel, /*flags*/0);
+
+ if (parcel.hasFileDescriptors()) {
+ throw new UnsupportedOperationException(
+ "Parcelable " + value + " must not have file descriptors");
+ }
+
+ parcelContents = parcel.marshall();
+ }
+ finally {
+ parcel.recycle();
+ }
+
+ if (parcelContents.length == 0) {
+ throw new AssertionError("No data marshaled for " + value);
+ }
+
+ buffer.put(parcelContents);
+ }
+
+ @Override
+ public T unmarshal(ByteBuffer buffer) {
+ if (DEBUG) {
+ Log.v(TAG, "unmarshal, buffer remaining " + buffer.remaining());
+ }
+
+ /*
+ * Quadratically slow when marshaling an array of parcelables.
+ *
+ * Read out the entire byte buffer as an array, then copy it into the parcel.
+ *
+ * Once we unparcel the entire object, advance the byte buffer by only how many
+ * bytes the parcel actually used up.
+ *
+ * Future: If we ever do need to use parcelable arrays, we can do this a little smarter
+ * by reading out a chunk like 4,8,16,24 each time, but not sure how to detect
+ * parcels being too short in this case.
+ *
+ * Future: Alternatively use Parcel#obtain(long) directly into the native
+ * pointer of a ByteBuffer, which would not copy if the ByteBuffer was direct.
+ */
+ buffer.mark();
+
+ Parcel parcel = Parcel.obtain();
+ try {
+ int maxLength = buffer.remaining();
+
+ byte[] remaining = new byte[maxLength];
+ buffer.get(remaining);
+
+ parcel.unmarshall(remaining, /*offset*/0, maxLength);
+ parcel.setDataPosition(/*pos*/0);
+
+ T value = mCreator.createFromParcel(parcel);
+ int actualLength = parcel.dataPosition();
+
+ if (actualLength == 0) {
+ throw new AssertionError("No data marshaled for " + value);
+ }
+
+ // set the position past the bytes the parcelable actually used
+ buffer.reset();
+ buffer.position(buffer.position() + actualLength);
+
+ if (DEBUG) {
+ Log.v(TAG, "unmarshal, parcel length was " + actualLength);
+ Log.v(TAG, "unmarshal, value is " + value);
+ }
+
+ return mClass.cast(value);
+ } finally {
+ parcel.recycle();
+ }
+ }
+
+ @Override
+ public int getNativeSize() {
+ return NATIVE_SIZE_DYNAMIC;
+ }
+
+ @Override
+ public int calculateMarshalSize(T value) {
+ Parcel parcel = Parcel.obtain();
+ try {
+ value.writeToParcel(parcel, /*flags*/0);
+ int length = parcel.marshall().length;
+
+ if (DEBUG) {
+ Log.v(TAG, "calculateMarshalSize, length when parceling "
+ + value + " is " + length);
+ }
+
+ return length;
+ } finally {
+ parcel.recycle();
+ }
+ }
+ }
+
+ @Override
+ public Marshaler<T> createMarshaler(TypeReference<T> managedType, int nativeType) {
+ return new MarshalerParcelable(managedType, nativeType);
+ }
+
+ @Override
+ public boolean isTypeMappingSupported(TypeReference<T> managedType, int nativeType) {
+ return Parcelable.class.isAssignableFrom(managedType.getRawType());
+ }
+
+}
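
The size-accounting idea in calculateMarshalSize, sketched with android.os classes only; Bundle stands in for an arbitrary Parcelable and must, per the caveat above, contain no file descriptors:

import android.os.Bundle;
import android.os.Parcel;

public class ParcelSizeSketch {
    public static void main(String[] args) {
        Bundle value = new Bundle();
        value.putInt("key", 42);

        // calculateMarshalSize above does exactly this: parcel, measure, recycle.
        Parcel parcel = Parcel.obtain();
        try {
            value.writeToParcel(parcel, /*flags*/ 0);
            int length = parcel.marshall().length;
            System.out.println("marshaled size = " + length + " bytes");
        } finally {
            parcel.recycle();
        }
    }
}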
diff --git a/android/hardware/camera2/marshal/impl/MarshalQueryablePrimitive.java b/android/hardware/camera2/marshal/impl/MarshalQueryablePrimitive.java
new file mode 100644
index 00000000..090dd48a
--- /dev/null
+++ b/android/hardware/camera2/marshal/impl/MarshalQueryablePrimitive.java
@@ -0,0 +1,183 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.marshal.impl;
+
+import android.hardware.camera2.impl.CameraMetadataNative;
+import android.hardware.camera2.marshal.Marshaler;
+import android.hardware.camera2.marshal.MarshalQueryable;
+import android.hardware.camera2.utils.TypeReference;
+import android.util.Rational;
+
+import static android.hardware.camera2.impl.CameraMetadataNative.*;
+import static android.hardware.camera2.marshal.MarshalHelpers.*;
+import java.nio.ByteBuffer;
+
+/**
+ * Marshal/unmarshal built-in primitive types to and from a {@link ByteBuffer}.
+ *
+ * <p>The following type mappings are supported for marshaling:
+ * <ul>
+ * <li>byte <-> TYPE_BYTE
+ * <li>int <-> TYPE_INT32
+ * <li>long <-> TYPE_INT64
+ * <li>float <-> TYPE_FLOAT
+ * <li>double <-> TYPE_DOUBLE
+ * <li>Rational <-> TYPE_RATIONAL
+ * </ul>
+ * </p>
+ *
+ * <p>Due to the nature of generics, values are always boxed; this also means that both
+ * the boxed and unboxed types are supported (i.e. both {@code int} and {@code Integer}).</p>
+ *
+ * <p>Each managed type <!--(other than boolean)--> must correspond 1:1 to the native type
+ * (e.g. a byte will not map to a {@link CameraMetadataNative#TYPE_INT32 TYPE_INT32} or vice versa)
+ * for marshaling.</p>
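+ *
+ * <p>Illustrative round-trip (a sketch, not part of the original source; assumes a
+ * {@code TypeReference} captured via an anonymous subclass, as used elsewhere in this
+ * package):
+ * <pre>{@code
+ * MarshalQueryable<Integer> queryable = new MarshalQueryablePrimitive<Integer>();
+ * Marshaler<Integer> marshaler = queryable.createMarshaler(
+ *         new TypeReference<Integer>() {}, CameraMetadataNative.TYPE_INT32);
+ *
+ * ByteBuffer buffer = ByteBuffer.allocate(marshaler.calculateMarshalSize(42));
+ * marshaler.marshal(42, buffer);
+ * buffer.rewind();
+ * int roundTrip = marshaler.unmarshal(buffer); // == 42
+ * }</pre></p>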
+ */
+public final class MarshalQueryablePrimitive<T> implements MarshalQueryable<T> {
+
+ private class MarshalerPrimitive extends Marshaler<T> {
+ /** Always the wrapped class variant of the primitive class for {@code T} */
+ private final Class<T> mClass;
+
+ @SuppressWarnings("unchecked")
+ protected MarshalerPrimitive(TypeReference<T> typeReference, int nativeType) {
+ super(MarshalQueryablePrimitive.this, typeReference, nativeType);
+
+ // Turn primitives into wrappers, otherwise int.class.cast(Integer) will fail
+ mClass = wrapClassIfPrimitive((Class<T>)typeReference.getRawType());
+ }
+
+ @Override
+ public T unmarshal(ByteBuffer buffer) {
+ return mClass.cast(unmarshalObject(buffer));
+ }
+
+ @Override
+ public int calculateMarshalSize(T value) {
+ return getPrimitiveTypeSize(mNativeType);
+ }
+
+ @Override
+ public void marshal(T value, ByteBuffer buffer) {
+ if (value instanceof Integer) {
+ checkNativeTypeEquals(TYPE_INT32, mNativeType);
+ final int val = (Integer) value;
+ marshalPrimitive(val, buffer);
+ } else if (value instanceof Float) {
+ checkNativeTypeEquals(TYPE_FLOAT, mNativeType);
+ final float val = (Float) value;
+ marshalPrimitive(val, buffer);
+ } else if (value instanceof Long) {
+ checkNativeTypeEquals(TYPE_INT64, mNativeType);
+ final long val = (Long) value;
+ marshalPrimitive(val, buffer);
+ } else if (value instanceof Rational) {
+ checkNativeTypeEquals(TYPE_RATIONAL, mNativeType);
+ marshalPrimitive((Rational) value, buffer);
+ } else if (value instanceof Double) {
+ checkNativeTypeEquals(TYPE_DOUBLE, mNativeType);
+ final double val = (Double) value;
+ marshalPrimitive(val, buffer);
+ } else if (value instanceof Byte) {
+ checkNativeTypeEquals(TYPE_BYTE, mNativeType);
+ final byte val = (Byte) value;
+ marshalPrimitive(val, buffer);
+ } else {
+ throw new UnsupportedOperationException(
+ "Can't marshal managed type " + mTypeReference);
+ }
+ }
+
+ private void marshalPrimitive(int value, ByteBuffer buffer) {
+ buffer.putInt(value);
+ }
+
+ private void marshalPrimitive(float value, ByteBuffer buffer) {
+ buffer.putFloat(value);
+ }
+
+ private void marshalPrimitive(double value, ByteBuffer buffer) {
+ buffer.putDouble(value);
+ }
+
+ private void marshalPrimitive(long value, ByteBuffer buffer) {
+ buffer.putLong(value);
+ }
+
+ private void marshalPrimitive(Rational value, ByteBuffer buffer) {
+ buffer.putInt(value.getNumerator());
+ buffer.putInt(value.getDenominator());
+ }
+
+ private void marshalPrimitive(byte value, ByteBuffer buffer) {
+ buffer.put(value);
+ }
+
+ private Object unmarshalObject(ByteBuffer buffer) {
+ switch (mNativeType) {
+ case TYPE_INT32:
+ return buffer.getInt();
+ case TYPE_FLOAT:
+ return buffer.getFloat();
+ case TYPE_INT64:
+ return buffer.getLong();
+ case TYPE_RATIONAL:
+ int numerator = buffer.getInt();
+ int denominator = buffer.getInt();
+ return new Rational(numerator, denominator);
+ case TYPE_DOUBLE:
+ return buffer.getDouble();
+ case TYPE_BYTE:
+ return buffer.get(); // getByte
+ default:
+ throw new UnsupportedOperationException(
+ "Can't unmarshal native type " + mNativeType);
+ }
+ }
+
+ @Override
+ public int getNativeSize() {
+ return getPrimitiveTypeSize(mNativeType);
+ }
+ }
+
+ @Override
+ public Marshaler<T> createMarshaler(TypeReference<T> managedType, int nativeType) {
+ return new MarshalerPrimitive(managedType, nativeType);
+ }
+
+ @Override
+ public boolean isTypeMappingSupported(TypeReference<T> managedType, int nativeType) {
+ if (managedType.getType() instanceof Class<?>) {
+ Class<?> klass = (Class<?>)managedType.getType();
+
+ if (klass == byte.class || klass == Byte.class) {
+ return nativeType == TYPE_BYTE;
+ } else if (klass == int.class || klass == Integer.class) {
+ return nativeType == TYPE_INT32;
+ } else if (klass == float.class || klass == Float.class) {
+ return nativeType == TYPE_FLOAT;
+ } else if (klass == long.class || klass == Long.class) {
+ return nativeType == TYPE_INT64;
+ } else if (klass == double.class || klass == Double.class) {
+ return nativeType == TYPE_DOUBLE;
+ } else if (klass == Rational.class) {
+ return nativeType == TYPE_RATIONAL;
+ }
+ }
+ return false;
+ }
+}
diff --git a/android/hardware/camera2/marshal/impl/MarshalQueryableRange.java b/android/hardware/camera2/marshal/impl/MarshalQueryableRange.java
new file mode 100644
index 00000000..64763e7b
--- /dev/null
+++ b/android/hardware/camera2/marshal/impl/MarshalQueryableRange.java
@@ -0,0 +1,136 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.marshal.impl;
+
+import android.hardware.camera2.marshal.Marshaler;
+import android.hardware.camera2.marshal.MarshalQueryable;
+import android.hardware.camera2.marshal.MarshalRegistry;
+import android.hardware.camera2.utils.TypeReference;
+import android.util.Range;
+
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.ParameterizedType;
+import java.lang.reflect.Type;
+import java.nio.ByteBuffer;
+
+/**
+ * Marshal {@link Range} to/from any native type
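+ *
+ * <p>The lower and upper endpoints are marshaled back-to-back using the marshaler for the
+ * type argument {@code T}; e.g. a {@code Range<Integer>} backed by {@code TYPE_INT32} is
+ * stored as two consecutive int32 values, lower then upper.</p>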
+ */
+public class MarshalQueryableRange<T extends Comparable<? super T>>
+ implements MarshalQueryable<Range<T>> {
+ private static final int RANGE_COUNT = 2;
+
+ private class MarshalerRange extends Marshaler<Range<T>> {
+ private final Class<? super Range<T>> mClass;
+ private final Constructor<Range<T>> mConstructor;
+ /** Marshal the {@code T} inside of {@code Range<T>} */
+ private final Marshaler<T> mNestedTypeMarshaler;
+
+ @SuppressWarnings("unchecked")
+ protected MarshalerRange(TypeReference<Range<T>> typeReference,
+ int nativeType) {
+ super(MarshalQueryableRange.this, typeReference, nativeType);
+
+ mClass = typeReference.getRawType();
+
+ /*
+ * Lookup the actual type argument, e.g. Range<Integer> --> Integer
+ * and then get the marshaler for that managed type.
+ */
+ ParameterizedType paramType;
+ try {
+ paramType = (ParameterizedType) typeReference.getType();
+ } catch (ClassCastException e) {
+ throw new AssertionError("Raw use of Range is not supported", e);
+ }
+ Type actualTypeArgument = paramType.getActualTypeArguments()[0];
+
+ TypeReference<?> actualTypeArgToken =
+ TypeReference.createSpecializedTypeReference(actualTypeArgument);
+
+ mNestedTypeMarshaler = (Marshaler<T>)MarshalRegistry.getMarshaler(
+ actualTypeArgToken, mNativeType);
+ try {
+ mConstructor = (Constructor<Range<T>>)mClass.getConstructor(
+ Comparable.class, Comparable.class);
+ } catch (NoSuchMethodException e) {
+ throw new AssertionError(e);
+ }
+ }
+
+ @Override
+ public void marshal(Range<T> value, ByteBuffer buffer) {
+ mNestedTypeMarshaler.marshal(value.getLower(), buffer);
+ mNestedTypeMarshaler.marshal(value.getUpper(), buffer);
+ }
+
+ @Override
+ public Range<T> unmarshal(ByteBuffer buffer) {
+ T lower = mNestedTypeMarshaler.unmarshal(buffer);
+ T upper = mNestedTypeMarshaler.unmarshal(buffer);
+
+ try {
+ return mConstructor.newInstance(lower, upper);
+ } catch (InstantiationException e) {
+ throw new AssertionError(e);
+ } catch (IllegalAccessException e) {
+ throw new AssertionError(e);
+ } catch (IllegalArgumentException e) {
+ throw new AssertionError(e);
+ } catch (InvocationTargetException e) {
+ throw new AssertionError(e);
+ }
+ }
+
+ @Override
+ public int getNativeSize() {
+ int nestedSize = mNestedTypeMarshaler.getNativeSize();
+
+ if (nestedSize != NATIVE_SIZE_DYNAMIC) {
+ return nestedSize * RANGE_COUNT;
+ } else {
+ return NATIVE_SIZE_DYNAMIC;
+ }
+ }
+
+ @Override
+ public int calculateMarshalSize(Range<T> value) {
+ int nativeSize = getNativeSize();
+
+ if (nativeSize != NATIVE_SIZE_DYNAMIC) {
+ return nativeSize;
+ } else {
+ int lowerSize = mNestedTypeMarshaler.calculateMarshalSize(value.getLower());
+ int upperSize = mNestedTypeMarshaler.calculateMarshalSize(value.getUpper());
+
+ return lowerSize + upperSize;
+ }
+ }
+ }
+
+ @Override
+ public Marshaler<Range<T>> createMarshaler(TypeReference<Range<T>> managedType,
+ int nativeType) {
+ return new MarshalerRange(managedType, nativeType);
+ }
+
+ @Override
+ public boolean isTypeMappingSupported(TypeReference<Range<T>> managedType, int nativeType) {
+ return (Range.class.equals(managedType.getRawType()));
+ }
+
+}
diff --git a/android/hardware/camera2/marshal/impl/MarshalQueryableRect.java b/android/hardware/camera2/marshal/impl/MarshalQueryableRect.java
new file mode 100644
index 00000000..de20a1f8
--- /dev/null
+++ b/android/hardware/camera2/marshal/impl/MarshalQueryableRect.java
@@ -0,0 +1,77 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.marshal.impl;
+
+import android.graphics.Rect;
+import android.hardware.camera2.marshal.Marshaler;
+import android.hardware.camera2.marshal.MarshalQueryable;
+import android.hardware.camera2.utils.TypeReference;
+
+import java.nio.ByteBuffer;
+
+import static android.hardware.camera2.impl.CameraMetadataNative.*;
+import static android.hardware.camera2.marshal.MarshalHelpers.*;
+
+/**
+ * Marshal {@link Rect} to/from {@link #TYPE_INT32}
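+ *
+ * <p>Data is stored as a {@code (left, top, width, height)} tuple (int32 x 4); note that
+ * the width and height are sent on the wire rather than the right and bottom edges.</p>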
+ */
+public class MarshalQueryableRect implements MarshalQueryable<Rect> {
+ private static final int SIZE = SIZEOF_INT32 * 4;
+
+ private class MarshalerRect extends Marshaler<Rect> {
+ protected MarshalerRect(TypeReference<Rect> typeReference,
+ int nativeType) {
+ super(MarshalQueryableRect.this, typeReference, nativeType);
+ }
+
+ @Override
+ public void marshal(Rect value, ByteBuffer buffer) {
+ buffer.putInt(value.left);
+ buffer.putInt(value.top);
+ buffer.putInt(value.width());
+ buffer.putInt(value.height());
+ }
+
+ @Override
+ public Rect unmarshal(ByteBuffer buffer) {
+ int left = buffer.getInt();
+ int top = buffer.getInt();
+ int width = buffer.getInt();
+ int height = buffer.getInt();
+
+ int right = left + width;
+ int bottom = top + height;
+
+ return new Rect(left, top, right, bottom);
+ }
+
+ @Override
+ public int getNativeSize() {
+ return SIZE;
+ }
+ }
+
+ @Override
+ public Marshaler<Rect> createMarshaler(TypeReference<Rect> managedType, int nativeType) {
+ return new MarshalerRect(managedType, nativeType);
+ }
+
+ @Override
+ public boolean isTypeMappingSupported(TypeReference<Rect> managedType, int nativeType) {
+ return nativeType == TYPE_INT32 && (Rect.class.equals(managedType.getType()));
+ }
+
+}
diff --git a/android/hardware/camera2/marshal/impl/MarshalQueryableReprocessFormatsMap.java b/android/hardware/camera2/marshal/impl/MarshalQueryableReprocessFormatsMap.java
new file mode 100644
index 00000000..98a7ad77
--- /dev/null
+++ b/android/hardware/camera2/marshal/impl/MarshalQueryableReprocessFormatsMap.java
@@ -0,0 +1,131 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.marshal.impl;
+
+import android.hardware.camera2.marshal.Marshaler;
+import android.hardware.camera2.marshal.MarshalQueryable;
+import android.hardware.camera2.params.ReprocessFormatsMap;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.hardware.camera2.utils.TypeReference;
+
+import static android.hardware.camera2.impl.CameraMetadataNative.*;
+import static android.hardware.camera2.marshal.MarshalHelpers.*;
+
+import java.nio.ByteBuffer;
+import java.nio.IntBuffer;
+
+/**
+ * Marshaler for the custom class {@link ReprocessFormatsMap}, used by the
+ * {@code android.scaler.availableInputOutputFormatsMap} key.
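+ *
+ * <p>Data is stored as a flat list of {@code int32} values: one
+ * {@code (INPUT_FORMAT, OUTPUT_FORMAT_COUNT, OUTPUT_0, ..., OUTPUT_{COUNT-1})}
+ * entry per input format, as illustrated in the comments inside {@code marshal} below.</p>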
+ */
+public class MarshalQueryableReprocessFormatsMap
+ implements MarshalQueryable<ReprocessFormatsMap> {
+
+ private class MarshalerReprocessFormatsMap extends Marshaler<ReprocessFormatsMap> {
+ protected MarshalerReprocessFormatsMap(
+ TypeReference<ReprocessFormatsMap> typeReference, int nativeType) {
+ super(MarshalQueryableReprocessFormatsMap.this, typeReference, nativeType);
+ }
+
+ @Override
+ public void marshal(ReprocessFormatsMap value, ByteBuffer buffer) {
+ /*
+ * // writing (static example, DNG+ZSL)
+ * int32_t[] contents = {
+ * RAW_OPAQUE, 3, RAW16, YUV_420_888, BLOB,
+ * RAW16, 2, YUV_420_888, BLOB,
+ * ...,
+ * INPUT_FORMAT, OUTPUT_FORMAT_COUNT, [OUTPUT_0, OUTPUT_1, ..., OUTPUT_FORMAT_COUNT-1]
+ * };
+ */
+ int[] inputs = StreamConfigurationMap.imageFormatToInternal(value.getInputs());
+ for (int input : inputs) {
+ // INPUT_FORMAT
+ buffer.putInt(input);
+
+ int[] outputs =
+ StreamConfigurationMap.imageFormatToInternal(value.getOutputs(input));
+ // OUTPUT_FORMAT_COUNT
+ buffer.putInt(outputs.length);
+
+ // [OUTPUT_0, OUTPUT_1, ..., OUTPUT_FORMAT_COUNT-1]
+ for (int output : outputs) {
+ buffer.putInt(output);
+ }
+ }
+ }
+
+ @Override
+ public ReprocessFormatsMap unmarshal(ByteBuffer buffer) {
+ int len = buffer.remaining() / SIZEOF_INT32;
+ if (buffer.remaining() % SIZEOF_INT32 != 0) {
+ throw new AssertionError("ReprocessFormatsMap was not TYPE_INT32");
+ }
+
+ int[] entries = new int[len];
+
+ IntBuffer intBuffer = buffer.asIntBuffer();
+ intBuffer.get(entries);
+
+ // TODO: consider moving rest of parsing code from ReprocessFormatsMap to here
+
+ return new ReprocessFormatsMap(entries);
+ }
+
+ @Override
+ public int getNativeSize() {
+ return NATIVE_SIZE_DYNAMIC;
+ }
+
+ @Override
+ public int calculateMarshalSize(ReprocessFormatsMap value) {
+ /*
+ * // writing (static example, DNG+ZSL)
+ * int32_t[] contents = {
+ * RAW_OPAQUE, 3, RAW16, YUV_420_888, BLOB,
+ * RAW16, 2, YUV_420_888, BLOB,
+ * ...,
+ * INPUT_FORMAT, OUTPUT_FORMAT_COUNT, [OUTPUT_0, OUTPUT_1, ..., OUTPUT_FORMAT_COUNT-1]
+ * };
+ */
+ int length = 0;
+
+ int[] inputs = value.getInputs();
+ for (int input : inputs) {
+
+ length += 1; // INPUT_FORMAT
+ length += 1; // OUTPUT_FORMAT_COUNT
+
+ int[] outputs = value.getOutputs(input);
+ length += outputs.length; // [OUTPUT_0, OUTPUT_1, ..., OUTPUT_FORMAT_COUNT-1]
+ }
+
+ return length * SIZEOF_INT32;
+ }
+ }
+
+ @Override
+ public Marshaler<ReprocessFormatsMap> createMarshaler(
+ TypeReference<ReprocessFormatsMap> managedType, int nativeType) {
+ return new MarshalerReprocessFormatsMap(managedType, nativeType);
+ }
+
+ @Override
+ public boolean isTypeMappingSupported(TypeReference<ReprocessFormatsMap> managedType,
+ int nativeType) {
+ return nativeType == TYPE_INT32 && managedType.getType().equals(ReprocessFormatsMap.class);
+ }
+}
diff --git a/android/hardware/camera2/marshal/impl/MarshalQueryableRggbChannelVector.java b/android/hardware/camera2/marshal/impl/MarshalQueryableRggbChannelVector.java
new file mode 100644
index 00000000..4253a0a5
--- /dev/null
+++ b/android/hardware/camera2/marshal/impl/MarshalQueryableRggbChannelVector.java
@@ -0,0 +1,75 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.marshal.impl;
+
+import android.hardware.camera2.marshal.Marshaler;
+import android.hardware.camera2.marshal.MarshalQueryable;
+import android.hardware.camera2.params.RggbChannelVector;
+import android.hardware.camera2.utils.TypeReference;
+
+import java.nio.ByteBuffer;
+
+import static android.hardware.camera2.impl.CameraMetadataNative.*;
+import static android.hardware.camera2.marshal.MarshalHelpers.*;
+
+/**
+ * Marshal {@link RggbChannelVector} to/from {@link #TYPE_FLOAT} {@code x 4}
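+ *
+ * <p>The four components are marshaled in {@code R, G_even, G_odd, B} order, matching the
+ * component indices used by {@link RggbChannelVector#getComponent}.</p>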
+ */
+public class MarshalQueryableRggbChannelVector implements MarshalQueryable<RggbChannelVector> {
+ private static final int SIZE = SIZEOF_FLOAT * RggbChannelVector.COUNT;
+
+ private class MarshalerRggbChannelVector extends Marshaler<RggbChannelVector> {
+ protected MarshalerRggbChannelVector(TypeReference<RggbChannelVector> typeReference,
+ int nativeType) {
+ super(MarshalQueryableRggbChannelVector.this, typeReference, nativeType);
+ }
+
+ @Override
+ public void marshal(RggbChannelVector value, ByteBuffer buffer) {
+ for (int i = 0; i < RggbChannelVector.COUNT; ++i) {
+ buffer.putFloat(value.getComponent(i));
+ }
+ }
+
+ @Override
+ public RggbChannelVector unmarshal(ByteBuffer buffer) {
+ float red = buffer.getFloat();
+ float gEven = buffer.getFloat();
+ float gOdd = buffer.getFloat();
+ float blue = buffer.getFloat();
+
+ return new RggbChannelVector(red, gEven, gOdd, blue);
+ }
+
+ @Override
+ public int getNativeSize() {
+ return SIZE;
+ }
+ }
+
+ @Override
+ public Marshaler<RggbChannelVector> createMarshaler(
+ TypeReference<RggbChannelVector> managedType, int nativeType) {
+ return new MarshalerRggbChannelVector(managedType, nativeType);
+ }
+
+ @Override
+ public boolean isTypeMappingSupported(
+ TypeReference<RggbChannelVector> managedType, int nativeType) {
+ return nativeType == TYPE_FLOAT && (RggbChannelVector.class.equals(managedType.getType()));
+ }
+
+}
diff --git a/android/hardware/camera2/marshal/impl/MarshalQueryableSize.java b/android/hardware/camera2/marshal/impl/MarshalQueryableSize.java
new file mode 100644
index 00000000..721644ed
--- /dev/null
+++ b/android/hardware/camera2/marshal/impl/MarshalQueryableSize.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.marshal.impl;
+
+import android.util.Size;
+import android.hardware.camera2.marshal.Marshaler;
+import android.hardware.camera2.marshal.MarshalQueryable;
+import android.hardware.camera2.utils.TypeReference;
+
+import static android.hardware.camera2.impl.CameraMetadataNative.*;
+import static android.hardware.camera2.marshal.MarshalHelpers.*;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Marshal {@link Size} to/from {@code TYPE_INT32}
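+ *
+ * <p>Data is stored as a {@code (width, height)} tuple (int32 x 2).</p>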
+ */
+public class MarshalQueryableSize implements MarshalQueryable<Size> {
+ private static final int SIZE = SIZEOF_INT32 * 2;
+
+ private class MarshalerSize extends Marshaler<Size> {
+ protected MarshalerSize(TypeReference<Size> typeReference, int nativeType) {
+ super(MarshalQueryableSize.this, typeReference, nativeType);
+ }
+
+ @Override
+ public void marshal(Size value, ByteBuffer buffer) {
+ buffer.putInt(value.getWidth());
+ buffer.putInt(value.getHeight());
+ }
+
+ @Override
+ public Size unmarshal(ByteBuffer buffer) {
+ int width = buffer.getInt();
+ int height = buffer.getInt();
+
+ return new Size(width, height);
+ }
+
+ @Override
+ public int getNativeSize() {
+ return SIZE;
+ }
+ }
+
+ @Override
+ public Marshaler<Size> createMarshaler(TypeReference<Size> managedType, int nativeType) {
+ return new MarshalerSize(managedType, nativeType);
+ }
+
+ @Override
+ public boolean isTypeMappingSupported(TypeReference<Size> managedType, int nativeType) {
+ return nativeType == TYPE_INT32 && (Size.class.equals(managedType.getType()));
+ }
+}
diff --git a/android/hardware/camera2/marshal/impl/MarshalQueryableSizeF.java b/android/hardware/camera2/marshal/impl/MarshalQueryableSizeF.java
new file mode 100644
index 00000000..b60a46dc
--- /dev/null
+++ b/android/hardware/camera2/marshal/impl/MarshalQueryableSizeF.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.marshal.impl;
+
+import android.hardware.camera2.marshal.Marshaler;
+import android.hardware.camera2.marshal.MarshalQueryable;
+import android.hardware.camera2.utils.TypeReference;
+import android.util.SizeF;
+
+import static android.hardware.camera2.impl.CameraMetadataNative.*;
+import static android.hardware.camera2.marshal.MarshalHelpers.*;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Marshal {@link SizeF} to/from {@code TYPE_FLOAT}
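+ *
+ * <p>Data is stored as a {@code (width, height)} tuple (float x 2).</p>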
+ */
+public class MarshalQueryableSizeF implements MarshalQueryable<SizeF> {
+
+ private static final int SIZE = SIZEOF_FLOAT * 2;
+
+ private class MarshalerSizeF extends Marshaler<SizeF> {
+
+ protected MarshalerSizeF(TypeReference<SizeF> typeReference, int nativeType) {
+ super(MarshalQueryableSizeF.this, typeReference, nativeType);
+ }
+
+ @Override
+ public void marshal(SizeF value, ByteBuffer buffer) {
+ buffer.putFloat(value.getWidth());
+ buffer.putFloat(value.getHeight());
+ }
+
+ @Override
+ public SizeF unmarshal(ByteBuffer buffer) {
+ float width = buffer.getFloat();
+ float height = buffer.getFloat();
+
+ return new SizeF(width, height);
+ }
+
+ @Override
+ public int getNativeSize() {
+ return SIZE;
+ }
+ }
+
+ @Override
+ public Marshaler<SizeF> createMarshaler(
+ TypeReference<SizeF> managedType, int nativeType) {
+ return new MarshalerSizeF(managedType, nativeType);
+ }
+
+ @Override
+ public boolean isTypeMappingSupported(TypeReference<SizeF> managedType, int nativeType) {
+ return nativeType == TYPE_FLOAT && (SizeF.class.equals(managedType.getType()));
+ }
+}
+
diff --git a/android/hardware/camera2/marshal/impl/MarshalQueryableStreamConfiguration.java b/android/hardware/camera2/marshal/impl/MarshalQueryableStreamConfiguration.java
new file mode 100644
index 00000000..62ace312
--- /dev/null
+++ b/android/hardware/camera2/marshal/impl/MarshalQueryableStreamConfiguration.java
@@ -0,0 +1,80 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.marshal.impl;
+
+import android.hardware.camera2.marshal.Marshaler;
+import android.hardware.camera2.marshal.MarshalQueryable;
+import android.hardware.camera2.params.StreamConfiguration;
+import android.hardware.camera2.utils.TypeReference;
+
+import static android.hardware.camera2.impl.CameraMetadataNative.*;
+import static android.hardware.camera2.marshal.MarshalHelpers.*;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Marshaler for the custom class {@link StreamConfiguration}, used by the
+ * {@code android.scaler.availableStreamConfigurations} key.
+ *
+ * <p>Data is stored as {@code (format, width, height, input?)} tuples (int32).</p>
+ */
+public class MarshalQueryableStreamConfiguration
+ implements MarshalQueryable<StreamConfiguration> {
+ private static final int SIZE = SIZEOF_INT32 * 4;
+
+ private class MarshalerStreamConfiguration extends Marshaler<StreamConfiguration> {
+ protected MarshalerStreamConfiguration(TypeReference<StreamConfiguration> typeReference,
+ int nativeType) {
+ super(MarshalQueryableStreamConfiguration.this, typeReference, nativeType);
+ }
+
+ @Override
+ public void marshal(StreamConfiguration value, ByteBuffer buffer) {
+ buffer.putInt(value.getFormat());
+ buffer.putInt(value.getWidth());
+ buffer.putInt(value.getHeight());
+ buffer.putInt(value.isInput() ? 1 : 0);
+ }
+
+ @Override
+ public StreamConfiguration unmarshal(ByteBuffer buffer) {
+ int format = buffer.getInt();
+ int width = buffer.getInt();
+ int height = buffer.getInt();
+ boolean input = buffer.getInt() != 0;
+
+ return new StreamConfiguration(format, width, height, input);
+ }
+
+ @Override
+ public int getNativeSize() {
+ return SIZE;
+ }
+
+ }
+
+ @Override
+ public Marshaler<StreamConfiguration> createMarshaler(
+ TypeReference<StreamConfiguration> managedType, int nativeType) {
+ return new MarshalerStreamConfiguration(managedType, nativeType);
+ }
+
+ @Override
+ public boolean isTypeMappingSupported(TypeReference<StreamConfiguration> managedType,
+ int nativeType) {
+ return nativeType == TYPE_INT32 && managedType.getType().equals(StreamConfiguration.class);
+ }
+}
diff --git a/android/hardware/camera2/marshal/impl/MarshalQueryableStreamConfigurationDuration.java b/android/hardware/camera2/marshal/impl/MarshalQueryableStreamConfigurationDuration.java
new file mode 100644
index 00000000..fd3dfacb
--- /dev/null
+++ b/android/hardware/camera2/marshal/impl/MarshalQueryableStreamConfigurationDuration.java
@@ -0,0 +1,90 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.marshal.impl;
+
+import android.hardware.camera2.marshal.Marshaler;
+import android.hardware.camera2.marshal.MarshalQueryable;
+import android.hardware.camera2.params.StreamConfigurationDuration;
+import android.hardware.camera2.utils.TypeReference;
+
+import static android.hardware.camera2.impl.CameraMetadataNative.*;
+import static android.hardware.camera2.marshal.MarshalHelpers.*;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Marshaler for the custom class {@link StreamConfigurationDuration}, used for min-frame
+ * and stall durations.
+ *
+ * <p>
+ * Data is stored as {@code (format, width, height, durationNs)} tuples (int64).
+ * </p>
+ */
+public class MarshalQueryableStreamConfigurationDuration
+ implements MarshalQueryable<StreamConfigurationDuration> {
+
+ private static final int SIZE = SIZEOF_INT64 * 4;
+ /**
+     * Values AND-ed with this mask are converted from unsigned int to signed long;
+     * in other words, the sign bit of the int is not extended.
+     */
+ private static final long MASK_UNSIGNED_INT = 0x00000000ffffffffL;
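+    // For example: (0xFFFFFFFF & MASK_UNSIGNED_INT) == 4294967295L, whereas widening
+    // the int 0xFFFFFFFF to long without the mask would sign-extend to -1L.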
+
+ private class MarshalerStreamConfigurationDuration
+ extends Marshaler<StreamConfigurationDuration> {
+
+ protected MarshalerStreamConfigurationDuration(
+ TypeReference<StreamConfigurationDuration> typeReference, int nativeType) {
+ super(MarshalQueryableStreamConfigurationDuration.this, typeReference, nativeType);
+ }
+
+ @Override
+ public void marshal(StreamConfigurationDuration value, ByteBuffer buffer) {
+ buffer.putLong(value.getFormat() & MASK_UNSIGNED_INT); // unsigned int -> long
+ buffer.putLong(value.getWidth());
+ buffer.putLong(value.getHeight());
+ buffer.putLong(value.getDuration());
+ }
+
+ @Override
+ public StreamConfigurationDuration unmarshal(ByteBuffer buffer) {
+ int format = (int)buffer.getLong();
+ int width = (int)buffer.getLong();
+ int height = (int)buffer.getLong();
+ long durationNs = buffer.getLong();
+
+ return new StreamConfigurationDuration(format, width, height, durationNs);
+ }
+
+ @Override
+ public int getNativeSize() {
+ return SIZE;
+ }
+ }
+
+ @Override
+ public Marshaler<StreamConfigurationDuration> createMarshaler(
+ TypeReference<StreamConfigurationDuration> managedType, int nativeType) {
+ return new MarshalerStreamConfigurationDuration(managedType, nativeType);
+ }
+
+ @Override
+ public boolean isTypeMappingSupported(TypeReference<StreamConfigurationDuration> managedType,
+ int nativeType) {
+ return nativeType == TYPE_INT64 &&
+ (StreamConfigurationDuration.class.equals(managedType.getType()));
+ }
+
+}
\ No newline at end of file
diff --git a/android/hardware/camera2/marshal/impl/MarshalQueryableString.java b/android/hardware/camera2/marshal/impl/MarshalQueryableString.java
new file mode 100644
index 00000000..c81e18d8
--- /dev/null
+++ b/android/hardware/camera2/marshal/impl/MarshalQueryableString.java
@@ -0,0 +1,112 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.marshal.impl;
+
+import android.hardware.camera2.marshal.Marshaler;
+import android.hardware.camera2.marshal.MarshalQueryable;
+import android.hardware.camera2.utils.TypeReference;
+import android.util.Log;
+
+import java.nio.ByteBuffer;
+import java.nio.charset.Charset;
+
+import static android.hardware.camera2.impl.CameraMetadataNative.*;
+
+/**
+ * Marshal {@link String} to/from {@link #TYPE_BYTE}.
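+ *
+ * <p>Strings are UTF-8 encoded and NUL-terminated on the wire. For example (illustrative),
+ * {@code "abc"} occupies 4 bytes: {@code 0x61 0x62 0x63 0x00}.</p>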
+ */
+public class MarshalQueryableString implements MarshalQueryable<String> {
+
+ private static final String TAG = MarshalQueryableString.class.getSimpleName();
+ private static final boolean DEBUG = false;
+
+ private static class PreloadHolder {
+ public static final Charset UTF8_CHARSET = Charset.forName("UTF-8");
+ }
+ private static final byte NUL = (byte)'\0'; // used as string terminator
+
+ private class MarshalerString extends Marshaler<String> {
+
+ protected MarshalerString(TypeReference<String> typeReference, int nativeType) {
+ super(MarshalQueryableString.this, typeReference, nativeType);
+ }
+
+ @Override
+ public void marshal(String value, ByteBuffer buffer) {
+ byte[] arr = value.getBytes(PreloadHolder.UTF8_CHARSET);
+
+ buffer.put(arr);
+ buffer.put(NUL); // metadata strings are NUL-terminated
+ }
+
+ @Override
+ public int calculateMarshalSize(String value) {
+ byte[] arr = value.getBytes(PreloadHolder.UTF8_CHARSET);
+
+ return arr.length + 1; // metadata strings are NUL-terminated
+ }
+
+ @Override
+ public String unmarshal(ByteBuffer buffer) {
+ buffer.mark(); // save the current position
+
+ boolean foundNull = false;
+ int stringLength = 0;
+ while (buffer.hasRemaining()) {
+ if (buffer.get() == NUL) {
+ foundNull = true;
+ break;
+ }
+
+ stringLength++;
+ }
+
+ if (DEBUG) {
+ Log.v(TAG,
+ "unmarshal - scanned " + stringLength + " characters; found null? "
+ + foundNull);
+ }
+
+ if (!foundNull) {
+ throw new UnsupportedOperationException("Strings must be null-terminated");
+ }
+
+ buffer.reset(); // go back to the previously marked position
+
+ byte[] strBytes = new byte[stringLength + 1];
+ buffer.get(strBytes, /*dstOffset*/0, stringLength + 1); // including null character
+
+ // not including null character
+ return new String(strBytes, /*offset*/0, stringLength, PreloadHolder.UTF8_CHARSET);
+ }
+
+ @Override
+ public int getNativeSize() {
+ return NATIVE_SIZE_DYNAMIC;
+ }
+ }
+
+ @Override
+ public Marshaler<String> createMarshaler(
+ TypeReference<String> managedType, int nativeType) {
+ return new MarshalerString(managedType, nativeType);
+ }
+
+ @Override
+ public boolean isTypeMappingSupported(TypeReference<String> managedType, int nativeType) {
+ return nativeType == TYPE_BYTE && String.class.equals(managedType.getType());
+ }
+}
diff --git a/android/hardware/camera2/params/BlackLevelPattern.java b/android/hardware/camera2/params/BlackLevelPattern.java
new file mode 100644
index 00000000..6d6c094e
--- /dev/null
+++ b/android/hardware/camera2/params/BlackLevelPattern.java
@@ -0,0 +1,147 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.params;
+
+import java.util.Arrays;
+
+import static com.android.internal.util.Preconditions.checkNotNull;
+
+/**
+ * Immutable class to store a 4-element vector of integers corresponding to a 2x2 pattern
+ * of color channel offsets used for the black level offsets of each color channel.
+ */
+public final class BlackLevelPattern {
+
+ /**
+ * The number of offsets in this vector.
+ */
+ public static final int COUNT = 4;
+
+ /**
+ * Create a new {@link BlackLevelPattern} from a given offset array.
+ *
+ * <p>The given offset array must contain offsets for each color channel in
+ * a 2x2 pattern corresponding to the color filter arrangement. Offsets are
+ * given in row-column scan order.</p>
+ *
+ * @param offsets an array containing a 2x2 pattern of offsets.
+ *
+ * @throws IllegalArgumentException if the given array has an incorrect length.
+ * @throws NullPointerException if the given array is null.
+ * @hide
+ */
+ public BlackLevelPattern(int[] offsets) {
+ if (offsets == null) {
+ throw new NullPointerException("Null offsets array passed to constructor");
+ }
+ if (offsets.length < COUNT) {
+ throw new IllegalArgumentException("Invalid offsets array length");
+ }
+ mCfaOffsets = Arrays.copyOf(offsets, COUNT);
+ }
+
+ /**
+ * Return the color channel offset for a given index into the array of raw pixel values.
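+     *
+     * <p>For example (illustrative): the pattern repeats in 2x2 blocks, so
+     * {@code getOffsetForIndex(2, 3)} returns the same offset as
+     * {@code getOffsetForIndex(0, 1)}.</p>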
+ *
+     * @param column the column index in the raw pixel array.
+ * @param row the row index in the raw pixel array.
+ * @return a color channel offset.
+ *
+     * @throws IllegalArgumentException if the given column or row is negative.
+ */
+ public int getOffsetForIndex(int column, int row) {
+ if (row < 0 || column < 0) {
+ throw new IllegalArgumentException("column, row arguments must be positive");
+ }
+ return mCfaOffsets[((row & 1) << 1) | (column & 1)];
+ }
+
+ /**
+     * Copy the color channel offsets into the destination vector.
+ *
+ * <p>Offsets are given in row-column scan order for a given 2x2 color pattern.</p>
+ *
+ * @param destination an array big enough to hold at least {@value #COUNT} elements after the
+ * {@code offset}
+ * @param offset a non-negative offset into the array
+ *
+ * @throws IllegalArgumentException if the offset is invalid.
+ * @throws ArrayIndexOutOfBoundsException if the destination vector is too small.
+ * @throws NullPointerException if the destination is null.
+ */
+ public void copyTo(int[] destination, int offset) {
+ checkNotNull(destination, "destination must not be null");
+ if (offset < 0) {
+ throw new IllegalArgumentException("Null offset passed to copyTo");
+ }
+ if (destination.length - offset < COUNT) {
+ throw new ArrayIndexOutOfBoundsException("destination too small to fit elements");
+ }
+ for (int i = 0; i < COUNT; ++i) {
+ destination[offset + i] = mCfaOffsets[i];
+ }
+ }
+
+ /**
+ * Check if this {@link BlackLevelPattern} is equal to another {@link BlackLevelPattern}.
+ *
+     * <p>Two vectors are equal if and only if each of their respective elements is equal.</p>
+ *
+ * @return {@code true} if the objects were equal, {@code false} otherwise
+ */
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == null) {
+ return false;
+ } else if (this == obj) {
+ return true;
+ } else if (obj instanceof BlackLevelPattern) {
+ final BlackLevelPattern other = (BlackLevelPattern) obj;
+ return Arrays.equals(other.mCfaOffsets, mCfaOffsets);
+ }
+ return false;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public int hashCode() {
+ return Arrays.hashCode(mCfaOffsets);
+ }
+
+ /**
+ * Return this {@link BlackLevelPattern} as a string representation.
+ *
+ * <p> {@code "BlackLevelPattern([%d, %d], [%d, %d])"}, where each {@code %d} represents one
+ * black level offset of a color channel. The values are in the same order as channels listed
+ * for the CFA layout key (see
+ * {@link android.hardware.camera2.CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT}).
+ * </p>
+ *
+ * @return string representation of {@link BlackLevelPattern}
+ *
+ * @see android.hardware.camera2.CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT
+ */
+ @Override
+ public String toString() {
+ return String.format("BlackLevelPattern([%d, %d], [%d, %d])", mCfaOffsets[0],
+ mCfaOffsets[1], mCfaOffsets[2], mCfaOffsets[3]);
+ }
+
+ private final int[] mCfaOffsets;
+}
diff --git a/android/hardware/camera2/params/ColorSpaceTransform.java b/android/hardware/camera2/params/ColorSpaceTransform.java
new file mode 100644
index 00000000..1e1c4b17
--- /dev/null
+++ b/android/hardware/camera2/params/ColorSpaceTransform.java
@@ -0,0 +1,299 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.params;
+
+import static com.android.internal.util.Preconditions.*;
+
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.utils.HashCodeHelpers;
+import android.util.Rational;
+
+import java.util.Arrays;
+
+/**
+ * Immutable class for describing a 3x3 matrix of {@link Rational} values in row-major order.
+ *
+ * <p>This matrix maps a transform from one color space to another. For the particular color space
+ * source and target, see the appropriate camera metadata documentation for the key that provides
+ * this value.</p>
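+ *
+ * <p>For example (an illustrative sketch, not part of the original documentation), the
+ * identity transform can be constructed from nine {@link Rational} elements:
+ * <pre>{@code
+ * Rational one = new Rational(1, 1);
+ * Rational zero = new Rational(0, 1);
+ * ColorSpaceTransform identity = new ColorSpaceTransform(new Rational[] {
+ *         one, zero, zero,
+ *         zero, one, zero,
+ *         zero, zero, one});
+ * }</pre></p>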
+ *
+ * @see CameraMetadata
+ */
+public final class ColorSpaceTransform {
+
+ /** The number of rows in this matrix. */
+ private static final int ROWS = 3;
+
+ /** The number of columns in this matrix. */
+ private static final int COLUMNS = 3;
+
+ /** The number of total Rational elements in this matrix. */
+ private static final int COUNT = ROWS * COLUMNS;
+
+ /** Number of int elements in a rational. */
+ private static final int RATIONAL_SIZE = 2;
+
+ /** Numerator offset inside a rational (pair). */
+ private static final int OFFSET_NUMERATOR = 0;
+
+ /** Denominator offset inside a rational (pair). */
+ private static final int OFFSET_DENOMINATOR = 1;
+
+ /** Number of int elements in this matrix. */
+ private static final int COUNT_INT = ROWS * COLUMNS * RATIONAL_SIZE;
+
+ /**
+ * Create a new immutable {@link ColorSpaceTransform} instance from a {@link Rational} array.
+ *
+ * <p>The elements must be stored in a row-major order.</p>
+ *
+ * @param elements An array of {@code 9} elements
+ *
+ * @throws IllegalArgumentException
+ * if the count of {@code elements} is not {@code 9}
+ * @throws NullPointerException
+ * if {@code elements} or any sub-element is {@code null}
+ */
+ public ColorSpaceTransform(Rational[] elements) {
+
+ checkNotNull(elements, "elements must not be null");
+ if (elements.length != COUNT) {
+ throw new IllegalArgumentException("elements must be " + COUNT + " length");
+ }
+
+ mElements = new int[COUNT_INT];
+
+ for (int i = 0; i < elements.length; ++i) {
+ checkNotNull(elements, "element[" + i + "] must not be null");
+ mElements[i * RATIONAL_SIZE + OFFSET_NUMERATOR] = elements[i].getNumerator();
+ mElements[i * RATIONAL_SIZE + OFFSET_DENOMINATOR] = elements[i].getDenominator();
+ }
+ }
+
+ /**
+ * Create a new immutable {@link ColorSpaceTransform} instance from an {@code int} array.
+ *
+ * <p>The elements must be stored in a row-major order. Each rational is stored
+ * contiguously as a {@code (numerator, denominator)} pair.</p>
+ *
+ * <p>In particular:<pre>{@code
+ * int[] elements = new int[] {
+ *     N11, D11, N12, D12, N13, D13,
+ *     N21, D21, N22, D22, N23, D23,
+ *     N31, D31, N32, D32, N33, D33
+ * };
+ *
+ * new ColorSpaceTransform(elements)}</pre>
+ *
+ * where {@code Nij} and {@code Dij} are the numerator and denominator for row {@code i} and
+ * column {@code j}.</p>
+ *
+ * @param elements An array of {@code 18} elements
+ *
+ * @throws IllegalArgumentException
+ * if the count of {@code elements} is not {@code 18}
+ * @throws NullPointerException
+ * if {@code elements} is {@code null}
+ */
+ public ColorSpaceTransform(int[] elements) {
+ checkNotNull(elements, "elements must not be null");
+ if (elements.length != COUNT_INT) {
+ throw new IllegalArgumentException("elements must be " + COUNT_INT + " length");
+ }
+
+        // Note: primitive int elements cannot be null, so there is no per-element check here.
+
+ mElements = Arrays.copyOf(elements, elements.length);
+ }
+
+ /**
+ * Get an element of this matrix by its row and column.
+ *
+     * <p>The row must be within the range [0, 3),
+     * and the column must be within the range [0, 3).</p>
+ *
+ * @return element (non-{@code null})
+ *
+ * @throws IllegalArgumentException if column or row was out of range
+ */
+ public Rational getElement(int column, int row) {
+ if (column < 0 || column >= COLUMNS) {
+ throw new IllegalArgumentException("column out of range");
+ } else if (row < 0 || row >= ROWS) {
+ throw new IllegalArgumentException("row out of range");
+ }
+
+ int numerator = mElements[(row * COLUMNS + column) * RATIONAL_SIZE + OFFSET_NUMERATOR];
+ int denominator = mElements[(row * COLUMNS + column) * RATIONAL_SIZE + OFFSET_DENOMINATOR];
+
+ return new Rational(numerator, denominator);
+ }
+
+ /**
+ * Copy the {@link Rational} elements in row-major order from this matrix into the destination.
+ *
+ * @param destination
+ * an array big enough to hold at least {@code 9} elements after the
+ * {@code offset}
+ * @param offset
+ * a non-negative offset into the array
+ * @throws NullPointerException
+ * If {@code destination} was {@code null}
+ * @throws ArrayIndexOutOfBoundsException
+ * If there's not enough room to write the elements at the specified destination and
+ * offset.
+ */
+ public void copyElements(Rational[] destination, int offset) {
+ checkArgumentNonnegative(offset, "offset must not be negative");
+ checkNotNull(destination, "destination must not be null");
+ if (destination.length - offset < COUNT) {
+ throw new ArrayIndexOutOfBoundsException("destination too small to fit elements");
+ }
+
+ for (int i = 0, j = 0; i < COUNT; ++i, j += RATIONAL_SIZE) {
+ int numerator = mElements[j + OFFSET_NUMERATOR];
+ int denominator = mElements[j + OFFSET_DENOMINATOR];
+
+ destination[i + offset] = new Rational(numerator, denominator);
+ }
+ }
+
+ /**
+ * Copy the {@link Rational} elements in row-major order from this matrix into the destination.
+ *
+ * <p>Each element is stored as a contiguous rational packed as a
+ * {@code (numerator, denominator)} pair of ints, identical to the
+ * {@link ColorSpaceTransform#ColorSpaceTransform(int[]) constructor}.</p>
+ *
+ * @param destination
+ * an array big enough to hold at least {@code 18} elements after the
+ * {@code offset}
+ * @param offset
+ * a non-negative offset into the array
+ * @throws NullPointerException
+ * If {@code destination} was {@code null}
+ * @throws ArrayIndexOutOfBoundsException
+ * If there's not enough room to write the elements at the specified destination and
+ * offset.
+ *
+ * @see ColorSpaceTransform#ColorSpaceTransform(int[])
+ */
+ public void copyElements(int[] destination, int offset) {
+ checkArgumentNonnegative(offset, "offset must not be negative");
+ checkNotNull(destination, "destination must not be null");
+ if (destination.length - offset < COUNT_INT) {
+ throw new ArrayIndexOutOfBoundsException("destination too small to fit elements");
+ }
+
+ // Manual copy faster than System#arraycopy for very small loops
+ for (int i = 0; i < COUNT_INT; ++i) {
+ destination[i + offset] = mElements[i];
+ }
+ }
+
+ /**
+ * Check if this {@link ColorSpaceTransform} is equal to another {@link ColorSpaceTransform}.
+ *
+ * <p>Two color space transforms are equal if and only if all of their elements are
+ * {@link Object#equals equal}.</p>
+ *
+ * @return {@code true} if the objects were equal, {@code false} otherwise
+ */
+ @Override
+ public boolean equals(final Object obj) {
+ if (obj == null) {
+ return false;
+ }
+ if (this == obj) {
+ return true;
+ }
+ if (obj instanceof ColorSpaceTransform) {
+ final ColorSpaceTransform other = (ColorSpaceTransform) obj;
+ for (int i = 0, j = 0; i < COUNT; ++i, j += RATIONAL_SIZE) {
+ int numerator = mElements[j + OFFSET_NUMERATOR];
+ int denominator = mElements[j + OFFSET_DENOMINATOR];
+ int numeratorOther = other.mElements[j + OFFSET_NUMERATOR];
+ int denominatorOther = other.mElements[j + OFFSET_DENOMINATOR];
+ Rational r = new Rational(numerator, denominator);
+ Rational rOther = new Rational(numeratorOther, denominatorOther);
+ if (!r.equals(rOther)) {
+ return false;
+ }
+ }
+ return true;
+ }
+ return false;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public int hashCode() {
+ return HashCodeHelpers.hashCode(mElements);
+ }
+
+ /**
+ * Return the color space transform as a string representation.
+ *
+ * <p> Example:
+ * {@code "ColorSpaceTransform([1/1, 0/1, 0/1], [0/1, 1/1, 0/1], [0/1, 0/1, 1/1])"} is an
+ * identity transform. Elements are printed in row major order. </p>
+ *
+ * @return string representation of color space transform
+ */
+ @Override
+ public String toString() {
+ return String.format("ColorSpaceTransform%s", toShortString());
+ }
+
+ /**
+ * Return the color space transform as a compact string representation.
+ *
+ * <p> Example:
+ * {@code "([1/1, 0/1, 0/1], [0/1, 1/1, 0/1], [0/1, 0/1, 1/1])"} is an identity transform.
+ * Elements are printed in row major order. </p>
+ *
+ * @return compact string representation of color space transform
+ */
+ private String toShortString() {
+ StringBuilder sb = new StringBuilder("(");
+ for (int row = 0, i = 0; row < ROWS; row++) {
+ sb.append("[");
+ for (int col = 0; col < COLUMNS; col++, i += RATIONAL_SIZE) {
+ int numerator = mElements[i + OFFSET_NUMERATOR];
+ int denominator = mElements[i + OFFSET_DENOMINATOR];
+ sb.append(numerator);
+ sb.append("/");
+ sb.append(denominator);
+ if (col < COLUMNS - 1) {
+ sb.append(", ");
+ }
+ }
+ sb.append("]");
+ if (row < ROWS - 1) {
+ sb.append(", ");
+ }
+ }
+ sb.append(")");
+ return sb.toString();
+ }
+
+ private final int[] mElements;
+}
diff --git a/android/hardware/camera2/params/Face.java b/android/hardware/camera2/params/Face.java
new file mode 100644
index 00000000..2cd83a32
--- /dev/null
+++ b/android/hardware/camera2/params/Face.java
@@ -0,0 +1,265 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.hardware.camera2.params;
+
+import android.graphics.Point;
+import android.graphics.Rect;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CaptureResult;
+
+/**
+ * Describes a face detected in an image.
+ */
+public final class Face {
+
+ /**
+ * The ID is {@code -1} when the optional set of fields is unsupported.
+ *
+ * @see Face#Face(Rect, int)
+ * @see #getId()
+ */
+ public static final int ID_UNSUPPORTED = -1;
+
+ /**
+ * The minimum possible value for the confidence level.
+ *
+ * @see #getScore()
+ */
+ public static final int SCORE_MIN = 1;
+
+ /**
+ * The maximum possible value for the confidence level.
+ *
+ * @see #getScore()
+ */
+ public static final int SCORE_MAX = 100;
+
+ private final Rect mBounds;
+ private final int mScore;
+ private final int mId;
+ private final Point mLeftEye;
+ private final Point mRightEye;
+ private final Point mMouth;
+
+ /**
+ * Create a new face with all fields set.
+ *
+ * <p>The id, leftEyePosition, rightEyePosition, and mouthPosition are considered optional.
+ * They are only required when the {@link CaptureResult} reports that the value of key
+ * {@link CaptureResult#STATISTICS_FACE_DETECT_MODE} is
+ * {@link CameraMetadata#STATISTICS_FACE_DETECT_MODE_FULL}.
+     * If the id is {@value #ID_UNSUPPORTED} then the leftEyePosition, rightEyePosition, and
+     * mouthPosition are guaranteed to be {@code null}. Otherwise, each of leftEyePosition,
+     * rightEyePosition, and mouthPosition may independently be null or non-null.</p>
+ *
+ * @param bounds Bounds of the face.
+ * @param score Confidence level between {@value #SCORE_MIN}-{@value #SCORE_MAX}.
+ * @param id A unique ID per face visible to the tracker.
+ * @param leftEyePosition The position of the left eye.
+ * @param rightEyePosition The position of the right eye.
+ * @param mouthPosition The position of the mouth.
+ *
+ * @throws IllegalArgumentException
+ * if bounds is {@code null},
+ * or if the confidence is not in the range of
+ * {@value #SCORE_MIN}-{@value #SCORE_MAX},
+ * or if id is {@value #ID_UNSUPPORTED} and
+ * leftEyePosition/rightEyePosition/mouthPosition aren't all null,
+     *             or if id is negative and not {@value #ID_UNSUPPORTED}.
+ *
+ * @hide
+ */
+ public Face(Rect bounds, int score, int id,
+ Point leftEyePosition, Point rightEyePosition, Point mouthPosition) {
+ checkNotNull("bounds", bounds);
+ if (score < SCORE_MIN || score > SCORE_MAX) {
+ throw new IllegalArgumentException("Confidence out of range");
+ } else if (id < 0 && id != ID_UNSUPPORTED) {
+ throw new IllegalArgumentException("Id out of range");
+ }
+ if (id == ID_UNSUPPORTED) {
+ checkNull("leftEyePosition", leftEyePosition);
+ checkNull("rightEyePosition", rightEyePosition);
+ checkNull("mouthPosition", mouthPosition);
+ }
+
+ mBounds = bounds;
+ mScore = score;
+ mId = id;
+ mLeftEye = leftEyePosition;
+ mRightEye = rightEyePosition;
+ mMouth = mouthPosition;
+ }
+
+ /**
+ * Create a new face without the optional fields.
+ *
+ * <p>The id, leftEyePosition, rightEyePosition, and mouthPosition are considered optional.
+ * If the id is {@value #ID_UNSUPPORTED} then the leftEyePosition, rightEyePosition, and
+     * mouthPosition are guaranteed to be {@code null}. Otherwise, each of leftEyePosition,
+     * rightEyePosition, and mouthPosition may independently be null or non-null. When devices
+     * report the value of key {@link CaptureResult#STATISTICS_FACE_DETECT_MODE} as
+     * {@link CameraMetadata#STATISTICS_FACE_DETECT_MODE_SIMPLE} in {@link CaptureResult},
+     * the face id of each face is expected to be {@value #ID_UNSUPPORTED}, and the
+     * leftEyePosition, rightEyePosition, and mouthPosition are expected to be {@code null}
+     * for each face.</p>
+ *
+ * @param bounds Bounds of the face.
+ * @param score Confidence level between {@value #SCORE_MIN}-{@value #SCORE_MAX}.
+ *
+ * @throws IllegalArgumentException
+ * if bounds is {@code null},
+ * or if the confidence is not in the range of
+ * {@value #SCORE_MIN}-{@value #SCORE_MAX}.
+ *
+ * @hide
+ */
+ public Face(Rect bounds, int score) {
+ this(bounds, score, ID_UNSUPPORTED,
+ /*leftEyePosition*/null, /*rightEyePosition*/null, /*mouthPosition*/null);
+ }
+
+ /**
+ * Bounds of the face.
+ *
+ * <p>A rectangle relative to the sensor's
+ * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE}, with (0,0)
+ * representing the top-left corner of the active array rectangle.</p>
+ *
+     * <p>There are no constraints on the Rectangle value other than that it
+     * is not {@code null}.</p>
+ */
+ public Rect getBounds() {
+ return mBounds;
+ }
+
+ /**
+ * The confidence level for the detection of the face.
+ *
+ * <p>The range is {@value #SCORE_MIN} to {@value #SCORE_MAX}.
+ * {@value #SCORE_MAX} is the highest confidence.</p>
+ *
+ * <p>Depending on the device, even very low-confidence faces may be
+ * listed, so applications should filter out faces with low confidence,
+ * depending on the use case. For a typical point-and-shoot camera
+ * application that wishes to display rectangles around detected faces,
+ * filtering out faces with confidence less than half of {@value #SCORE_MAX}
+ * is recommended.</p>
+ *
+ * @see #SCORE_MAX
+ * @see #SCORE_MIN
+ */
+ public int getScore() {
+ return mScore;
+ }
+
+ /**
+     * A unique id per face while the face is visible to the tracker.
+ *
+ * <p>
+ * If the face leaves the field-of-view and comes back, it will get a new
+ * id.</p>
+ *
+     * <p>This is an optional field and may not be supported on all devices.
+     * If the id is {@value #ID_UNSUPPORTED} then the leftEyePosition, rightEyePosition, and
+     * mouthPosition are guaranteed to be {@code null}. Otherwise, each of leftEyePosition,
+     * rightEyePosition, and mouthPosition may independently be null or non-null. When devices
+ * report the value of key {@link CaptureResult#STATISTICS_FACE_DETECT_MODE} as
+ * {@link CameraMetadata#STATISTICS_FACE_DETECT_MODE_SIMPLE} in {@link CaptureResult},
+ * the face id of each face is expected to be {@value #ID_UNSUPPORTED}.</p>
+ *
+ * <p>This value will either be {@value #ID_UNSUPPORTED} or
+ * otherwise non-negative.</p>
+ *
+ * @see #ID_UNSUPPORTED
+ */
+ public int getId() {
+ return mId;
+ }
+
+ /**
+ * The coordinates of the center of the left eye.
+ *
+ * <p>The coordinates are in
+ * the same space as the ones for {@link #getBounds}. This is an
+ * optional field and may not be supported on all devices. If not
+ * supported, the value will always be set to null.
+ * This value is guaranteed to be null if {@link #getId()} returns
+ * {@value #ID_UNSUPPORTED}.</p>
+ *
+ * @return The left eye position, or {@code null} if unknown.
+ */
+ public Point getLeftEyePosition() {
+ return mLeftEye;
+ }
+
+ /**
+ * The coordinates of the center of the right eye.
+ *
+ * <p>The coordinates are
+ * in the same space as the ones for {@link #getBounds}. This is an
+ * optional field and may not be supported on all devices. If not
+ * supported, the value will always be set to null.
+ * This value is guaranteed to be null if {@link #getId()} returns
+ * {@value #ID_UNSUPPORTED}.</p>
+ *
+ * @return The right eye position, or {@code null} if unknown.
+ */
+ public Point getRightEyePosition() {
+ return mRightEye;
+ }
+
+ /**
+ * The coordinates of the center of the mouth.
+ *
+ * <p>The coordinates are in
+ * the same space as the ones for {@link #getBounds}. This is an optional
+ * field and may not be supported on all devices. If not
+ * supported, the value will always be set to null.
+ * This value is guaranteed to be null if {@link #getId()} returns
+ * {@value #ID_UNSUPPORTED}.</p>
+ *
+ * @return The mouth position, or {@code null} if unknown.
+ */
+ public Point getMouthPosition() {
+ return mMouth;
+ }
+
+ /**
+ * Represent the Face as a string for debugging purposes.
+ */
+ @Override
+ public String toString() {
+ return String.format("{ bounds: %s, score: %s, id: %d, " +
+ "leftEyePosition: %s, rightEyePosition: %s, mouthPosition: %s }",
+ mBounds, mScore, mId, mLeftEye, mRightEye, mMouth);
+ }
+
+ private static void checkNotNull(String name, Object obj) {
+ if (obj == null) {
+ throw new IllegalArgumentException(name + " was required, but it was null");
+ }
+ }
+
+ private static void checkNull(String name, Object obj) {
+ if (obj != null) {
+ throw new IllegalArgumentException(name + " was required to be null, but it wasn't");
+ }
+ }
+}
diff --git a/android/hardware/camera2/params/HighSpeedVideoConfiguration.java b/android/hardware/camera2/params/HighSpeedVideoConfiguration.java
new file mode 100644
index 00000000..b4691269
--- /dev/null
+++ b/android/hardware/camera2/params/HighSpeedVideoConfiguration.java
@@ -0,0 +1,172 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.params;
+
+import static com.android.internal.util.Preconditions.*;
+
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.utils.HashCodeHelpers;
+import android.util.Range;
+import android.util.Size;
+
+/**
+ * Immutable class to store the available
+ * {@link CameraCharacteristics#CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS high speed video
+ * configurations}.
+ *
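+ * <p>A construction sketch with hypothetical values (instances are normally created from
+ * camera metadata rather than by applications):</p>
+ * <pre>{@code
+ * HighSpeedVideoConfiguration config =
+ *         new HighSpeedVideoConfiguration(1280, 720, 120, 240, 8);
+ * Range<Integer> fpsRange = config.getFpsRange(); // [120, 240]
+ * }</pre>
+ *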
+ * @see CameraCharacteristics#CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS
+ *
+ * @hide
+ */
+public final class HighSpeedVideoConfiguration {
+ private static final int HIGH_SPEED_MAX_MINIMAL_FPS = 120;
+
+ /**
+ * Create a new {@link HighSpeedVideoConfiguration}.
+ *
+ * @param width image width, in pixels (positive)
+ * @param height image height, in pixels (positive)
+ * @param fpsMin minimum frames per second for the configuration (positive)
+ * @param fpsMax maximum frames per second for the configuration (at least 120)
+ * @param batchSizeMax maximum batch size for the configuration (positive)
+ *
+ * @throws IllegalArgumentException
+ * if width/height/fpsMin/batchSizeMax were not positive or fpsMax was less than 120
+ *
+ * @hide
+ */
+ public HighSpeedVideoConfiguration(
+ final int width, final int height, final int fpsMin, final int fpsMax,
+ final int batchSizeMax) {
+ if (fpsMax < HIGH_SPEED_MAX_MINIMAL_FPS) {
+ throw new IllegalArgumentException("fpsMax must be at least " +
+ HIGH_SPEED_MAX_MINIMAL_FPS);
+ }
+ mFpsMax = fpsMax;
+ mWidth = checkArgumentPositive(width, "width must be positive");
+ mHeight = checkArgumentPositive(height, "height must be positive");
+ mFpsMin = checkArgumentPositive(fpsMin, "fpsMin must be positive");
+ mSize = new Size(mWidth, mHeight);
+ mBatchSizeMax = checkArgumentPositive(batchSizeMax, "batchSizeMax must be positive");
+ mFpsRange = new Range<Integer>(mFpsMin, mFpsMax);
+ }
+
+ /**
+ * Return the width of the high speed video configuration.
+ *
+ * @return width > 0
+ */
+ public int getWidth() {
+ return mWidth;
+ }
+
+ /**
+ * Return the height of the high speed video configuration.
+ *
+ * @return height > 0
+ */
+ public int getHeight() {
+ return mHeight;
+ }
+
+ /**
+ * Return the minimum frames per second of the high speed video configuration.
+ *
+ * @return fpsMin > 0
+ */
+ public int getFpsMin() {
+ return mFpsMin;
+ }
+
+ /**
+ * Return the maximum frames per second of the high speed video configuration.
+ *
+ * @return fpsMax >= 120
+ */
+ public int getFpsMax() {
+ return mFpsMax;
+ }
+
+ /**
+ * Convenience method to return the size of this high speed video configuration.
+ *
+ * @return a Size with positive width and height
+ */
+ public Size getSize() {
+ return mSize;
+ }
+
+ /**
+ * Convenience method to return the max batch size of this high speed video configuration.
+ *
+ * @return the maximum batch size for this high speed video configuration
+ */
+ public int getBatchSizeMax() {
+ return mBatchSizeMax;
+ }
+
+ /**
+ * Convenience method to return the FPS range of this high speed video configuration.
+ *
+ * @return a Range with high bound >= {@value #HIGH_SPEED_MAX_MINIMAL_FPS}
+ */
+ public Range<Integer> getFpsRange() {
+ return mFpsRange;
+ }
+
+ /**
+ * Check if this {@link HighSpeedVideoConfiguration} is equal to another
+ * {@link HighSpeedVideoConfiguration}.
+ *
+ * <p>Two configurations are equal if and only if each of the respective elements is equal.</p>
+ *
+ * @return {@code true} if the objects were equal, {@code false} otherwise
+ */
+ @Override
+ public boolean equals(final Object obj) {
+ if (obj == null) {
+ return false;
+ }
+ if (this == obj) {
+ return true;
+ }
+ if (obj instanceof HighSpeedVideoConfiguration) {
+ final HighSpeedVideoConfiguration other = (HighSpeedVideoConfiguration) obj;
+ return mWidth == other.mWidth &&
+ mHeight == other.mHeight &&
+ mFpsMin == other.mFpsMin &&
+ mFpsMax == other.mFpsMax &&
+ mBatchSizeMax == other.mBatchSizeMax;
+ }
+ return false;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public int hashCode() {
+ // Include mBatchSizeMax, which equals() also compares.
+ return HashCodeHelpers.hashCode(mWidth, mHeight, mFpsMin, mFpsMax, mBatchSizeMax);
+ }
+
+ private final int mWidth;
+ private final int mHeight;
+ private final int mFpsMin;
+ private final int mFpsMax;
+ private final int mBatchSizeMax;
+ private final Size mSize;
+ private final Range<Integer> mFpsRange;
+}
diff --git a/android/hardware/camera2/params/InputConfiguration.java b/android/hardware/camera2/params/InputConfiguration.java
new file mode 100644
index 00000000..d95f889b
--- /dev/null
+++ b/android/hardware/camera2/params/InputConfiguration.java
@@ -0,0 +1,128 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.params;
+
+import android.hardware.camera2.utils.HashCodeHelpers;
+
+/**
+ * Immutable class to store an input configuration that is used to create a reprocessable capture
+ * session.
+ *
+ * @see android.hardware.camera2.CameraDevice#createReprocessableCaptureSession
+ * @see android.hardware.camera2.CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
+ */
+public final class InputConfiguration {
+
+ private final int mWidth;
+ private final int mHeight;
+ private final int mFormat;
+
+ /**
+ * Create an input configuration with the width, height, and user-defined format.
+ *
+ * <p>Images of a user-defined format are accessible by applications. Use
+ * {@link android.hardware.camera2.CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP}
+ * to query supported input formats.</p>
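+ *
+ * <p>For example, a minimal sketch (the size and format here are assumptions; real values
+ * must come from the device's stream configuration map):</p>
+ * <pre>{@code
+ * InputConfiguration inputConfig =
+ *         new InputConfiguration(3264, 2448, ImageFormat.YUV_420_888);
+ * }</pre>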
+ *
+ * @param width Width of the input buffers.
+ * @param height Height of the input buffers.
+ * @param format Format of the input buffers. One of ImageFormat or PixelFormat constants.
+ *
+ * @see android.graphics.ImageFormat
+ * @see android.graphics.PixelFormat
+ * @see android.hardware.camera2.CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
+ */
+ public InputConfiguration(int width, int height, int format) {
+ mWidth = width;
+ mHeight = height;
+ mFormat = format;
+ }
+
+ /**
+ * Get the width of this input configuration.
+ *
+ * @return width of this input configuration.
+ */
+ public int getWidth() {
+ return mWidth;
+ }
+
+ /**
+ * Get the height of this input configuration.
+ *
+ * @return height of this input configuration.
+ */
+ public int getHeight() {
+ return mHeight;
+ }
+
+ /**
+ * Get the format of this input configuration.
+ *
+ * @return format of this input configuration.
+ */
+ public int getFormat() {
+ return mFormat;
+ }
+
+ /**
+ * Check if this InputConfiguration is equal to another InputConfiguration.
+ *
+ * <p>Two input configurations are equal if and only if they have the same widths, heights, and
+ * formats.</p>
+ *
+ * @param obj the object to compare this instance with.
+ *
+ * @return {@code true} if the objects were equal, {@code false} otherwise.
+ */
+ @Override
+ public boolean equals(Object obj) {
+ if (!(obj instanceof InputConfiguration)) {
+ return false;
+ }
+
+ InputConfiguration otherInputConfig = (InputConfiguration) obj;
+
+ if (otherInputConfig.getWidth() == mWidth &&
+ otherInputConfig.getHeight() == mHeight &&
+ otherInputConfig.getFormat() == mFormat) {
+ return true;
+ }
+ return false;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public int hashCode() {
+ return HashCodeHelpers.hashCode(mWidth, mHeight, mFormat);
+ }
+
+ /**
+ * Return this {@link InputConfiguration} as a string representation.
+ *
+ * <p> {@code "InputConfiguration(w:%d, h:%d, format:%d)"}, where {@code %d} represents
+ * the width, height, and format, respectively.</p>
+ *
+ * @return string representation of {@link InputConfiguration}
+ */
+ @Override
+ public String toString() {
+ return String.format("InputConfiguration(w:%d, h:%d, format:%d)", mWidth, mHeight, mFormat);
+ }
+}
diff --git a/android/hardware/camera2/params/LensShadingMap.java b/android/hardware/camera2/params/LensShadingMap.java
new file mode 100644
index 00000000..d6b84f2c
--- /dev/null
+++ b/android/hardware/camera2/params/LensShadingMap.java
@@ -0,0 +1,289 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.params;
+
+import static com.android.internal.util.Preconditions.*;
+import static android.hardware.camera2.params.RggbChannelVector.*;
+
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.utils.HashCodeHelpers;
+
+import java.util.Arrays;
+
+/**
+ * Immutable class for describing a {@code 4 x N x M} lens shading map of floats.
+ *
+ * @see CaptureResult#STATISTICS_LENS_SHADING_CORRECTION_MAP
+ */
+public final class LensShadingMap {
+
+ /**
+ * The smallest gain factor in this map.
+ *
+ * <p>All values in this map will be at least this large.</p>
+ */
+ public static final float MINIMUM_GAIN_FACTOR = 1.0f;
+
+ /**
+ * Create a new immutable LensShadingMap instance.
+ *
+ * <p>The elements must be stored in a row-major order (fully packed).</p>
+ *
+ * <p>This constructor takes over the array; do not write to the array afterwards.</p>
+ *
+ * @param elements
+ * An array of elements whose length is
+ * {@code RggbChannelVector.COUNT * rows * columns}
+ *
+ * @throws IllegalArgumentException
+ * if the {@code elements} array length is invalid,
+ * if any of the sub-elements are not finite or are less than {@value #MINIMUM_GAIN_FACTOR},
+ * or if rows or columns is not positive
+ * @throws NullPointerException
+ * if {@code elements} is {@code null}
+ *
+ * @hide
+ */
+ public LensShadingMap(final float[] elements, final int rows, final int columns) {
+
+ mRows = checkArgumentPositive(rows, "rows must be positive");
+ mColumns = checkArgumentPositive(columns, "columns must be positive");
+ mElements = checkNotNull(elements, "elements must not be null");
+
+ if (elements.length != getGainFactorCount()) {
+ throw new IllegalArgumentException("elements must be " + getGainFactorCount() +
+ " length, received " + elements.length);
+ }
+
+ // Every element must be finite and >= 1.0f
+ checkArrayElementsInRange(elements, MINIMUM_GAIN_FACTOR, Float.MAX_VALUE, "elements");
+ }
+
+ /**
+ * Get the number of rows in this map.
+ */
+ public int getRowCount() {
+ return mRows;
+ }
+
+ /**
+ * Get the number of columns in this map.
+ */
+ public int getColumnCount() {
+ return mColumns;
+ }
+
+ /**
+ * Get the total number of gain factors in this map.
+ *
+ * <p>A single gain factor contains exactly one color channel.
+ * Use with {@link #copyGainFactors} to allocate a large-enough array.</p>
+ */
+ public int getGainFactorCount() {
+ return mRows * mColumns * COUNT;
+ }
+
+ /**
+ * Get a single color channel gain factor from this lens shading map by its row and column.
+ *
+ * <p>The row must be within the range [0, {@link #getRowCount}),
+ * the column must be within the range [0, {@link #getColumnCount}),
+ * and the color channel must be within the range [0, {@value RggbChannelVector#COUNT}).</p>
+ *
+ * <p>The channel order is {@code [R, Geven, Godd, B]}, where
+ * {@code Geven} is the green channel for the even rows of a Bayer pattern, and
+ * {@code Godd} is the green channel for the odd rows.
+ * </p>
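+ *
+ * <p>A lookup sketch (assuming {@code map} was read from
+ * {@link CaptureResult#STATISTICS_LENS_SHADING_CORRECTION_MAP}):</p>
+ * <pre>{@code
+ * float redGain = map.getGainFactor(RggbChannelVector.RED, 0, 0); // top-left red gain
+ * }</pre>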
+ *
+ * @param colorChannel color channel from {@code [R, Geven, Godd, B]}
+ * @param column within the range [0, {@link #getColumnCount})
+ * @param row within the range [0, {@link #getRowCount})
+ *
+ * @return a gain factor >= {@value #MINIMUM_GAIN_FACTOR}
+ *
+ * @throws IllegalArgumentException if any of the parameters was out of range
+ *
+ * @see #RED
+ * @see #GREEN_EVEN
+ * @see #GREEN_ODD
+ * @see #BLUE
+ * @see #getRowCount
+ * @see #getColumnCount
+ */
+ public float getGainFactor(final int colorChannel, final int column, final int row) {
+ // The valid channel range is [0, COUNT); COUNT itself is out of range.
+ if (colorChannel < 0 || colorChannel >= COUNT) {
+ throw new IllegalArgumentException("colorChannel out of range");
+ } else if (column < 0 || column >= mColumns) {
+ throw new IllegalArgumentException("column out of range");
+ } else if (row < 0 || row >= mRows) {
+ throw new IllegalArgumentException("row out of range");
+ }
+
+ return mElements[colorChannel + (row * mColumns + column) * COUNT];
+ }
+
+ /**
+ * Get a gain factor vector from this lens shading map by its row and column.
+ *
+ * <p>The row must be within the range [0, {@link #getRowCount}), and
+ * the column must be within the range [0, {@link #getColumnCount}).</p>
+ *
+ * @param column within the range [0, {@link #getColumnCount})
+ * @param row within the range [0, {@link #getRowCount})
+ *
+ * @return an {@link RggbChannelVector} where each gain factor >= {@value #MINIMUM_GAIN_FACTOR}
+ *
+ * @throws IllegalArgumentException if any of the parameters was out of range
+ *
+ * @see #getRowCount
+ * @see #getColumnCount
+ */
+ public RggbChannelVector getGainFactorVector(final int column, final int row) {
+ if (column < 0 || column >= mColumns) {
+ throw new IllegalArgumentException("column out of range");
+ } else if (row < 0 || row >= mRows) {
+ throw new IllegalArgumentException("row out of range");
+ }
+
+ final int offset = (row * mColumns + column) * COUNT;
+
+ final float red =
+ mElements[RED + offset];
+ final float greenEven =
+ mElements[GREEN_EVEN + offset];
+ final float greenOdd =
+ mElements[GREEN_ODD + offset];
+ final float blue =
+ mElements[BLUE + offset];
+
+ return new RggbChannelVector(red, greenEven, greenOdd, blue);
+ }
+
+ /**
+ * Copy all gain factors in row-major order from this lens shading map into the destination.
+ *
+ * <p>Each gain factor will be >= {@link #MINIMUM_GAIN_FACTOR}.</p>
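+ *
+ * <p>A typical call sketch, sizing the destination from {@link #getGainFactorCount}:</p>
+ * <pre>{@code
+ * float[] gains = new float[map.getGainFactorCount()];
+ * map.copyGainFactors(gains, 0); // copy starting at offset 0
+ * }</pre>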
+ *
+ * @param destination
+ * an array big enough to hold at least {@link #getGainFactorCount}
+ * elements after the {@code offset}
+ * @param offset
+ * a non-negative offset into the array
+ * @throws NullPointerException
+ * If {@code destination} was {@code null}
+ * @throws IllegalArgumentException
+ * If offset was negative
+ * @throws ArrayIndexOutOfBoundsException
+ * If there's not enough room to write the elements at the specified destination and
+ * offset.
+ *
+ * @see CaptureResult#STATISTICS_LENS_SHADING_MAP
+ */
+ public void copyGainFactors(final float[] destination, final int offset) {
+ checkArgumentNonnegative(offset, "offset must not be negative");
+ checkNotNull(destination, "destination must not be null");
+ // The space available after offset is (destination.length - offset), not (length + offset).
+ if (destination.length - offset < getGainFactorCount()) {
+ throw new ArrayIndexOutOfBoundsException("destination too small to fit elements");
+ }
+
+ System.arraycopy(mElements, /*srcPos*/0, destination, offset, getGainFactorCount());
+ }
+
+ /**
+ * Check if this LensShadingMap is equal to another LensShadingMap.
+ *
+ * <p>Two lens shading maps are equal if and only if they have the same rows/columns,
+ * and all of their elements are {@link Object#equals equal}.</p>
+ *
+ * @return {@code true} if the objects were equal, {@code false} otherwise
+ */
+ @Override
+ public boolean equals(final Object obj) {
+ if (obj == null) {
+ return false;
+ }
+ if (this == obj) {
+ return true;
+ }
+ if (obj instanceof LensShadingMap) {
+ final LensShadingMap other = (LensShadingMap) obj;
+ return mRows == other.mRows
+ && mColumns == other.mColumns
+ && Arrays.equals(mElements, other.mElements);
+ }
+ return false;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public int hashCode() {
+ int elemsHash = HashCodeHelpers.hashCode(mElements);
+ return HashCodeHelpers.hashCode(mRows, mColumns, elemsHash);
+ }
+
+ /**
+ * Return the LensShadingMap as a string representation.
+ *
+ * <p> {@code "LensShadingMap{R:([%f, %f, ... %f], ... [%f, %f, ... %f]), G_even:([%f, %f, ...
+ * %f], ... [%f, %f, ... %f]), G_odd:([%f, %f, ... %f], ... [%f, %f, ... %f]), B:([%f, %f, ...
+ * %f], ... [%f, %f, ... %f])}"},
+ * where each {@code %f} represents one gain factor and each {@code [%f, %f, ... %f]} represents
+ * a row of the lens shading map</p>
+ *
+ * @return string representation of {@link LensShadingMap}
+ */
+ @Override
+ public String toString() {
+ StringBuilder str = new StringBuilder();
+ str.append("LensShadingMap{");
+
+ final String[] channelPrefix = {"R:(", "G_even:(", "G_odd:(", "B:("};
+
+ for (int ch = 0; ch < COUNT; ch++) {
+ str.append(channelPrefix[ch]);
+
+ for (int r = 0; r < mRows; r++) {
+ str.append("[");
+ for (int c = 0; c < mColumns; c++) {
+ float gain = getGainFactor(ch, c, r);
+ str.append(gain);
+ if (c < mColumns - 1) {
+ str.append(", ");
+ }
+ }
+ str.append("]");
+ if (r < mRows - 1) {
+ str.append(", ");
+ }
+ }
+
+ str.append(")");
+ if (ch < COUNT - 1) {
+ str.append(", ");
+ }
+ }
+
+ str.append("}");
+ return str.toString();
+ }
+
+ private final int mRows;
+ private final int mColumns;
+ private final float[] mElements;
+}
diff --git a/android/hardware/camera2/params/MeteringRectangle.java b/android/hardware/camera2/params/MeteringRectangle.java
new file mode 100644
index 00000000..b1cea575
--- /dev/null
+++ b/android/hardware/camera2/params/MeteringRectangle.java
@@ -0,0 +1,269 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.params;
+
+import android.util.Size;
+import static com.android.internal.util.Preconditions.*;
+
+import android.graphics.Point;
+import android.graphics.Rect;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.utils.HashCodeHelpers;
+
+/**
+ * An immutable class to represent a rectangle {@code (x, y, width, height)} with an additional
+ * weight component.
+ * <p>
+ * The rectangle is defined to be inclusive of the specified coordinates.
+ * </p>
+ * <p>
+ * When used with a {@link CaptureRequest}, the coordinate system is based on the active pixel
+ * array, with {@code (0,0)} being the top-left pixel in the
+ * {@link CameraCharacteristics#SENSOR_INFO_ACTIVE_ARRAY_SIZE active pixel array}, and
+ * {@code (android.sensor.info.activeArraySize.width - 1,
+ * android.sensor.info.activeArraySize.height - 1)} being the bottom-right pixel in the active pixel
+ * array.
+ * </p>
+ * <p>
+ * The weight must range from {@value #METERING_WEIGHT_MIN} to {@value #METERING_WEIGHT_MAX}
+ * inclusive, and represents a weight for every pixel in the area. This means that a large
+ * metering area with the same weight as a smaller area will have more effect in the metering
+ * result. Metering areas can partially overlap and the camera device will add the weights in the
+ * overlap rectangle.
+ * </p>
+ * <p>
+ * If all rectangles have 0 weight, then no specific metering area needs to be used by the camera
+ * device. If the metering rectangle is outside the used android.scaler.cropRegion returned in
+ * capture result metadata, the camera device will ignore the sections outside the rectangle and
+ * output the used sections in the result metadata.
+ * </p>
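+ *
+ * <p>A construction sketch (the coordinates are hypothetical and must lie within the active
+ * array of the actual device; {@code requestBuilder} is an assumed
+ * {@link CaptureRequest.Builder}):</p>
+ * <pre>{@code
+ * MeteringRectangle region = new MeteringRectangle(100, 100, 400, 400,
+ *         MeteringRectangle.METERING_WEIGHT_MAX);
+ * requestBuilder.set(CaptureRequest.CONTROL_AE_REGIONS, new MeteringRectangle[] {region});
+ * }</pre>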
+ */
+public final class MeteringRectangle {
+ /**
+ * The minimum value of valid metering weight.
+ */
+ public static final int METERING_WEIGHT_MIN = 0;
+
+ /**
+ * The maximum value of valid metering weight.
+ */
+ public static final int METERING_WEIGHT_MAX = 1000;
+
+ /**
+ * Weights set to this value will cause the camera device to ignore this rectangle.
+ * If all metering rectangles are weighted with 0, the camera device will choose its own metering
+ * rectangles.
+ */
+ public static final int METERING_WEIGHT_DONT_CARE = 0;
+
+ private final int mX;
+ private final int mY;
+ private final int mWidth;
+ private final int mHeight;
+ private final int mWeight;
+
+ /**
+ * Create a new metering rectangle.
+ *
+ * @param x coordinate >= 0
+ * @param y coordinate >= 0
+ * @param width width >= 0
+ * @param height height >= 0
+ * @param meteringWeight weight between {@value #METERING_WEIGHT_MIN} and
+ * {@value #METERING_WEIGHT_MAX} inclusively
+ * @throws IllegalArgumentException if any of the parameters were negative, or the weight
+ * was out of range
+ */
+ public MeteringRectangle(int x, int y, int width, int height, int meteringWeight) {
+ mX = checkArgumentNonnegative(x, "x must be nonnegative");
+ mY = checkArgumentNonnegative(y, "y must be nonnegative");
+ mWidth = checkArgumentNonnegative(width, "width must be nonnegative");
+ mHeight = checkArgumentNonnegative(height, "height must be nonnegative");
+ mWeight = checkArgumentInRange(
+ meteringWeight, METERING_WEIGHT_MIN, METERING_WEIGHT_MAX, "meteringWeight");
+ }
+
+ /**
+ * Create a new metering rectangle.
+ *
+ * <p>The point {@code xy}'s data is copied; the reference is not retained.</p>
+ *
+ * @param xy a non-{@code null} {@link Point} with both x,y >= 0
+ * @param dimensions a non-{@code null} {@link android.util.Size Size} with width, height >= 0
+ * @param meteringWeight weight >= 0
+ *
+ * @throws IllegalArgumentException if any of the parameters were negative
+ * @throws NullPointerException if any of the arguments were null
+ */
+ public MeteringRectangle(Point xy, Size dimensions, int meteringWeight) {
+ checkNotNull(xy, "xy must not be null");
+ checkNotNull(dimensions, "dimensions must not be null");
+
+ mX = checkArgumentNonnegative(xy.x, "x must be nonnegative");
+ mY = checkArgumentNonnegative(xy.y, "y must be nonnegative");
+ mWidth = checkArgumentNonnegative(dimensions.getWidth(), "width must be nonnegative");
+ mHeight = checkArgumentNonnegative(dimensions.getHeight(), "height must be nonnegative");
+ mWeight = checkArgumentNonnegative(meteringWeight, "meteringWeight must be nonnegative");
+ }
+
+ /**
+ * Create a new metering rectangle.
+ *
+ * <p>The rectangle data is copied; the reference is not retained.</p>
+ *
+ * @param rect a non-{@code null} rectangle with all x,y,w,h dimensions >= 0
+ * @param meteringWeight weight >= 0
+ *
+ * @throws IllegalArgumentException if any of the parameters were negative
+ * @throws NullPointerException if any of the arguments were null
+ */
+ public MeteringRectangle(Rect rect, int meteringWeight) {
+ checkNotNull(rect, "rect must not be null");
+
+ mX = checkArgumentNonnegative(rect.left, "rect.left must be nonnegative");
+ mY = checkArgumentNonnegative(rect.top, "rect.top must be nonnegative");
+ mWidth = checkArgumentNonnegative(rect.width(), "rect.width must be nonnegative");
+ mHeight = checkArgumentNonnegative(rect.height(), "rect.height must be nonnegative");
+ mWeight = checkArgumentNonnegative(meteringWeight, "meteringWeight must be nonnegative");
+ }
+
+ /**
+ * Return the X coordinate of the left side of the rectangle.
+ *
+ * @return x coordinate >= 0
+ */
+ public int getX() {
+ return mX;
+ }
+
+ /**
+ * Return the Y coordinate of the upper side of the rectangle.
+ *
+ * @return y coordinate >= 0
+ */
+ public int getY() {
+ return mY;
+ }
+
+ /**
+ * Return the width of the rectangle.
+ *
+ * @return width >= 0
+ */
+ public int getWidth() {
+ return mWidth;
+ }
+
+ /**
+ * Return the height of the rectangle.
+ *
+ * @return height >= 0
+ */
+ public int getHeight() {
+ return mHeight;
+ }
+
+ /**
+ * Return the metering weight of the rectangle.
+ *
+ * @return weight >= 0
+ */
+ public int getMeteringWeight() {
+ return mWeight;
+ }
+
+ /**
+ * Convenience method to create the upper-left (X,Y) coordinate as a {@link Point}.
+ *
+ * @return a new {@code (x,y)} {@link Point} with both x,y >= 0
+ */
+ public Point getUpperLeftPoint() {
+ return new Point(mX, mY);
+ }
+
+ /**
+ * Convenience method to create the size from this metering rectangle.
+ *
+ * <p>This strips away the X,Y,weight from the rectangle.</p>
+ *
+ * @return a new {@link Size} with non-negative width and height
+ */
+ public Size getSize() {
+ return new Size(mWidth, mHeight);
+ }
+
+ /**
+ * Convenience method to create a {@link Rect} from this metering rectangle.
+ *
+ * <p>This strips away the weight from the rectangle.</p>
+ *
+ * @return a new {@link Rect} with non-negative x1, y1, x2, y2
+ */
+ public Rect getRect() {
+ return new Rect(mX, mY, mX + mWidth, mY + mHeight);
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public boolean equals(final Object other) {
+ return other instanceof MeteringRectangle && equals((MeteringRectangle)other);
+ }
+
+ /**
+ * Compare two metering rectangles to see if they are equal.
+ *
+ * Two weighted rectangles are only considered equal if each of their components
+ * (x, y, width, height, weight) is respectively equal.
+ *
+ * @param other Another MeteringRectangle
+ *
+ * @return {@code true} if the metering rectangles are equal, {@code false} otherwise
+ */
+ public boolean equals(final MeteringRectangle other) {
+ if (other == null) {
+ return false;
+ }
+
+ return (mX == other.mX
+ && mY == other.mY
+ && mWidth == other.mWidth
+ && mHeight == other.mHeight
+ && mWeight == other.mWeight);
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public int hashCode() {
+ return HashCodeHelpers.hashCode(mX, mY, mWidth, mHeight, mWeight);
+ }
+
+ /**
+ * Return the metering rectangle as a string representation
+ * {@code "(x:%d, y:%d, w:%d, h:%d, wt:%d)"} where each {@code %d} respectively represents
+ * the x, y, width, height, and weight points.
+ *
+ * @return string representation of the metering rectangle
+ */
+ @Override
+ public String toString() {
+ return String.format("(x:%d, y:%d, w:%d, h:%d, wt:%d)", mX, mY, mWidth, mHeight, mWeight);
+ }
+}
diff --git a/android/hardware/camera2/params/OutputConfiguration.java b/android/hardware/camera2/params/OutputConfiguration.java
new file mode 100644
index 00000000..2b317d67
--- /dev/null
+++ b/android/hardware/camera2/params/OutputConfiguration.java
@@ -0,0 +1,613 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package android.hardware.camera2.params;
+
+import android.annotation.NonNull;
+import android.annotation.Nullable;
+import android.annotation.SystemApi;
+import android.graphics.ImageFormat;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.utils.HashCodeHelpers;
+import android.hardware.camera2.utils.SurfaceUtils;
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.util.Log;
+import android.util.Size;
+import android.view.Surface;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Collections;
+import java.util.ArrayList;
+
+import static com.android.internal.util.Preconditions.*;
+
+/**
+ * A class for describing camera output, which contains a {@link Surface} and its specific
+ * configuration for creating a capture session.
+ *
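+ * <p>A minimal usage sketch (assuming {@code surface}, {@code stateCallback}, and
+ * {@code handler} are valid, and {@code cameraDevice} is an open {@link CameraDevice}):</p>
+ * <pre>{@code
+ * OutputConfiguration outputConfig = new OutputConfiguration(surface);
+ * cameraDevice.createCaptureSessionByOutputConfigurations(
+ *         Arrays.asList(outputConfig), stateCallback, handler);
+ * }</pre>
+ *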
+ * @see CameraDevice#createCaptureSessionByOutputConfigurations
+ *
+ */
+public final class OutputConfiguration implements Parcelable {
+
+ /**
+ * Rotation constant: 0 degree rotation (no rotation)
+ *
+ * @hide
+ */
+ @SystemApi
+ public static final int ROTATION_0 = 0;
+
+ /**
+ * Rotation constant: 90 degree counterclockwise rotation.
+ *
+ * @hide
+ */
+ @SystemApi
+ public static final int ROTATION_90 = 1;
+
+ /**
+ * Rotation constant: 180 degree counterclockwise rotation.
+ *
+ * @hide
+ */
+ @SystemApi
+ public static final int ROTATION_180 = 2;
+
+ /**
+ * Rotation constant: 270 degree counterclockwise rotation.
+ *
+ * @hide
+ */
+ @SystemApi
+ public static final int ROTATION_270 = 3;
+
+ /**
+ * Invalid surface group ID.
+ *
+ * <p>An {@link OutputConfiguration} with this value indicates that the included surface
+ * doesn't belong to any surface group.</p>
+ */
+ public static final int SURFACE_GROUP_ID_NONE = -1;
+
+ /**
+ * Create a new {@link OutputConfiguration} instance with a {@link Surface}.
+ *
+ * @param surface
+ * A Surface for camera to output to.
+ *
+ * <p>This constructor creates a default configuration, with a surface group ID of
+ * {@value #SURFACE_GROUP_ID_NONE}.</p>
+ *
+ */
+ public OutputConfiguration(@NonNull Surface surface) {
+ this(SURFACE_GROUP_ID_NONE, surface, ROTATION_0);
+ }
+
+ /**
+ * Unknown surface source type.
+ */
+ private static final int SURFACE_TYPE_UNKNOWN = -1;
+
+ /**
+ * The surface is obtained from {@link android.view.SurfaceView}.
+ */
+ private static final int SURFACE_TYPE_SURFACE_VIEW = 0;
+
+ /**
+ * The surface is obtained from {@link android.graphics.SurfaceTexture}.
+ */
+ private static final int SURFACE_TYPE_SURFACE_TEXTURE = 1;
+
+ /**
+ * Maximum number of surfaces supported by one {@link OutputConfiguration}.
+ *
+ * <p>The combined number of surfaces added by the constructor and
+ * {@link OutputConfiguration#addSurface} should not exceed this value.</p>
+ *
+ */
+ private static final int MAX_SURFACES_COUNT = 2;
+
+ /**
+ * Create a new {@link OutputConfiguration} instance with a {@link Surface},
+ * with a surface group ID.
+ *
+ * <p>
+ * A surface group ID is used to identify which surface group this output surface belongs to. A
+ * surface group is a group of output surfaces that are not intended to receive camera output
+ * buffer streams simultaneously. The {@link CameraDevice} may be able to share the buffers used
+ * by all the surfaces from the same surface group, therefore may reduce the overall memory
+ * footprint. The application should only set the same set ID for the streams that are not
+ * simultaneously streaming. A negative ID indicates that this surface doesn't belong to any
+ * surface group. The default value is {@value #SURFACE_GROUP_ID_NONE}.</p>
+ *
+ * <p>For example, a video chat application that has an adaptive output resolution feature would
+ * need two (or more) output resolutions, to switch resolutions without any output glitches.
+ * However, at any given time, only one output is active to minimize outgoing network bandwidth
+ * and encoding overhead. To save memory, the application should set the video outputs to have
+ * the same non-negative group ID, so that the camera device can share the same memory region
+ * for the alternating outputs.</p>
+ *
+ * <p>It is not an error to include output streams with the same group ID in the same capture
+ * request, but the resulting memory consumption may be higher than if the two streams were
+ * not in the same surface group to begin with, especially if the outputs have substantially
+ * different dimensions.</p>
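+ *
+ * <p>A sketch of the video-chat example above ({@code lowResSurface} and
+ * {@code highResSurface} are assumed to be the alternating outputs):</p>
+ * <pre>{@code
+ * int groupId = 1; // any non-negative ID shared by the alternating outputs
+ * OutputConfiguration lowRes = new OutputConfiguration(groupId, lowResSurface);
+ * OutputConfiguration highRes = new OutputConfiguration(groupId, highResSurface);
+ * }</pre>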
+ *
+ * @param surfaceGroupId
+ * A group ID for this output, used for sharing memory between multiple outputs.
+ * @param surface
+ * A Surface for camera to output to.
+ *
+ */
+ public OutputConfiguration(int surfaceGroupId, @NonNull Surface surface) {
+ this(surfaceGroupId, surface, ROTATION_0);
+ }
+
+ /**
+ * Create a new {@link OutputConfiguration} instance.
+ *
+ * <p>This constructor takes an argument for desired camera rotation</p>
+ *
+ * @param surface
+ * A Surface for camera to output to.
+ * @param rotation
+ * The desired rotation to be applied on camera output. Value must be one of
+ * ROTATION_[0, 90, 180, 270]. Note that when the rotation is 90 or 270 degrees,
+ * application should make sure corresponding surface size has width and height
+ * transposed relative to the width and height without rotation. For example,
+ * if the application needs the camera to capture a 1280x720 picture rotated by 90 degrees, the
+ * application should set rotation to {@code ROTATION_90} and make sure the
+ * corresponding Surface size is 720x1280. Note that {@link CameraDevice} might
+ * throw {@code IllegalArgumentException} if device cannot perform such rotation.
+ * @hide
+ */
+ @SystemApi
+ public OutputConfiguration(@NonNull Surface surface, int rotation) {
+ this(SURFACE_GROUP_ID_NONE, surface, rotation);
+ }
+
+ /**
+ * Create a new {@link OutputConfiguration} instance, with rotation and a group ID.
+ *
+ * <p>This constructor takes an argument for desired camera rotation and for the surface group
+ * ID. See {@link #OutputConfiguration(int, Surface)} for details of the group ID.</p>
+ *
+ * @param surfaceGroupId
+ * A group ID for this output, used for sharing memory between multiple outputs.
+ * @param surface
+ * A Surface for camera to output to.
+ * @param rotation
+ * The desired rotation to be applied on camera output. Value must be one of
+ * ROTATION_[0, 90, 180, 270]. Note that when the rotation is 90 or 270 degrees,
+ * application should make sure corresponding surface size has width and height
+ * transposed relative to the width and height without rotation. For example,
+ * if the application needs the camera to capture a 1280x720 picture rotated by 90 degrees, the
+ * application should set rotation to {@code ROTATION_90} and make sure the
+ * corresponding Surface size is 720x1280. Note that {@link CameraDevice} might
+ * throw {@code IllegalArgumentException} if device cannot perform such rotation.
+ * @hide
+ */
+ @SystemApi
+ public OutputConfiguration(int surfaceGroupId, @NonNull Surface surface, int rotation) {
+ checkNotNull(surface, "Surface must not be null");
+ checkArgumentInRange(rotation, ROTATION_0, ROTATION_270, "Rotation constant");
+ mSurfaceGroupId = surfaceGroupId;
+ mSurfaceType = SURFACE_TYPE_UNKNOWN;
+ mSurfaces = new ArrayList<Surface>();
+ mSurfaces.add(surface);
+ mRotation = rotation;
+ mConfiguredSize = SurfaceUtils.getSurfaceSize(surface);
+ mConfiguredFormat = SurfaceUtils.getSurfaceFormat(surface);
+ mConfiguredDataspace = SurfaceUtils.getSurfaceDataspace(surface);
+ mConfiguredGenerationId = surface.getGenerationId();
+ mIsDeferredConfig = false;
+ mIsShared = false;
+ }
+
+ /**
+ * Create a new {@link OutputConfiguration} instance, with desired Surface size and Surface
+ * source class.
+ * <p>
+ * This constructor takes an argument for desired Surface size and the Surface source class
+ * without providing the actual output Surface. This is used to setup an output configuration
+ * with a deferred Surface. The application can use this output configuration to create a
+ * session.
+ * </p>
+ * <p>
+ * However, the actual output Surface must be set via {@link #addSurface} and the deferred
+ * Surface configuration must be finalized via {@link
+ * CameraCaptureSession#finalizeOutputConfigurations} before submitting a request with this
+ * Surface target. The deferred Surface can only be obtained either from {@link
+ * android.view.SurfaceView} by calling {@link android.view.SurfaceHolder#getSurface}, or from
+ * {@link android.graphics.SurfaceTexture} via
+ * {@link android.view.Surface#Surface(android.graphics.SurfaceTexture)}).
+ * </p>
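+ *
+ * <p>A sketch of the deferred flow (the size is hypothetical; {@code surfaceTexture},
+ * {@code captureSession}, and the session setup in between are assumed):</p>
+ * <pre>{@code
+ * OutputConfiguration deferredConfig =
+ *         new OutputConfiguration(new Size(1920, 1080), SurfaceTexture.class);
+ * // ... create the session with deferredConfig, then once the texture is ready:
+ * deferredConfig.addSurface(new Surface(surfaceTexture));
+ * captureSession.finalizeOutputConfigurations(Arrays.asList(deferredConfig));
+ * }</pre>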
+ *
+ * @param surfaceSize Size for the deferred surface.
+ * @param klass a non-{@code null} {@link Class} object reference that indicates the source of
+ * this surface. Only {@link android.view.SurfaceHolder SurfaceHolder.class} and
+ * {@link android.graphics.SurfaceTexture SurfaceTexture.class} are supported.
+ * @throws IllegalArgumentException if the Surface source class is not supported, or Surface
+ * size is zero.
+ */
+ public <T> OutputConfiguration(@NonNull Size surfaceSize, @NonNull Class<T> klass) {
+ checkNotNull(klass, "surfaceSize must not be null");
+ checkNotNull(klass, "klass must not be null");
+ if (klass == android.view.SurfaceHolder.class) {
+ mSurfaceType = SURFACE_TYPE_SURFACE_VIEW;
+ } else if (klass == android.graphics.SurfaceTexture.class) {
+ mSurfaceType = SURFACE_TYPE_SURFACE_TEXTURE;
+ } else {
+ mSurfaceType = SURFACE_TYPE_UNKNOWN;
+ throw new IllegalArgumentException("Unknow surface source class type");
+ }
+
+ if (surfaceSize.getWidth() == 0 || surfaceSize.getHeight() == 0) {
+ throw new IllegalArgumentException("Surface size needs to be non-zero");
+ }
+
+ mSurfaceGroupId = SURFACE_GROUP_ID_NONE;
+ mSurfaces = new ArrayList<Surface>();
+ mRotation = ROTATION_0;
+ mConfiguredSize = surfaceSize;
+ mConfiguredFormat = StreamConfigurationMap.imageFormatToInternal(ImageFormat.PRIVATE);
+ mConfiguredDataspace = StreamConfigurationMap.imageFormatToDataspace(ImageFormat.PRIVATE);
+ mConfiguredGenerationId = 0;
+ mIsDeferredConfig = true;
+ mIsShared = false;
+ }
+
+ /**
+ * Enable multiple surfaces sharing the same OutputConfiguration
+ *
+ * <p>For advanced use cases, a camera application may require more streams than the combination
+ * guaranteed by {@link CameraDevice#createCaptureSession}. In this case, more than one
+ * compatible surface can be attached to an OutputConfiguration so that they map to one
+ * camera stream, and the outputs share memory buffers when possible. </p>
+ *
+ * <p>Two surfaces are compatible in the cases below:</p>
+ *
+ * <ol>
+ * <li> Surfaces with the same size, format, dataSpace, and Surface source class. In this case,
+ * {@link CameraDevice#createCaptureSessionByOutputConfigurations} is guaranteed to succeed.</li>
+ *
+ * <li> Surfaces with the same size, format, and dataSpace, but different Surface source classes
+ * that are generally not compatible. However, on some devices, the underlying camera device is
+ * able to use the same buffer layout for both surfaces. The only way to discover if this is the
+ * case is to create a capture session with that output configuration. For example, if the
+ * camera device uses the same private buffer format between a SurfaceView/SurfaceTexture and a
+ * MediaRecorder/MediaCodec, {@link CameraDevice#createCaptureSessionByOutputConfigurations}
+ * will succeed. Otherwise, it fails with {@link
+ * CameraCaptureSession.StateCallback#onConfigureFailed}.</li>
+ * </ol>
+ *
+ * <p>To enable surface sharing, this function must be called before {@link
+ * CameraDevice#createCaptureSessionByOutputConfigurations}. Calling this function after {@link
+ * CameraDevice#createCaptureSessionByOutputConfigurations} has no effect.</p>
+ *
+ * <p>Up to 2 surfaces can be shared for an OutputConfiguration. The supported surfaces for
+ * sharing must be of type SurfaceTexture, SurfaceView, MediaRecorder, MediaCodec, or
+ * implementation defined ImageReader.</p>
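+ *
+ * <p>A sharing sketch ({@code previewSurface} and {@code recorderSurface} are assumed to be
+ * compatible as described above):</p>
+ * <pre>{@code
+ * OutputConfiguration sharedConfig = new OutputConfiguration(previewSurface);
+ * sharedConfig.enableSurfaceSharing();
+ * sharedConfig.addSurface(recorderSurface);
+ * }</pre>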
+ */
+ public void enableSurfaceSharing() {
+ mIsShared = true;
+ }
+
+ /**
+ * Check if this configuration has deferred configuration.
+ *
+ * <p>This will return true if the output configuration was constructed with a deferred surface by
+ * {@link OutputConfiguration#OutputConfiguration(Size, Class)}. It will return true even after
+ * the deferred surface is added later by {@link OutputConfiguration#addSurface}.</p>
+ *
+ * @return true if this configuration has deferred surface.
+ * @hide
+ */
+ public boolean isDeferredConfiguration() {
+ return mIsDeferredConfig;
+ }
+
+ /**
+ * Add a surface to this OutputConfiguration.
+ *
+ * <p> This function can be called before or after {@link
+ * CameraDevice#createCaptureSessionByOutputConfigurations}. If it's called after,
+ * the application must finalize the capture session with
+ * {@link CameraCaptureSession#finalizeOutputConfigurations}.
+ * </p>
+ *
+ * <p> If the OutputConfiguration was constructed with a deferred surface by {@link
+ * OutputConfiguration#OutputConfiguration(Size, Class)}, the added surface must be obtained
+ * from {@link android.view.SurfaceView} by calling {@link android.view.SurfaceHolder#getSurface},
+ * or from {@link android.graphics.SurfaceTexture} via
+ * {@link android.view.Surface#Surface(android.graphics.SurfaceTexture)}).</p>
+ *
+ * <p> If the OutputConfiguration was constructed by other constructors, the added
+ * surface must be compatible with the existing surface. See {@link #enableSurfaceSharing} for
+ * details of compatible surfaces.</p>
+ *
+ * <p> If the OutputConfiguration already contains a Surface, {@link #enableSurfaceSharing} must
+ * be called before calling this function to add a new Surface.</p>
+ *
+ * @param surface The surface to be added.
+ * @throws IllegalArgumentException if the Surface is invalid, the Surface's
+ * dataspace/format doesn't match, or adding the Surface would exceed number of
+ * shared surfaces supported.
+ * @throws IllegalStateException if the Surface was already added to this OutputConfiguration,
+ * or if the OutputConfiguration is not shared and it already has a surface associated
+ * with it.
+ */
+ public void addSurface(@NonNull Surface surface) {
+ checkNotNull(surface, "Surface must not be null");
+ if (mSurfaces.contains(surface)) {
+ throw new IllegalStateException("Surface is already added!");
+ }
+ if (mSurfaces.size() == 1 && !mIsShared) {
+ throw new IllegalStateException("Cannot have 2 surfaces for a non-sharing configuration");
+ }
+ if (mSurfaces.size() + 1 > MAX_SURFACES_COUNT) {
+ throw new IllegalArgumentException("Exceeds maximum number of surfaces");
+ }
+
+ // This will throw IAE if the surface was abandoned.
+ Size surfaceSize = SurfaceUtils.getSurfaceSize(surface);
+ if (!surfaceSize.equals(mConfiguredSize)) {
+ Log.w(TAG, "Added surface size " + surfaceSize +
+ " is different than pre-configured size " + mConfiguredSize +
+ ", the pre-configured size will be used.");
+ }
+
+ if (mConfiguredFormat != SurfaceUtils.getSurfaceFormat(surface)) {
+ throw new IllegalArgumentException("The format of added surface format doesn't match");
+ }
+
+ // If the surface format is PRIVATE, do not enforce dataSpace because camera device may
+ // override it.
+ if (mConfiguredFormat != ImageFormat.PRIVATE &&
+ mConfiguredDataspace != SurfaceUtils.getSurfaceDataspace(surface)) {
+ throw new IllegalArgumentException("The dataspace of added surface doesn't match");
+ }
+
+ mSurfaces.add(surface);
+ }
+
+ /**
+ * Create a new {@link OutputConfiguration} instance with another {@link OutputConfiguration}
+ * instance.
+ *
+ * @param other Another {@link OutputConfiguration} instance to be copied.
+ *
+ * @hide
+ */
+ public OutputConfiguration(@NonNull OutputConfiguration other) {
+ if (other == null) {
+ throw new IllegalArgumentException("OutputConfiguration shouldn't be null");
+ }
+
+ this.mSurfaces = other.mSurfaces;
+ this.mRotation = other.mRotation;
+ this.mSurfaceGroupId = other.mSurfaceGroupId;
+ this.mSurfaceType = other.mSurfaceType;
+ this.mConfiguredDataspace = other.mConfiguredDataspace;
+ this.mConfiguredFormat = other.mConfiguredFormat;
+ this.mConfiguredSize = other.mConfiguredSize;
+ this.mConfiguredGenerationId = other.mConfiguredGenerationId;
+ this.mIsDeferredConfig = other.mIsDeferredConfig;
+ this.mIsShared = other.mIsShared;
+ }
+
+ /**
+ * Create an OutputConfiguration from Parcel.
+ */
+ private OutputConfiguration(@NonNull Parcel source) {
+ int rotation = source.readInt();
+ int surfaceSetId = source.readInt();
+ int surfaceType = source.readInt();
+ int width = source.readInt();
+ int height = source.readInt();
+ boolean isDeferred = source.readInt() == 1;
+ // Read isShared to mirror writeToParcel, which writes it right after isDeferred.
+ boolean isShared = source.readInt() == 1;
+ ArrayList<Surface> surfaces = new ArrayList<Surface>();
+ source.readTypedList(surfaces, Surface.CREATOR);
+
+ checkArgumentInRange(rotation, ROTATION_0, ROTATION_270, "Rotation constant");
+
+ mSurfaceGroupId = surfaceSetId;
+ mRotation = rotation;
+ mSurfaces = surfaces;
+ mConfiguredSize = new Size(width, height);
+ mIsDeferredConfig = isDeferred;
+ mIsShared = isShared;
+ if (mSurfaces.size() > 0) {
+ mSurfaceType = SURFACE_TYPE_UNKNOWN;
+ mConfiguredFormat = SurfaceUtils.getSurfaceFormat(mSurfaces.get(0));
+ mConfiguredDataspace = SurfaceUtils.getSurfaceDataspace(mSurfaces.get(0));
+ mConfiguredGenerationId = mSurfaces.get(0).getGenerationId();
+ } else {
+ mSurfaceType = surfaceType;
+ mConfiguredFormat = StreamConfigurationMap.imageFormatToInternal(ImageFormat.PRIVATE);
+ mConfiguredDataspace =
+ StreamConfigurationMap.imageFormatToDataspace(ImageFormat.PRIVATE);
+ mConfiguredGenerationId = 0;
+ }
+ }
+
+ /**
+ * Get the {@link Surface} associated with this {@link OutputConfiguration}.
+ *
+ * If more than one surface is associated with this {@link OutputConfiguration}, return the
+ * first one as specified in the constructor or {@link OutputConfiguration#addSurface}.
+ */
+ public @Nullable Surface getSurface() {
+ if (mSurfaces.size() == 0) {
+ return null;
+ }
+
+ return mSurfaces.get(0);
+ }
+
+ /**
+ * Get the immutable list of surfaces associated with this {@link OutputConfiguration}.
+ *
+ * @return the list of surfaces associated with this {@link OutputConfiguration} as specified in
+ * the constructor and {@link OutputConfiguration#addSurface}. The list should not be modified.
+ */
+ @NonNull
+ public List<Surface> getSurfaces() {
+ return Collections.unmodifiableList(mSurfaces);
+ }
+
+ /**
+ * Get the rotation associated with this {@link OutputConfiguration}.
+ *
+ * @return the rotation associated with this {@link OutputConfiguration}.
+ * Value will be one of ROTATION_[0, 90, 180, 270]
+ *
+ * @hide
+ */
+ @SystemApi
+ public int getRotation() {
+ return mRotation;
+ }
+
+ /**
+ * Get the surface group ID associated with this {@link OutputConfiguration}.
+ *
+ * @return the surface group ID associated with this {@link OutputConfiguration}.
+ * The default value is {@value #SURFACE_GROUP_ID_NONE}.
+ */
+ public int getSurfaceGroupId() {
+ return mSurfaceGroupId;
+ }
+
+ public static final Parcelable.Creator<OutputConfiguration> CREATOR =
+ new Parcelable.Creator<OutputConfiguration>() {
+ @Override
+ public OutputConfiguration createFromParcel(Parcel source) {
+ try {
+ OutputConfiguration outputConfiguration = new OutputConfiguration(source);
+ return outputConfiguration;
+ } catch (Exception e) {
+ Log.e(TAG, "Exception creating OutputConfiguration from parcel", e);
+ return null;
+ }
+ }
+
+ @Override
+ public OutputConfiguration[] newArray(int size) {
+ return new OutputConfiguration[size];
+ }
+ };
+
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ @Override
+ public void writeToParcel(Parcel dest, int flags) {
+ if (dest == null) {
+ throw new IllegalArgumentException("dest must not be null");
+ }
+ dest.writeInt(mRotation);
+ dest.writeInt(mSurfaceGroupId);
+ dest.writeInt(mSurfaceType);
+ dest.writeInt(mConfiguredSize.getWidth());
+ dest.writeInt(mConfiguredSize.getHeight());
+ dest.writeInt(mIsDeferredConfig ? 1 : 0);
+ dest.writeInt(mIsShared ? 1 : 0);
+ dest.writeTypedList(mSurfaces);
+ }
+
+ /**
+ * Check if this {@link OutputConfiguration} is equal to another {@link OutputConfiguration}.
+ *
+ * <p>Two output configurations are equal if and only if the underlying surfaces, surface
+ * properties (width, height, format, dataspace) when the output configurations are created,
+ * and all other configuration parameters are equal. </p>
+ *
+ * @return {@code true} if the objects were equal, {@code false} otherwise
+ */
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == null) {
+ return false;
+ } else if (this == obj) {
+ return true;
+ } else if (obj instanceof OutputConfiguration) {
+ final OutputConfiguration other = (OutputConfiguration) obj;
+ if (mRotation != other.mRotation ||
+ !mConfiguredSize.equals(other.mConfiguredSize) ||
+ mConfiguredFormat != other.mConfiguredFormat ||
+ mSurfaceGroupId != other.mSurfaceGroupId ||
+ mSurfaceType != other.mSurfaceType ||
+ mIsDeferredConfig != other.mIsDeferredConfig ||
+ mIsShared != other.mIsShared ||
+ mConfiguredDataspace != other.mConfiguredDataspace ||
+ mConfiguredGenerationId != other.mConfiguredGenerationId)
+ return false;
+
+ int minLen = Math.min(mSurfaces.size(), other.mSurfaces.size());
+ for (int i = 0; i < minLen; i++) {
+ if (mSurfaces.get(i) != other.mSurfaces.get(i))
+ return false;
+ }
+
+ return true;
+ }
+ return false;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public int hashCode() {
+ // Need to ensure that the hashcode remains unchanged after adding a deferred surface.
+ // Otherwise the deferred output configuration will be lost in the camera stream map
+ // after the deferred surface is set.
+ if (mIsDeferredConfig) {
+ return HashCodeHelpers.hashCode(
+ mRotation, mConfiguredSize.hashCode(), mConfiguredFormat, mConfiguredDataspace,
+ mSurfaceGroupId, mSurfaceType, mIsShared ? 1 : 0);
+ }
+
+ return HashCodeHelpers.hashCode(
+ mRotation, mSurfaces.hashCode(), mConfiguredGenerationId,
+ mConfiguredSize.hashCode(), mConfiguredFormat,
+ mConfiguredDataspace, mSurfaceGroupId, mIsShared ? 1 : 0);
+ }
+
+ private static final String TAG = "OutputConfiguration";
+ private ArrayList<Surface> mSurfaces;
+ private final int mRotation;
+ private final int mSurfaceGroupId;
+ // Surface source type, this is only used by the deferred surface configuration objects.
+ private final int mSurfaceType;
+
+ // The size, format, and dataspace of the surface when OutputConfiguration is created.
+ private final Size mConfiguredSize;
+ private final int mConfiguredFormat;
+ private final int mConfiguredDataspace;
+ // Surface generation ID to distinguish changes to Surface native internals
+ private final int mConfiguredGenerationId;
+ // Flag indicating if this config has deferred surface.
+ private final boolean mIsDeferredConfig;
+ // Flag indicating if this config has shared surfaces
+ private boolean mIsShared;
+}
diff --git a/android/hardware/camera2/params/ReprocessFormatsMap.java b/android/hardware/camera2/params/ReprocessFormatsMap.java
new file mode 100644
index 00000000..d3f5bc33
--- /dev/null
+++ b/android/hardware/camera2/params/ReprocessFormatsMap.java
@@ -0,0 +1,264 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.params;
+
+import static com.android.internal.util.Preconditions.*;
+
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.utils.HashCodeHelpers;
+
+import java.util.Arrays;
+
+/**
+ * Immutable class to store the input to output formats
+ * {@link CameraCharacteristics#SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP map} to be used with
+ * camera image reprocessing.
+ *
+ * <p>
+ * The mapping of image formats that are supported by this camera device for input streams,
+ * to their corresponding output formats.</p>
+ *
+ * <p>
+ * Attempting to configure an input stream with output streams not listed as available in this map
+ * is not valid.
+ * </p>
+ *
+ * @see CameraCharacteristics#SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP
+ * @see CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS
+ *
+ * <!-- hide this until we expose input streams through public API -->
+ * @hide
+ */
+public final class ReprocessFormatsMap {
+ /**
+ * Create a new {@link ReprocessFormatsMap}
+ *
+ * <p>This value is encoded as a variable-size array-of-arrays.
+ * The inner array always contains {@code [format, length, ...]} where ... has length elements.
+ * An inner array is followed by another inner array if the total metadata entry size hasn't
+ * yet been exceeded.</p>
+ *
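+ * <p>For example (with hypothetical format codes {@code F1..F5}), the entry
+ * {@code [F1, 2, F2, F3, F4, 1, F5]} maps input {@code F1} to outputs {@code {F2, F3}}
+ * and input {@code F4} to the single output {@code F5}.</p>
+ *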
+ * <p>Entry must not be {@code null}. An empty array is acceptable.</p>
+ *
+ * <p>The entry array ownership is passed to this instance after construction; do not
+ * write to it afterwards.</p>
+ *
+ * @param entry Array of ints, not yet deserialized (not-null)
+ *
+ * @throws IllegalArgumentException
+ * if the data was poorly formatted
+ * (missing output format length or too few output formats)
+ * or if any of the input/output formats were not valid
+ * @throws NullPointerException
+ * if entry was null
+ *
+ * @see StreamConfigurationMap#checkArgumentFormatInternal
+ *
+ * @hide
+ */
+ public ReprocessFormatsMap(final int[] entry) {
+ checkNotNull(entry, "entry must not be null");
+
+ int numInputs = 0;
+ int left = entry.length;
+ for (int i = 0; i < entry.length; ) {
+ int inputFormat = StreamConfigurationMap.checkArgumentFormatInternal(entry[i]);
+
+ left--;
+ i++;
+
+ if (left < 1) {
+ throw new IllegalArgumentException(
+ String.format("Input %x had no output format length listed", inputFormat));
+ }
+
+ final int length = entry[i];
+ left--;
+ i++;
+
+ if (length > 0) {
+ if (left < length) {
+ throw new IllegalArgumentException(
+ String.format(
+ "Input %x had too few output formats listed (actual: %d, " +
+ "expected: %d)", inputFormat, left, length));
+ }
+
+ // Validate the output formats only after the bounds check above, so that a
+ // truncated entry array fails with IllegalArgumentException instead of
+ // ArrayIndexOutOfBoundsException.
+ for (int j = 0; j < length; ++j) {
+ StreamConfigurationMap.checkArgumentFormatInternal(entry[i + j]);
+ }
+
+ i += length;
+ left -= length;
+ }
+
+ numInputs++;
+ }
+
+ mEntry = entry;
+ mInputCount = numInputs;
+ }
+
+ /**
+ * Get a list of all input image formats that can be used to reprocess an input
+ * stream into an output stream.
+ *
+ * <p>Use this input format to look up the available output formats with {@link #getOutputs}.
+ * </p>
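+ *
+ * <p>A minimal sketch of walking the whole map (assuming {@code map} is an
+ * instance of this class):</p>
+ *
+ * <pre><code>{@code
+ * for (int input : map.getInputs()) {
+ *     for (int output : map.getOutputs(input)) {
+ *         // each (input, output) pair is a supported reprocessing path
+ *     }
+ * }
+ * }</code></pre>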
+ *
+ * @return an array of inputs (possibly empty, but never {@code null})
+ *
+ * @see ImageFormat
+ * @see #getOutputs
+ */
+ public int[] getInputs() {
+ final int[] inputs = new int[mInputCount];
+
+ int left = mEntry.length;
+ for (int i = 0, j = 0; i < mEntry.length; j++) {
+ final int format = mEntry[i];
+
+ left--;
+ i++;
+
+ if (left < 1) {
+ throw new AssertionError(
+ String.format("Input %x had no output format length listed", format));
+ }
+
+ final int length = mEntry[i];
+ left--;
+ i++;
+
+ if (length > 0) {
+ if (left < length) {
+ throw new AssertionError(
+ String.format(
+ "Input %x had too few output formats listed (actual: %d, " +
+ "expected: %d)", format, left, length));
+ }
+
+ i += length;
+ left -= length;
+ }
+
+ inputs[j] = format;
+ }
+
+ return StreamConfigurationMap.imageFormatToPublic(inputs);
+ }
+
+ /**
+ * Get the list of output formats that can be reprocessed into from the input {@code format}.
+ *
+ * <p>The input {@code format} must be one of the formats returned by {@link #getInputs}.</p>
+ *
+ * @param format an input format
+ *
+ * @return list of output image formats
+ *
+ * @see ImageFormat
+ * @see #getInputs
+ */
+ public int[] getOutputs(final int format) {
+
+ int left = mEntry.length;
+ for (int i = 0; i < mEntry.length; ) {
+ final int inputFormat = mEntry[i];
+
+ left--;
+ i++;
+
+ if (left < 1) {
+ throw new AssertionError(
+ String.format("Input %x had no output format length listed", format));
+ }
+
+ final int length = mEntry[i];
+ left--;
+ i++;
+
+ if (length > 0) {
+ if (left < length) {
+ throw new AssertionError(
+ String.format(
+ "Input %x had too few output formats listed (actual: %d, " +
+ "expected: %d)", format, left, length));
+ }
+ }
+
+ if (inputFormat == format) {
+ int[] outputs = new int[length];
+
+ // Copying manually is faster than System.arraycopy for small arrays
+ for (int k = 0; k < length; ++k) {
+ outputs[k] = mEntry[i + k];
+ }
+
+ return StreamConfigurationMap.imageFormatToPublic(outputs);
+ }
+
+ i += length;
+ left -= length;
+ }
+
+ throw new IllegalArgumentException(
+ String.format("Input format %x was not one in #getInputs", format));
+ }
+
+ /**
+ * Check if this {@link ReprocessFormatsMap} is equal to another
+ * {@link ReprocessFormatsMap}.
+ *
+ * <p>Two objects are equal if and only if each of the respective elements is equal.
+ * </p>
+ *
+ * @return {@code true} if the objects were equal, {@code false} otherwise
+ */
+ @Override
+ public boolean equals(final Object obj) {
+ if (obj == null) {
+ return false;
+ }
+ if (this == obj) {
+ return true;
+ }
+ if (obj instanceof ReprocessFormatsMap) {
+ final ReprocessFormatsMap other = (ReprocessFormatsMap) obj;
+ // Do not compare anything besides mEntry, since the rest of the values are derived
+ return Arrays.equals(mEntry, other.mEntry);
+ }
+ return false;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public int hashCode() {
+ // Do not hash anything besides mEntry since the rest of the values are derived
+ return HashCodeHelpers.hashCode(mEntry);
+ }
+
+ private final int[] mEntry;
+ /*
+ * Dependent fields: values are derived from mEntry
+ */
+ private final int mInputCount;
+}
diff --git a/android/hardware/camera2/params/RggbChannelVector.java b/android/hardware/camera2/params/RggbChannelVector.java
new file mode 100644
index 00000000..e08ec55d
--- /dev/null
+++ b/android/hardware/camera2/params/RggbChannelVector.java
@@ -0,0 +1,223 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.params;
+
+import static com.android.internal.util.Preconditions.*;
+
+/**
+ * Immutable class to store a 4-element vector of floats indexable by a Bayer RAW 2x2 pixel block.
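+ *
+ * <p>For example (a sketch; the gain values are arbitrary), a white balance
+ * gains vector that boosts red and blue relative to green could be built and
+ * queried as:</p>
+ *
+ * <pre><code>{@code
+ * RggbChannelVector gains = new RggbChannelVector(2.0f, 1.0f, 1.0f, 1.8f);
+ * float red = gains.getComponent(RggbChannelVector.RED); // 2.0f
+ * }</code></pre>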
+ */
+public final class RggbChannelVector {
+ /**
+ * The number of color channels in this vector.
+ */
+ public static final int COUNT = 4;
+
+ /** Red color channel in a Bayer RAW pattern. */
+ public static final int RED = 0;
+
+ /** Green color channel in a Bayer RAW pattern used by the even rows. */
+ public static final int GREEN_EVEN = 1;
+
+ /** Green color channel in a Bayer RAW pattern used by the odd rows. */
+ public static final int GREEN_ODD = 2;
+
+ /** Blue color channel in a Bayer RAW pattern. */
+ public static final int BLUE = 3;
+
+ /**
+ * Create a new {@link RggbChannelVector} from an RGGB 2x2 pixel.
+ *
+ * <p>All pixel values are considered normalized within {@code [0.0f, 1.0f]}
+ * (i.e. {@code 1.0f} could be linearized to {@code 255} if converting to a
+ * non-floating point pixel representation).</p>
+ *
+ * <p>All arguments must be finite; NaN and infinity are not allowed.</p>
+ *
+ * @param red red pixel
+ * @param greenEven green pixel (even row)
+ * @param greenOdd green pixel (odd row)
+ * @param blue blue pixel
+ *
+ * @throws IllegalArgumentException if any of the arguments were not finite
+ */
+ public RggbChannelVector(final float red, final float greenEven, final float greenOdd,
+ final float blue) {
+ mRed = checkArgumentFinite(red, "red");
+ mGreenEven = checkArgumentFinite(greenEven, "greenEven");
+ mGreenOdd = checkArgumentFinite(greenOdd, "greenOdd");
+ mBlue = checkArgumentFinite(blue, "blue");
+ }
+
+ /**
+ * Get the red component.
+ *
+ * @return a floating point value (guaranteed to be finite)
+ */
+ public final float getRed() {
+ return mRed;
+ }
+
+ /**
+ * Get the green (even rows) component.
+ *
+ * @return a floating point value (guaranteed to be finite)
+ */
+ public float getGreenEven() {
+ return mGreenEven;
+ }
+
+ /**
+ * Get the green (odd rows) component.
+ *
+ * @return a floating point value (guaranteed to be finite)
+ */
+ public float getGreenOdd() {
+ return mGreenOdd;
+ }
+
+ /**
+ * Get the blue component.
+ *
+ * @return a floating point value (guaranteed to be finite)
+ */
+ public float getBlue() {
+ return mBlue;
+ }
+
+ /**
+ * Get the component by the color channel index.
+ *
+ * <p>{@code colorChannel} must be one of {@link #RED}, {@link #GREEN_EVEN}, {@link #GREEN_ODD},
+ * {@link #BLUE}.</p>
+ *
+ * @param colorChannel greater than or equal to {@code 0} and less than {@link #COUNT}
+ * @return a floating point value (guaranteed to be finite)
+ *
+ * @throws IllegalArgumentException if {@code colorChannel} was out of range
+ */
+ public float getComponent(final int colorChannel) {
+ if (colorChannel < 0 || colorChannel >= COUNT) {
+ throw new IllegalArgumentException("Color channel out of range");
+ }
+
+ switch (colorChannel) {
+ case RED:
+ return mRed;
+ case GREEN_EVEN:
+ return mGreenEven;
+ case GREEN_ODD:
+ return mGreenOdd;
+ case BLUE:
+ return mBlue;
+ default:
+ throw new AssertionError("Unhandled case " + colorChannel);
+ }
+ }
+
+ /**
+ * Copy the vector into the destination in the order {@code [R, Geven, Godd, B]}.
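+ *
+ * <p>For example (a sketch, assuming {@code vector} is an instance of this
+ * class):</p>
+ *
+ * <pre><code>{@code
+ * float[] values = new float[RggbChannelVector.COUNT];
+ * vector.copyTo(values, 0); // values now holds {R, G_even, G_odd, B}
+ * }</code></pre>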
+ *
+ * @param destination
+ * an array big enough to hold at least {@value #COUNT} elements after the
+ * {@code offset}
+ * @param offset
+ * a non-negative offset into the array
+ *
+ * @throws NullPointerException
+ * If {@code destination} was {@code null}
+ * @throws ArrayIndexOutOfBoundsException
+ * If there's not enough room to write the elements at the specified destination and
+ * offset.
+ */
+ public void copyTo(final float[] destination, final int offset) {
+ checkNotNull(destination, "destination must not be null");
+ if (destination.length - offset < COUNT) {
+ throw new ArrayIndexOutOfBoundsException("destination too small to fit elements");
+ }
+
+ destination[offset + RED] = mRed;
+ destination[offset + GREEN_EVEN] = mGreenEven;
+ destination[offset + GREEN_ODD] = mGreenOdd;
+ destination[offset + BLUE] = mBlue;
+ }
+
+ /**
+ * Check if this {@link RggbChannelVector} is equal to another {@link RggbChannelVector}.
+ *
+ * <p>Two vectors are equal if and only if each of the respective elements is equal.</p>
+ *
+ * @return {@code true} if the objects were equal, {@code false} otherwise
+ */
+ @Override
+ public boolean equals(final Object obj) {
+ if (obj == null) {
+ return false;
+ } else if (this == obj) {
+ return true;
+ } else if (obj instanceof RggbChannelVector) {
+ final RggbChannelVector other = (RggbChannelVector) obj;
+ return mRed == other.mRed &&
+ mGreenEven == other.mGreenEven &&
+ mGreenOdd == other.mGreenOdd &&
+ mBlue == other.mBlue;
+ }
+ return false;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public int hashCode() {
+ return Float.floatToIntBits(mRed) ^
+ Float.floatToIntBits(mGreenEven) ^
+ Float.floatToIntBits(mGreenOdd) ^
+ Float.floatToIntBits(mBlue);
+ }
+
+ /**
+ * Return the RggbChannelVector as a string representation.
+ *
+ * <p> {@code "RggbChannelVector{R:%f, G_even:%f, G_odd:%f, B:%f}"}, where each
+ * {@code %f} represents one of the four color channels. </p>
+ *
+ * @return string representation of {@link RggbChannelVector}
+ */
+ @Override
+ public String toString() {
+ return String.format("RggbChannelVector%s", toShortString());
+ }
+
+ /**
+ * Return the RggbChannelVector as a string in compact form.
+ *
+ * <p> {@code "{R:%f, G_even:%f, G_odd:%f, B:%f}"}, where each {@code %f}
+ * represents one of the four color channels. </p>
+ *
+ * @return compact string representation of {@link RggbChannelVector}
+ */
+ private String toShortString() {
+ return String.format("{R:%f, G_even:%f, G_odd:%f, B:%f}",
+ mRed, mGreenEven, mGreenOdd, mBlue);
+ }
+
+ private final float mRed;
+ private final float mGreenEven;
+ private final float mGreenOdd;
+ private final float mBlue;
+}
diff --git a/android/hardware/camera2/params/StreamConfiguration.java b/android/hardware/camera2/params/StreamConfiguration.java
new file mode 100644
index 00000000..a6fc10fd
--- /dev/null
+++ b/android/hardware/camera2/params/StreamConfiguration.java
@@ -0,0 +1,171 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.params;
+
+import static com.android.internal.util.Preconditions.*;
+import static android.hardware.camera2.params.StreamConfigurationMap.checkArgumentFormatInternal;
+
+import android.graphics.ImageFormat;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.utils.HashCodeHelpers;
+import android.graphics.PixelFormat;
+import android.util.Size;
+
+/**
+ * Immutable class to store the available stream
+ * {@link CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS configurations} to set up
+ * {@link android.view.Surface Surfaces} for creating a {@link CameraCaptureSession capture session}
+ * with {@link CameraDevice#createCaptureSession}.
+ * <!-- TODO: link to input stream configuration -->
+ *
+ * <p>This is the authoritative list for all input/output formats (and sizes respectively
+ * for that format) that are supported by a camera device.</p>
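+ *
+ * <p>A minimal sketch of constructing and querying one entry (the constructor
+ * arguments here are arbitrary illustrative values):</p>
+ *
+ * <pre><code>{@code
+ * StreamConfiguration config =
+ *         new StreamConfiguration(ImageFormat.YUV_420_888, 1920, 1080, false);
+ * Size size = config.getSize();        // 1920x1080
+ * boolean output = config.isOutput();  // true, since input == false
+ * }</code></pre>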
+ *
+ * @see CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS
+ *
+ * @hide
+ */
+public final class StreamConfiguration {
+
+ /**
+ * Create a new {@link StreamConfiguration}.
+ *
+ * @param format image format
+ * @param width image width, in pixels (positive)
+ * @param height image height, in pixels (positive)
+ * @param input true if this is an input configuration, false for output configurations
+ *
+ * @throws IllegalArgumentException
+ * if width/height were not positive
+ * or if the format was not user-defined in ImageFormat/PixelFormat
+ * (IMPL_DEFINED is OK)
+ *
+ * @hide
+ */
+ public StreamConfiguration(
+ final int format, final int width, final int height, final boolean input) {
+ mFormat = checkArgumentFormatInternal(format);
+ mWidth = checkArgumentPositive(width, "width must be positive");
+ mHeight = checkArgumentPositive(height, "height must be positive");
+ mInput = input;
+ }
+
+ /**
+ * Get the internal image {@code format} in this stream configuration.
+ *
+ * @return an integer format
+ *
+ * @see ImageFormat
+ * @see PixelFormat
+ */
+ public final int getFormat() {
+ return mFormat;
+ }
+
+
+ /**
+ * Return the width of the stream configuration.
+ *
+ * @return width > 0
+ */
+ public int getWidth() {
+ return mWidth;
+ }
+
+ /**
+ * Return the height of the stream configuration.
+ *
+ * @return height > 0
+ */
+ public int getHeight() {
+ return mHeight;
+ }
+
+ /**
+ * Convenience method to return the size of this stream configuration.
+ *
+ * @return a Size with positive width and height
+ */
+ public Size getSize() {
+ return new Size(mWidth, mHeight);
+ }
+
+ /**
+ * Determines if this configuration is usable for input streams.
+ *
+ * <p>Input and output stream configurations are not interchangeable;
+ * input stream configurations must be used when configuring inputs.</p>
+ *
+ * @return {@code true} if input configuration, {@code false} otherwise
+ */
+ public boolean isInput() {
+ return mInput;
+ }
+
+ /**
+ * Determines if this configuration is usable for output streams.
+ *
+ * <p>Input and output stream configurations are not interchangeable;
+ * output stream configurations must be used when configuring outputs.</p>
+ *
+ * @return {@code true} if output configuration, {@code false} otherwise
+ *
+ * @see CameraDevice#createCaptureSession
+ */
+ public boolean isOutput() {
+ return !mInput;
+ }
+
+ /**
+ * Check if this {@link StreamConfiguration} is equal to another {@link StreamConfiguration}.
+ *
+ * <p>Two configurations are equal if and only if each of the respective elements is equal.</p>
+ *
+ * @return {@code true} if the objects were equal, {@code false} otherwise
+ */
+ @Override
+ public boolean equals(final Object obj) {
+ if (obj == null) {
+ return false;
+ }
+ if (this == obj) {
+ return true;
+ }
+ if (obj instanceof StreamConfiguration) {
+ final StreamConfiguration other = (StreamConfiguration) obj;
+ return mFormat == other.mFormat &&
+ mWidth == other.mWidth &&
+ mHeight == other.mHeight &&
+ mInput == other.mInput;
+ }
+ return false;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public int hashCode() {
+ return HashCodeHelpers.hashCode(mFormat, mWidth, mHeight, mInput ? 1 : 0);
+ }
+
+ private final int mFormat;
+ private final int mWidth;
+ private final int mHeight;
+ private final boolean mInput;
+}
diff --git a/android/hardware/camera2/params/StreamConfigurationDuration.java b/android/hardware/camera2/params/StreamConfigurationDuration.java
new file mode 100644
index 00000000..217059d1
--- /dev/null
+++ b/android/hardware/camera2/params/StreamConfigurationDuration.java
@@ -0,0 +1,151 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.params;
+
+import static com.android.internal.util.Preconditions.*;
+import static android.hardware.camera2.params.StreamConfigurationMap.checkArgumentFormatInternal;
+
+import android.graphics.ImageFormat;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.utils.HashCodeHelpers;
+import android.graphics.PixelFormat;
+import android.util.Size;
+
+/**
+ * Immutable class to store a time duration for any given format/size combination.
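+ *
+ * <p>For example, converting a duration to an approximate frame rate (a sketch,
+ * assuming {@code duration} is an instance of this class with a non-zero
+ * duration):</p>
+ *
+ * <pre><code>{@code
+ * double fps = 1e9 / duration.getDuration(); // getDuration() is in nanoseconds
+ * }</code></pre>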
+ *
+ * @see CameraCharacteristics#SCALER_AVAILABLE_STREAM_CONFIGURATIONS
+ * @see CameraCharacteristics#SCALER_AVAILABLE_MIN_FRAME_DURATIONS
+ * @see CameraCharacteristics#SCALER_AVAILABLE_STALL_DURATIONS
+ *
+ * @hide
+ */
+public final class StreamConfigurationDuration {
+
+ /**
+ * Create a new {@link StreamConfigurationDuration}.
+ *
+ * @param format image format
+ * @param width image width, in pixels (positive)
+ * @param height image height, in pixels (positive)
+ * @param durationNs duration in nanoseconds (non-negative)
+ *
+ * @throws IllegalArgumentException
+ * if width/height were not positive, or durationNs was negative
+ * or if the format was not user-defined in ImageFormat/PixelFormat
+ * (IMPL_DEFINED is OK)
+ *
+ *
+ * @hide
+ */
+ public StreamConfigurationDuration(
+ final int format, final int width, final int height, final long durationNs) {
+ mFormat = checkArgumentFormatInternal(format);
+ mWidth = checkArgumentPositive(width, "width must be positive");
+ mHeight = checkArgumentPositive(height, "height must be positive");
+ mDurationNs = checkArgumentNonnegative(durationNs, "durationNs must be non-negative");
+ }
+
+ /**
+ * Get the internal image {@code format} in this stream configuration duration.
+ *
+ * @return an integer format
+ *
+ * @see ImageFormat
+ * @see PixelFormat
+ */
+ public final int getFormat() {
+ return mFormat;
+ }
+
+
+ /**
+ * Return the width of the stream configuration duration.
+ *
+ * @return width > 0
+ */
+ public int getWidth() {
+ return mWidth;
+ }
+
+ /**
+ * Return the height of the stream configuration duration.
+ *
+ * @return height > 0
+ */
+ public int getHeight() {
+ return mHeight;
+ }
+
+ /**
+ * Convenience method to return the size of this stream configuration duration.
+ *
+ * @return a Size with positive width and height
+ */
+ public Size getSize() {
+ return new Size(mWidth, mHeight);
+ }
+
+ /**
+ * Get the time duration (in nanoseconds).
+ *
+ * @return long >= 0
+ */
+ public long getDuration() {
+ return mDurationNs;
+ }
+
+ /**
+ * Check if this {@link StreamConfigurationDuration} is equal to another
+ * {@link StreamConfigurationDuration}.
+ *
+ * <p>Two durations are equal if and only if each of the respective elements is equal.</p>
+ *
+ * @return {@code true} if the objects were equal, {@code false} otherwise
+ */
+ @Override
+ public boolean equals(final Object obj) {
+ if (obj == null) {
+ return false;
+ }
+ if (this == obj) {
+ return true;
+ }
+ if (obj instanceof StreamConfigurationDuration) {
+ final StreamConfigurationDuration other = (StreamConfigurationDuration) obj;
+ return mFormat == other.mFormat &&
+ mWidth == other.mWidth &&
+ mHeight == other.mHeight &&
+ mDurationNs == other.mDurationNs;
+ }
+ return false;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public int hashCode() {
+ return HashCodeHelpers.hashCode(mFormat, mWidth, mHeight,
+ (int) mDurationNs, (int)(mDurationNs >>> Integer.SIZE));
+ }
+
+ private final int mFormat;
+ private final int mWidth;
+ private final int mHeight;
+ private final long mDurationNs;
+}
diff --git a/android/hardware/camera2/params/StreamConfigurationMap.java b/android/hardware/camera2/params/StreamConfigurationMap.java
new file mode 100644
index 00000000..00e047db
--- /dev/null
+++ b/android/hardware/camera2/params/StreamConfigurationMap.java
@@ -0,0 +1,1688 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.params;
+
+import android.graphics.ImageFormat;
+import android.graphics.PixelFormat;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.utils.HashCodeHelpers;
+import android.hardware.camera2.utils.SurfaceUtils;
+import android.hardware.camera2.legacy.LegacyCameraDevice;
+import android.hardware.camera2.legacy.LegacyMetadataMapper;
+import android.view.Surface;
+import android.util.Range;
+import android.util.Size;
+import android.util.SparseIntArray;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Objects;
+import java.util.Set;
+
+import static com.android.internal.util.Preconditions.*;
+
+/**
+ * Immutable class to store the available stream
+ * {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP configurations} to set up
+ * {@link android.view.Surface Surfaces} for creating a
+ * {@link android.hardware.camera2.CameraCaptureSession capture session} with
+ * {@link android.hardware.camera2.CameraDevice#createCaptureSession}.
+ * <!-- TODO: link to input stream configuration -->
+ *
+ * <p>This is the authoritative list for all <!-- input/ -->output formats (and sizes respectively
+ * for that format) that are supported by a camera device.</p>
+ *
+ * <p>This also contains the minimum frame durations and stall durations for each format/size
+ * combination that can be used to calculate effective frame rate when submitting multiple captures.
+ * </p>
+ *
+ * <p>An instance of this object is available from {@link CameraCharacteristics} using
+ * the {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP} key and the
+ * {@link CameraCharacteristics#get} method.</p>
+ *
+ * <pre><code>{@code
+ * CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(cameraId);
+ * StreamConfigurationMap configs = characteristics.get(
+ * CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+ * }</code></pre>
+ *
+ * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP
+ * @see CameraDevice#createCaptureSession
+ */
+public final class StreamConfigurationMap {
+
+ private static final String TAG = "StreamConfigurationMap";
+
+ /**
+ * Create a new {@link StreamConfigurationMap}.
+ *
+ * <p>The array parameters ownership is passed to this object after creation; do not
+ * write to them after this constructor is invoked.</p>
+ *
+ * @param configurations a non-{@code null} array of {@link StreamConfiguration}
+ * @param minFrameDurations a non-{@code null} array of {@link StreamConfigurationDuration}
+ * @param stallDurations a non-{@code null} array of {@link StreamConfigurationDuration}
+ * @param highSpeedVideoConfigurations an array of {@link HighSpeedVideoConfiguration}, or
+ * {@code null} if the camera device does not support high speed video recording
+ * @param listHighResolution a flag indicating whether the device supports BURST_CAPTURE
+ * and thus needs a separate list of slow high-resolution output sizes
+ * @throws NullPointerException if any of the arguments except highSpeedVideoConfigurations
+ * were {@code null} or any subelements were {@code null}
+ *
+ * @hide
+ */
+ public StreamConfigurationMap(
+ StreamConfiguration[] configurations,
+ StreamConfigurationDuration[] minFrameDurations,
+ StreamConfigurationDuration[] stallDurations,
+ StreamConfiguration[] depthConfigurations,
+ StreamConfigurationDuration[] depthMinFrameDurations,
+ StreamConfigurationDuration[] depthStallDurations,
+ HighSpeedVideoConfiguration[] highSpeedVideoConfigurations,
+ ReprocessFormatsMap inputOutputFormatsMap,
+ boolean listHighResolution) {
+
+ if (configurations == null) {
+ // If no color configurations exist, ensure depth ones do
+ checkArrayElementsNotNull(depthConfigurations, "depthConfigurations");
+ mConfigurations = new StreamConfiguration[0];
+ mMinFrameDurations = new StreamConfigurationDuration[0];
+ mStallDurations = new StreamConfigurationDuration[0];
+ } else {
+ mConfigurations = checkArrayElementsNotNull(configurations, "configurations");
+ mMinFrameDurations = checkArrayElementsNotNull(minFrameDurations, "minFrameDurations");
+ mStallDurations = checkArrayElementsNotNull(stallDurations, "stallDurations");
+ }
+
+ mListHighResolution = listHighResolution;
+
+ if (depthConfigurations == null) {
+ mDepthConfigurations = new StreamConfiguration[0];
+ mDepthMinFrameDurations = new StreamConfigurationDuration[0];
+ mDepthStallDurations = new StreamConfigurationDuration[0];
+ } else {
+ mDepthConfigurations = checkArrayElementsNotNull(depthConfigurations,
+ "depthConfigurations");
+ mDepthMinFrameDurations = checkArrayElementsNotNull(depthMinFrameDurations,
+ "depthMinFrameDurations");
+ mDepthStallDurations = checkArrayElementsNotNull(depthStallDurations,
+ "depthStallDurations");
+ }
+
+ if (highSpeedVideoConfigurations == null) {
+ mHighSpeedVideoConfigurations = new HighSpeedVideoConfiguration[0];
+ } else {
+ mHighSpeedVideoConfigurations = checkArrayElementsNotNull(
+ highSpeedVideoConfigurations, "highSpeedVideoConfigurations");
+ }
+
+ // For each format, track how many sizes there are available to configure
+ for (StreamConfiguration config : mConfigurations) {
+ int fmt = config.getFormat();
+ SparseIntArray map = null;
+ if (config.isOutput()) {
+ mAllOutputFormats.put(fmt, mAllOutputFormats.get(fmt) + 1);
+ long duration = 0;
+ if (mListHighResolution) {
+ for (StreamConfigurationDuration configurationDuration : mMinFrameDurations) {
+ if (configurationDuration.getFormat() == fmt &&
+ configurationDuration.getWidth() == config.getSize().getWidth() &&
+ configurationDuration.getHeight() == config.getSize().getHeight()) {
+ duration = configurationDuration.getDuration();
+ break;
+ }
+ }
+ }
+ map = duration <= DURATION_20FPS_NS ?
+ mOutputFormats : mHighResOutputFormats;
+ } else {
+ map = mInputFormats;
+ }
+ map.put(fmt, map.get(fmt) + 1);
+ }
+
+ // For each depth format, track how many sizes there are available to configure
+ for (StreamConfiguration config : mDepthConfigurations) {
+ if (!config.isOutput()) {
+ // Ignoring input depth configs
+ continue;
+ }
+
+ mDepthOutputFormats.put(config.getFormat(),
+ mDepthOutputFormats.get(config.getFormat()) + 1);
+ }
+
+ if (configurations != null &&
+ mOutputFormats.indexOfKey(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) < 0) {
+ throw new AssertionError(
+ "At least one stream configuration for IMPLEMENTATION_DEFINED must exist");
+ }
+
+ // For each Size/FPS range, track how many FPS range/Size there are available
+ for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
+ Size size = config.getSize();
+ Range<Integer> fpsRange = config.getFpsRange();
+ Integer fpsRangeCount = mHighSpeedVideoSizeMap.get(size);
+ if (fpsRangeCount == null) {
+ fpsRangeCount = 0;
+ }
+ mHighSpeedVideoSizeMap.put(size, fpsRangeCount + 1);
+ Integer sizeCount = mHighSpeedVideoFpsRangeMap.get(fpsRange);
+ if (sizeCount == null) {
+ sizeCount = 0;
+ }
+ mHighSpeedVideoFpsRangeMap.put(fpsRange, sizeCount + 1);
+ }
+
+ mInputOutputFormatsMap = inputOutputFormatsMap;
+ }
+
+ /**
+ * Get the image formats available for output in this stream configuration map.
+ *
+ * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
+ * or in {@link PixelFormat} (and there is no possibility of collision).</p>
+ *
+ * <p>Formats listed in this array are guaranteed to return true if queried with
+ * {@link #isOutputSupportedFor(int)}.</p>
+ *
+ * @return an array of integer formats
+ *
+ * @see ImageFormat
+ * @see PixelFormat
+ */
+ public final int[] getOutputFormats() {
+ return getPublicFormats(/*output*/true);
+ }
+
+ /**
+ * Get the output image formats available for a reprocessing input format.
+ *
+ * <p>When submitting a {@link CaptureRequest} with an input Surface of a given format,
+ * the only allowed target outputs of the {@link CaptureRequest} are the ones with a format
+ * listed in the return value of this method. Including any other output Surface as a target
+ * will throw an IllegalArgumentException. If no output format is supported given the input
+ * format, an empty int[] will be returned.</p>
+ *
+ * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
+ * or in {@link PixelFormat} (and there is no possibility of collision).</p>
+ *
+ * <p>Formats listed in this array are guaranteed to return true if queried with
+ * {@link #isOutputSupportedFor(int)}.</p>
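+ *
+ * <p>A minimal sketch (assuming {@code configs} is the map for a device that
+ * supports YUV reprocessing; the returned array may be empty otherwise):</p>
+ *
+ * <pre><code>{@code
+ * int[] outputs = configs.getValidOutputFormatsForInput(ImageFormat.YUV_420_888);
+ * }</code></pre>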
+ *
+ * @return an array of integer formats
+ *
+ * @see ImageFormat
+ * @see PixelFormat
+ */
+ public final int[] getValidOutputFormatsForInput(int inputFormat) {
+ if (mInputOutputFormatsMap == null) {
+ return new int[0];
+ }
+ return mInputOutputFormatsMap.getOutputs(inputFormat);
+ }
+
+ /**
+ * Get the image formats available for input in this stream configuration map.
+ *
+ * <p>All image formats returned by this function will be defined in either {@link ImageFormat}
+ * or in {@link PixelFormat} (and there is no possibility of collision).</p>
+ *
+ * @return an array of integer formats
+ *
+ * @see ImageFormat
+ * @see PixelFormat
+ */
+ public final int[] getInputFormats() {
+ return getPublicFormats(/*output*/false);
+ }
+
+ /**
+ * Get the supported input sizes for this input format.
+ *
+ * <p>The format must have come from {@link #getInputFormats}; otherwise
+ * {@code null} is returned.</p>
+ *
+ * @param format a format from {@link #getInputFormats}
+ * @return a non-empty array of sizes, or {@code null} if the format was not available.
+ */
+ public Size[] getInputSizes(final int format) {
+ return getPublicFormatSizes(format, /*output*/false, /*highRes*/false);
+ }
+
+ /**
+ * Determine whether or not output surfaces with a particular user-defined format can be passed
+ * to {@link CameraDevice#createCaptureSession createCaptureSession}.
+ *
+ * <p>This method determines whether the output {@code format} is supported by the camera device;
+ * each output {@code surface} target may or may not itself support that {@code format}.
+ * Refer to the class which provides the surface for additional documentation.</p>
+ *
+ * <p>Formats for which this returns {@code true} are guaranteed to exist in the result
+ * returned by {@link #getOutputSizes}.</p>
+ *
+ * @param format an image format from either {@link ImageFormat} or {@link PixelFormat}
+ * @return
+ * {@code true} iff using a {@code surface} with this {@code format} will be
+ * supported with {@link CameraDevice#createCaptureSession}
+ *
+ * @throws IllegalArgumentException
+ * if the image format was not a defined named constant
+ * from either {@link ImageFormat} or {@link PixelFormat}
+ *
+ * @see ImageFormat
+ * @see PixelFormat
+ * @see CameraDevice#createCaptureSession
+ */
+ public boolean isOutputSupportedFor(int format) {
+ checkArgumentFormat(format);
+
+ int internalFormat = imageFormatToInternal(format);
+ int dataspace = imageFormatToDataspace(format);
+ if (dataspace == HAL_DATASPACE_DEPTH) {
+ return mDepthOutputFormats.indexOfKey(internalFormat) >= 0;
+ } else {
+ return getFormatsMap(/*output*/true).indexOfKey(internalFormat) >= 0;
+ }
+ }
+
+ /**
+ * Determine whether or not output streams can be configured with a particular class
+ * as a consumer.
+ *
+ * <p>The following list is generally usable for outputs:
+ * <ul>
+ * <li>{@link android.media.ImageReader} -
+ * Recommended for image processing or streaming to external resources (such as a file or
+ * network)
+ * <li>{@link android.media.MediaRecorder} -
+ * Recommended for recording video (simple to use)
+ * <li>{@link android.media.MediaCodec} -
+ * Recommended for recording video (more complicated to use, with more flexibility)
+ * <li>{@link android.renderscript.Allocation} -
+ * Recommended for image processing with {@link android.renderscript RenderScript}
+ * <li>{@link android.view.SurfaceHolder} -
+ * Recommended for low-power camera preview with {@link android.view.SurfaceView}
+ * <li>{@link android.graphics.SurfaceTexture} -
+ * Recommended for OpenGL-accelerated preview processing or compositing with
+ * {@link android.view.TextureView}
+ * </ul>
+ * </p>
+ *
+ * <p>Generally speaking this means that creating a {@link Surface} from that class <i>may</i>
+ * provide a producer endpoint that is suitable to be used with
+ * {@link CameraDevice#createCaptureSession}.</p>
+ *
+ * <p>Since not all of the above classes support output of all format and size combinations,
+ * the particular combination should be queried with {@link #isOutputSupportedFor(Surface)}.</p>
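+ *
+ * <p>For example (a sketch):</p>
+ *
+ * <pre><code>{@code
+ * boolean previewSupported =
+ *         StreamConfigurationMap.isOutputSupportedFor(android.graphics.SurfaceTexture.class);
+ * }</code></pre>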
+ *
+ * @param klass a non-{@code null} {@link Class} object reference
+ * @return {@code true} if this class is supported as an output, {@code false} otherwise
+ *
+ * @throws NullPointerException if {@code klass} was {@code null}
+ *
+ * @see CameraDevice#createCaptureSession
+ * @see #isOutputSupportedFor(Surface)
+ */
+ public static <T> boolean isOutputSupportedFor(Class<T> klass) {
+ checkNotNull(klass, "klass must not be null");
+
+ if (klass == android.media.ImageReader.class) {
+ return true;
+ } else if (klass == android.media.MediaRecorder.class) {
+ return true;
+ } else if (klass == android.media.MediaCodec.class) {
+ return true;
+ } else if (klass == android.renderscript.Allocation.class) {
+ return true;
+ } else if (klass == android.view.SurfaceHolder.class) {
+ return true;
+ } else if (klass == android.graphics.SurfaceTexture.class) {
+ return true;
+ }
+
+ return false;
+ }
+
+ /**
+ * Determine whether or not the {@code surface} in its current state is suitable to be included
+ * in a {@link CameraDevice#createCaptureSession capture session} as an output.
+ *
+ * <p>Not all surfaces are usable with the {@link CameraDevice}, and not all configurations
+ * of that {@code surface} are compatible. Some classes that provide the {@code surface} are
+ * compatible with the {@link CameraDevice} in general
+ * (see {@link #isOutputSupportedFor(Class)}), but it is the caller's responsibility to put the
+ * {@code surface} into a state that will be compatible with the {@link CameraDevice}.</p>
+ *
+ * <p>Reasons for a {@code surface} being specifically incompatible might be:
+ * <ul>
+ * <li>Using a format that's not listed by {@link #getOutputFormats}
+ * <li>Using a format/size combination that's not listed by {@link #getOutputSizes}
+ * <li>The {@code surface} itself is not in a state where it can service a new producer.</li>
+ * </ul>
+ * </p>
+ *
+ * <p>Surfaces from flexible sources will return true even if the exact size of the Surface does
+ * not match a camera-supported size, as long as the format (or class) is supported and the
+ * camera device supports a size that is equal to or less than 1080p in that format. If such a
+ * Surface is used to create a capture session, it will have its size rounded to the nearest
+ * supported size, below or equal to 1080p. Flexible sources include SurfaceView, SurfaceTexture,
+ * and ImageReader.</p>
+ *
+ * <p>This is not an exhaustive list; see the particular class's documentation for further
+ * possible reasons of incompatibility.</p>
+ *
+ * @param surface a non-{@code null} {@link Surface} object reference
+ * @return {@code true} if this is supported, {@code false} otherwise
+ *
+ * @throws NullPointerException if {@code surface} was {@code null}
+ * @throws IllegalArgumentException if the Surface endpoint is no longer valid
+ *
+ * @see CameraDevice#createCaptureSession
+ * @see #isOutputSupportedFor(Class)
+ */
+ public boolean isOutputSupportedFor(Surface surface) {
+ checkNotNull(surface, "surface must not be null");
+
+ Size surfaceSize = SurfaceUtils.getSurfaceSize(surface);
+ int surfaceFormat = SurfaceUtils.getSurfaceFormat(surface);
+ int surfaceDataspace = SurfaceUtils.getSurfaceDataspace(surface);
+
+ // See if consumer is flexible.
+ boolean isFlexible = SurfaceUtils.isFlexibleConsumer(surface);
+
+ StreamConfiguration[] configs =
+ surfaceDataspace != HAL_DATASPACE_DEPTH ? mConfigurations : mDepthConfigurations;
+ for (StreamConfiguration config : configs) {
+ if (config.getFormat() == surfaceFormat && config.isOutput()) {
+ // Matching format, either need exact size match, or a flexible consumer
+ // and a size no bigger than MAX_DIMEN_FOR_ROUNDING
+ if (config.getSize().equals(surfaceSize)) {
+ return true;
+ } else if (isFlexible &&
+ (config.getSize().getWidth() <= LegacyCameraDevice.MAX_DIMEN_FOR_ROUNDING)) {
+ return true;
+ }
+ }
+ }
+ return false;
+ }
+
+ /**
+ * Get a list of sizes compatible with {@code klass} to use as an output.
+ *
+ * <p>Some of the supported classes may support additional formats beyond
+ * {@link ImageFormat#PRIVATE}; this function only returns
+ * sizes for {@link ImageFormat#PRIVATE}. For example, {@link android.media.ImageReader}
+ * supports {@link ImageFormat#YUV_420_888} and {@link ImageFormat#PRIVATE}, but this method will
+ * only return the sizes for {@link ImageFormat#PRIVATE} for the {@link android.media.ImageReader}
+ * class.</p>
+ *
+ * <p>If a well-defined format such as {@code NV21} is required, use
+ * {@link #getOutputSizes(int)} instead.</p>
+ *
+ * <p>The {@code klass} should be a supported output; that is, querying
+ * {@link #isOutputSupportedFor(Class)} with it should return {@code true}.</p>
+ *
+ * @param klass
+ * a non-{@code null} {@link Class} object reference
+ * @return
+ * an array of supported sizes for {@link ImageFormat#PRIVATE} format,
+ * or {@code null} iff the {@code klass} is not a supported output.
+ *
+ *
+ * @throws NullPointerException if {@code klass} was {@code null}
+ *
+ * @see #isOutputSupportedFor(Class)
+ */
+ public <T> Size[] getOutputSizes(Class<T> klass) {
+ if (!isOutputSupportedFor(klass)) {
+ return null;
+ }
+
+ return getInternalFormatSizes(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
+ HAL_DATASPACE_UNKNOWN,/*output*/true, /*highRes*/false);
+ }
+
+ /**
+ * Get a list of sizes compatible with the requested image {@code format}.
+ *
+ * <p>The {@code format} should be a supported format (one of the formats returned by
+ * {@link #getOutputFormats}).</p>
+ *
+ * <p>As of API level 23, the {@link #getHighResolutionOutputSizes} method can be used on devices
+ * that support the
+ * {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE BURST_CAPTURE}
+ * capability to get a list of high-resolution output sizes that cannot operate at the preferred
+ * 20fps rate. This means that for some supported formats, this method will return an empty
+ * list if all the supported resolutions operate at below 20fps. For devices that do not
+ * support the BURST_CAPTURE capability, all output resolutions are listed through this method.</p>
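+ *
+ * <p>For example, picking the largest supported JPEG size (a sketch, assuming
+ * {@code configs} is obtained as in the class example and the returned array is
+ * non-empty):</p>
+ *
+ * <pre><code>{@code
+ * Size[] jpegSizes = configs.getOutputSizes(ImageFormat.JPEG);
+ * Size largest = jpegSizes[0];
+ * for (Size s : jpegSizes) {
+ *     if ((long) s.getWidth() * s.getHeight()
+ *             > (long) largest.getWidth() * largest.getHeight()) {
+ *         largest = s;
+ *     }
+ * }
+ * }</code></pre>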
+ *
+ * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
+ * @return
+ * an array of supported sizes,
+ * or {@code null} if the {@code format} is not a supported output
+ *
+ * @see ImageFormat
+ * @see PixelFormat
+ * @see #getOutputFormats
+ */
+ public Size[] getOutputSizes(int format) {
+ return getPublicFormatSizes(format, /*output*/true, /*highRes*/ false);
+ }
+
+ /**
+ * Get a list of supported high speed video recording sizes.
+ * <p>
+ * When {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO} is
+ * supported in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES}, this method will
+ * list the supported high speed video size configurations. All the sizes listed will be a
+ * subset of the sizes reported by {@link #getOutputSizes} for processed non-stalling formats
+ * (typically {@link ImageFormat#PRIVATE}, {@link ImageFormat#YUV_420_888}, etc.)
+ * </p>
+ * <p>
+ * To enable high speed video recording, the application must create a constrained high speed
+ * capture session via {@link CameraDevice#createConstrainedHighSpeedCaptureSession}, and submit
+ * a CaptureRequest list created by
+ * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}
+ * to this session. The application must select the video size from this method and
+ * {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS range} from
+ * {@link #getHighSpeedVideoFpsRangesFor} to configure the constrained high speed session and
+ * generate the high speed request list. For example, if the application intends to do high
+ * speed recording, it can select the maximum size reported by this method to create high speed
+ * capture session. Note that for the use case of multiple output streams, application must
+ * select one unique size from this method to use (e.g., preview and recording streams must have
+ * the same size). Otherwise, the high speed session creation will fail. Once the size is
+ * selected, application can get the supported FPS ranges by
+ * {@link #getHighSpeedVideoFpsRangesFor}, and use these FPS ranges to set up the recording
+ * request lists via
+ * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
+ * </p>
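+ *
+ * <p>A minimal sketch of pairing each size with its supported FPS ranges
+ * (assuming the CONSTRAINED_HIGH_SPEED_VIDEO capability is available):</p>
+ *
+ * <pre><code>{@code
+ * for (Size size : configs.getHighSpeedVideoSizes()) {
+ *     Range<Integer>[] fpsRanges = configs.getHighSpeedVideoFpsRangesFor(size);
+ * }
+ * }</code></pre>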
+ *
+ * @return an array of supported high speed video recording sizes
+ * @see #getHighSpeedVideoFpsRangesFor(Size)
+ * @see CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO
+ * @see CameraDevice#createConstrainedHighSpeedCaptureSession
+ * @see android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList
+ */
+ public Size[] getHighSpeedVideoSizes() {
+ Set<Size> keySet = mHighSpeedVideoSizeMap.keySet();
+ return keySet.toArray(new Size[keySet.size()]);
+ }
+
+ /**
+ * Get the frames per second ranges (fpsMin, fpsMax) for the input high speed video size.
+ * <p>
+ * See {@link #getHighSpeedVideoFpsRanges} for how to enable high speed recording.
+ * </p>
+ * <p>
+ * The {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS ranges} reported in this method
+ * must not be used to set up capture requests that are submitted to unconstrained capture
+ * sessions, or it will result in {@link IllegalArgumentException IllegalArgumentExceptions}.
+ * </p>
+ * <p>
+ * See {@link #getHighSpeedVideoFpsRanges} for the characteristics of the returned FPS ranges.
+ * </p>
+ *
+ * @param size one of the sizes returned by {@link #getHighSpeedVideoSizes()}
+ * @return an array of supported high speed video recording FPS ranges. The upper bound of
+ * returned ranges is guaranteed to be greater than or equal to 120.
+ * @throws IllegalArgumentException if input size does not exist in the return value of
+ * getHighSpeedVideoSizes
+ * @see #getHighSpeedVideoSizes()
+ * @see #getHighSpeedVideoFpsRanges()
+ */
+ public Range<Integer>[] getHighSpeedVideoFpsRangesFor(Size size) {
+ Integer fpsRangeCount = mHighSpeedVideoSizeMap.get(size);
+ if (fpsRangeCount == null || fpsRangeCount == 0) {
+ throw new IllegalArgumentException(String.format(
+ "Size %s does not support high speed video recording", size));
+ }
+
+ @SuppressWarnings("unchecked")
+ Range<Integer>[] fpsRanges = new Range[fpsRangeCount];
+ int i = 0;
+ for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
+ if (size.equals(config.getSize())) {
+ fpsRanges[i++] = config.getFpsRange();
+ }
+ }
+ return fpsRanges;
+ }
+
+ /**
+ * Get a list of supported high speed video recording FPS ranges.
+ * <p>
+ * When {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO} is
+ * supported in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES}, this method will
+ * list the supported high speed video FPS range configurations. Application can then use
+ * {@link #getHighSpeedVideoSizesFor} to query available sizes for one of returned FPS range.
+ * </p>
+ * <p>
+ * To enable high speed video recording, the application must create a constrained high speed
+ * capture session via {@link CameraDevice#createConstrainedHighSpeedCaptureSession}, and submit
+ * a CaptureRequest list created by
+ * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}
+ * to this session. The application must select the video size from this method and
+ * {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE FPS range} from
+ * {@link #getHighSpeedVideoFpsRangesFor} to configure the constrained high speed session and
+ * generate the high speed request list. For example, if the application intends to do high
+ * speed recording, it can select one FPS range reported by this method, query the video sizes
+ * corresponding to this FPS range by {@link #getHighSpeedVideoSizesFor} and use one of reported
+ * sizes to create a high speed capture session. Note that for the use case of multiple output
+ * streams, application must select one unique size from this method to use (e.g., preview and
+ * recording streams must have the same size). Otherwise, the high speed session creation will
+ * fail. Once the high speed capture session is created, the application can set the FPS range
+ * in the recording request lists via
+ * {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
+ * </p>
+ * <p>
+ * The FPS ranges reported by this method will have the following characteristics:
+ * <ul>
+ * <li>The fpsMin and fpsMax will be a multiple of 30fps.</li>
+ * <li>The fpsMin will be no less than 30fps, and the fpsMax will be no less than 120fps.</li>
+ * <li>At least one range will be a fixed FPS range where fpsMin == fpsMax.</li>
+ * <li>For each fixed FPS range, there will be one corresponding variable FPS range [30,
+ * fps_max]. These kinds of FPS ranges are suitable for preview-only use cases where the
+ * application doesn't want the camera device to always produce a higher frame rate than the
+ * display refresh rate.</li>
+ * </ul>
+ * </p>
+ *
+ * @return an array of supported high speed video recording FPS ranges. The upper bound of
+ * returned ranges is guaranteed to be greater than or equal to 120.
+ * @see #getHighSpeedVideoSizesFor
+ * @see CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO
+ * @see CameraDevice#createConstrainedHighSpeedCaptureSession
+ * @see android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList
+ */
+ @SuppressWarnings("unchecked")
+ public Range<Integer>[] getHighSpeedVideoFpsRanges() {
+ Set<Range<Integer>> keySet = mHighSpeedVideoFpsRangeMap.keySet();
+ return keySet.toArray(new Range[keySet.size()]);
+ }
+
+ /**
+ * Get the supported video sizes for an input high speed FPS range.
+ *
+ * <p> See {@link #getHighSpeedVideoSizes} for how to enable high speed recording.</p>
+ *
+ * @param fpsRange one of the FPS ranges returned by {@link #getHighSpeedVideoFpsRanges()}
+ * @return an array of video sizes to create high speed capture sessions for high speed streaming
+ * use cases.
+ *
+ * @throws IllegalArgumentException if input FPS range does not exist in the return value of
+ * getHighSpeedVideoFpsRanges
+ * @see #getHighSpeedVideoFpsRanges()
+ */
+ public Size[] getHighSpeedVideoSizesFor(Range<Integer> fpsRange) {
+ Integer sizeCount = mHighSpeedVideoFpsRangeMap.get(fpsRange);
+ if (sizeCount == null || sizeCount == 0) {
+ throw new IllegalArgumentException(String.format(
+ "FpsRange %s does not support high speed video recording", fpsRange));
+ }
+
+ Size[] sizes = new Size[sizeCount];
+ int i = 0;
+ for (HighSpeedVideoConfiguration config : mHighSpeedVideoConfigurations) {
+ if (fpsRange.equals(config.getFpsRange())) {
+ sizes[i++] = config.getSize();
+ }
+ }
+ return sizes;
+ }
+
+ /**
+ * Get a list of supported high resolution sizes, which cannot operate at full BURST_CAPTURE
+ * rate.
+ *
+ * <p>This includes all output sizes that cannot meet the 20 fps frame rate requirements for the
+ * {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE BURST_CAPTURE}
+ * capability. This does not include the stall duration, so for example, a JPEG or RAW16 output
+ * resolution with a large stall duration but a minimum frame duration fast enough for 20 fps will
+ * still be listed in the regular {@link #getOutputSizes} list. All the sizes on this list are
+ * still guaranteed to operate at a rate of at least 10 fps, not including stall duration.</p>
+ *
+ * <p>For a device that does not support the BURST_CAPTURE capability, this list will be
+ * {@code null}, since resolutions in the {@link #getOutputSizes} list are already not
+ * guaranteed to meet &gt;= 20 fps rate requirements. For a device that does support the
+ * BURST_CAPTURE capability, this list may be empty, if all supported resolutions meet the 20
+ * fps requirement.</p>
+ *
+ * @return an array of supported slower high-resolution sizes, or {@code null} if the
+ * BURST_CAPTURE capability is not supported
+ */
+ public Size[] getHighResolutionOutputSizes(int format) {
+ if (!mListHighResolution) return null;
+
+ return getPublicFormatSizes(format, /*output*/true, /*highRes*/ true);
+ }
+
+ /**
+ * Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration}
+ * for the format/size combination (in nanoseconds).
+ *
+ * <p>{@code format} should be one of the ones returned by {@link #getOutputFormats()}.</p>
+ * <p>{@code size} should be one of the ones returned by
+ * {@link #getOutputSizes(int)}.</p>
+ *
+ * <p>This should correspond to the frame duration when only that stream is active, with all
+ * processing (typically in {@code android.*.mode}) set to either {@code OFF} or {@code FAST}.
+ * </p>
+ *
+ * <p>When multiple streams are used in a request, the minimum frame duration will be
+ * {@code max(individual stream min durations)}.</p>
+ *
+ * <p>For devices that do not support manual sensor control
+ * ({@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR}),
+ * this function may return 0.</p>
+ *
+ * <!--
+ * TODO: uncomment after adding input stream support
+ * <p>The minimum frame duration of a stream (of a particular format, size) is the same
+ * regardless of whether the stream is input or output.</p>
+ * -->
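+ *
+ * <p>For example, estimating the maximum frame rate for a format/size
+ * combination (a sketch; {@code size} is assumed to come from
+ * {@link #getOutputSizes(int)}, and a {@code 0} return value is guarded):</p>
+ *
+ * <pre><code>{@code
+ * long minFrameNs = configs.getOutputMinFrameDuration(ImageFormat.YUV_420_888, size);
+ * double maxFps = (minFrameNs > 0) ? 1e9 / minFrameNs : Double.NaN;
+ * }</code></pre>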
+ *
+ * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
+ * @param size an output-compatible size
+ * @return a minimum frame duration {@code >} 0 in nanoseconds, or
+ * 0 if the minimum frame duration is not available.
+ *
+ * @throws IllegalArgumentException if {@code format} or {@code size} was not supported
+ * @throws NullPointerException if {@code size} was {@code null}
+ *
+ * @see CaptureRequest#SENSOR_FRAME_DURATION
+ * @see #getOutputStallDuration(int, Size)
+ * @see ImageFormat
+ * @see PixelFormat
+ */
+ public long getOutputMinFrameDuration(int format, Size size) {
+ checkNotNull(size, "size must not be null");
+ checkArgumentFormatSupported(format, /*output*/true);
+
+ return getInternalFormatDuration(imageFormatToInternal(format),
+ imageFormatToDataspace(format),
+ size,
+ DURATION_MIN_FRAME);
+ }
+
+ /**
+ * Get the minimum {@link CaptureRequest#SENSOR_FRAME_DURATION frame duration}
+ * for the class/size combination (in nanoseconds).
+ *
+ * <p>This assumes that the {@code klass} is set up to use {@link ImageFormat#PRIVATE}.
+ * For user-defined formats, use {@link #getOutputMinFrameDuration(int, Size)}.</p>
+ *
+ * <p>{@code klass} should be one of the ones which is supported by
+ * {@link #isOutputSupportedFor(Class)}.</p>
+ *
+ * <p>{@code size} should be one of the ones returned by
+ * {@link #getOutputSizes(int)}.</p>
+ *
+ * <p>This should correspond to the frame duration when only that stream is active, with all
+ * processing (typically in {@code android.*.mode}) set to either {@code OFF} or {@code FAST}.
+ * </p>
+ *
+ * <p>When multiple streams are used in a request, the minimum frame duration will be
+ * {@code max(individual stream min durations)}.</p>
+ *
+ * <p>For devices that do not support manual sensor control
+ * ({@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR}),
+ * this function may return 0.</p>
+ *
+ * <!--
+ * TODO: uncomment after adding input stream support
+ * <p>The minimum frame duration of a stream (of a particular format, size) is the same
+ * regardless of whether the stream is input or output.</p>
+ * -->
+ *
+ * @param klass
+ * a class which is supported by {@link #isOutputSupportedFor(Class)} and has a
+ * non-empty array returned by {@link #getOutputSizes(Class)}
+ * @param size an output-compatible size
+ * @return a minimum frame duration {@code >} 0 in nanoseconds, or
+ * 0 if the minimum frame duration is not available.
+ *
+ * @throws IllegalArgumentException if {@code klass} or {@code size} was not supported
+ * @throws NullPointerException if {@code size} or {@code klass} was {@code null}
+ *
+ * @see CaptureRequest#SENSOR_FRAME_DURATION
+ * @see ImageFormat
+ * @see PixelFormat
+ */
+ public <T> long getOutputMinFrameDuration(final Class<T> klass, final Size size) {
+ if (!isOutputSupportedFor(klass)) {
+ throw new IllegalArgumentException("klass was not supported");
+ }
+
+ return getInternalFormatDuration(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
+ HAL_DATASPACE_UNKNOWN,
+ size, DURATION_MIN_FRAME);
+ }
+
+ /**
+ * Get the stall duration for the format/size combination (in nanoseconds).
+ *
+ * <p>{@code format} should be one of the ones returned by {@link #getOutputFormats()}.</p>
+ * <p>{@code size} should be one of the ones returned by
+ * {@link #getOutputSizes(int)}.</p>
+ *
+ * <p>
+ * A stall duration is how much extra time would get added to the normal minimum frame duration
+ * for a repeating request that has streams with non-zero stall.
+ *
+ * <p>For example, consider JPEG captures which have the following characteristics:
+ *
+ * <ul>
+ * <li>JPEG streams act like processed YUV streams in requests for which they are not included;
+ * in requests in which they are directly referenced, they act as JPEG streams.
+ * This is because supporting a JPEG stream requires the underlying YUV data to always be ready
+ * for use by a JPEG encoder, but the encoder will only be used (and impact frame duration) on
+ * requests that actually reference a JPEG stream.
+ * <li>The JPEG processor can run concurrently to the rest of the camera pipeline, but cannot
+ * process more than 1 capture at a time.
+ * </ul>
+ *
+ * <p>In other words, using a repeating YUV request would result in a steady frame rate
+ * (let's say it's 30 FPS). If a single JPEG request is submitted periodically,
+ * the frame rate will stay at 30 FPS (as long as we wait for the previous JPEG to return each
+ * time). If we try to submit a repeating YUV + JPEG request, then the frame rate will drop from
+ * 30 FPS.</p>
+ *
+     * <p>In general, submitting a new request with a non-zero stall time stream will <em>not</em>
+     * cause a frame rate drop unless there are still outstanding buffers for that stream from
+     * previous requests.</p>
+ *
+     * <p>Submitting a repeating request with a set of streams (call this {@code S}) is equivalent
+     * to setting the minimum frame duration to the normal minimum frame duration corresponding to
+     * {@code S}, plus the maximum stall duration for {@code S}.</p>
+ *
+     * <p>When interleaving requests with and without a stall duration, a request will stall by
+     * the maximum of the remaining stall times for each stalling stream with outstanding
+     * buffers.</p>
+ *
+ * <p>This means that a stalling request will not have an exposure start until the stall has
+ * completed.</p>
+ *
+ * <p>This should correspond to the stall duration when only that stream is active, with all
+ * processing (typically in {@code android.*.mode}) set to {@code FAST} or {@code OFF}.
+ * Setting any of the processing modes to {@code HIGH_QUALITY} effectively results in an
+ * indeterminate stall duration for all streams in a request (the regular stall calculation
+ * rules are ignored).</p>
+ *
+ * <p>The following formats may always have a stall duration:
+ * <ul>
+ * <li>{@link ImageFormat#JPEG JPEG}
+ * <li>{@link ImageFormat#RAW_SENSOR RAW16}
+ * <li>{@link ImageFormat#RAW_PRIVATE RAW_PRIVATE}
+ * </ul>
+ * </p>
+ *
+ * <p>The following formats will never have a stall duration:
+ * <ul>
+ * <li>{@link ImageFormat#YUV_420_888 YUV_420_888}
+ * <li>{@link #isOutputSupportedFor(Class) Implementation-Defined}
+ * </ul></p>
+ *
+     * <p>All other formats may or may not have an allowed stall duration on a per-capability
+     * basis; refer to {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES
+     * android.request.availableCapabilities} for more details.</p>
+ *
+ * <p>See {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}
+ * for more information about calculating the max frame rate (absent stalls).</p>
+ *
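+     * <p>As an illustrative sketch (assuming JPEG output is supported at the first listed size,
+     * and that {@code map} is this {@link StreamConfigurationMap}), the worst-case frame time of
+     * a repeating request that includes a stalling stream is the minimum frame duration plus the
+     * stall duration:</p>
+     *
+     * <pre>{@code
+     * Size size = map.getOutputSizes(ImageFormat.JPEG)[0];
+     * long minFrameNs = map.getOutputMinFrameDuration(ImageFormat.JPEG, size);
+     * long stallNs = map.getOutputStallDuration(ImageFormat.JPEG, size);
+     * // Worst-case frame time while the JPEG stream has outstanding buffers
+     * double worstCaseFps = 1e9 / (minFrameNs + stallNs);
+     * }</pre>
+     *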
+ * @param format an image format from {@link ImageFormat} or {@link PixelFormat}
+ * @param size an output-compatible size
+ * @return a stall duration {@code >=} 0 in nanoseconds
+ *
+ * @throws IllegalArgumentException if {@code format} or {@code size} was not supported
+ * @throws NullPointerException if {@code size} was {@code null}
+ *
+ * @see CaptureRequest#SENSOR_FRAME_DURATION
+ * @see ImageFormat
+ * @see PixelFormat
+ */
+ public long getOutputStallDuration(int format, Size size) {
+ checkArgumentFormatSupported(format, /*output*/true);
+
+ return getInternalFormatDuration(imageFormatToInternal(format),
+ imageFormatToDataspace(format),
+ size,
+ DURATION_STALL);
+ }
+
+ /**
+ * Get the stall duration for the class/size combination (in nanoseconds).
+ *
+     * <p>This assumes that the {@code klass} is set up to use {@link ImageFormat#PRIVATE}.
+     * For user-defined formats, use {@link #getOutputStallDuration(int, Size)}.</p>
+ *
+ * <p>{@code klass} should be one of the ones with a non-empty array returned by
+ * {@link #getOutputSizes(Class)}.</p>
+ *
+ * <p>{@code size} should be one of the ones returned by
+ * {@link #getOutputSizes(Class)}.</p>
+ *
+ * <p>See {@link #getOutputStallDuration(int, Size)} for a definition of a
+ * <em>stall duration</em>.</p>
+ *
+ * @param klass
+ * a class which is supported by {@link #isOutputSupportedFor(Class)} and has a
+ * non-empty array returned by {@link #getOutputSizes(Class)}
+ * @param size an output-compatible size
+     * @return a stall duration {@code >=} 0 in nanoseconds
+ *
+ * @throws IllegalArgumentException if {@code klass} or {@code size} was not supported
+ * @throws NullPointerException if {@code size} or {@code klass} was {@code null}
+ *
+ * @see CaptureRequest#SENSOR_FRAME_DURATION
+ * @see ImageFormat
+ * @see PixelFormat
+ */
+ public <T> long getOutputStallDuration(final Class<T> klass, final Size size) {
+ if (!isOutputSupportedFor(klass)) {
+ throw new IllegalArgumentException("klass was not supported");
+ }
+
+ return getInternalFormatDuration(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
+ HAL_DATASPACE_UNKNOWN, size, DURATION_STALL);
+ }
+
+ /**
+ * Check if this {@link StreamConfigurationMap} is equal to another
+ * {@link StreamConfigurationMap}.
+ *
+     * <p>Two maps are equal if and only if each of their respective internal arrays is equal.</p>
+ *
+ * @return {@code true} if the objects were equal, {@code false} otherwise
+ */
+ @Override
+ public boolean equals(final Object obj) {
+ if (obj == null) {
+ return false;
+ }
+ if (this == obj) {
+ return true;
+ }
+ if (obj instanceof StreamConfigurationMap) {
+ final StreamConfigurationMap other = (StreamConfigurationMap) obj;
+ // XX: do we care about order?
+ return Arrays.equals(mConfigurations, other.mConfigurations) &&
+ Arrays.equals(mMinFrameDurations, other.mMinFrameDurations) &&
+ Arrays.equals(mStallDurations, other.mStallDurations) &&
+ Arrays.equals(mDepthConfigurations, other.mDepthConfigurations) &&
+ Arrays.equals(mHighSpeedVideoConfigurations,
+ other.mHighSpeedVideoConfigurations);
+ }
+ return false;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public int hashCode() {
+ // XX: do we care about order?
+ return HashCodeHelpers.hashCodeGeneric(
+ mConfigurations, mMinFrameDurations,
+ mStallDurations,
+ mDepthConfigurations, mHighSpeedVideoConfigurations);
+ }
+
+ // Check that the argument is supported by #getOutputFormats or #getInputFormats
+ private int checkArgumentFormatSupported(int format, boolean output) {
+ checkArgumentFormat(format);
+
+ int internalFormat = imageFormatToInternal(format);
+ int internalDataspace = imageFormatToDataspace(format);
+
+ if (output) {
+ if (internalDataspace == HAL_DATASPACE_DEPTH) {
+ if (mDepthOutputFormats.indexOfKey(internalFormat) >= 0) {
+ return format;
+ }
+ } else {
+ if (mAllOutputFormats.indexOfKey(internalFormat) >= 0) {
+ return format;
+ }
+ }
+ } else {
+ if (mInputFormats.indexOfKey(internalFormat) >= 0) {
+ return format;
+ }
+ }
+
+ throw new IllegalArgumentException(String.format(
+ "format %x is not supported by this stream configuration map", format));
+ }
+
+ /**
+ * Ensures that the format is either user-defined or implementation defined.
+ *
+ * <p>If a format has a different internal representation than the public representation,
+ * passing in the public representation here will fail.</p>
+ *
+ * <p>For example if trying to use {@link ImageFormat#JPEG}:
+ * it has a different public representation than the internal representation
+ * {@code HAL_PIXEL_FORMAT_BLOB}, this check will fail.</p>
+ *
+ * <p>Any invalid/undefined formats will raise an exception.</p>
+ *
+ * @param format image format
+ * @return the format
+ *
+ * @throws IllegalArgumentException if the format was invalid
+ */
+ static int checkArgumentFormatInternal(int format) {
+ switch (format) {
+ case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+ case HAL_PIXEL_FORMAT_BLOB:
+ case HAL_PIXEL_FORMAT_RAW_OPAQUE:
+ case HAL_PIXEL_FORMAT_Y16:
+ return format;
+ case ImageFormat.JPEG:
+ throw new IllegalArgumentException(
+ "ImageFormat.JPEG is an unknown internal format");
+ default:
+ return checkArgumentFormat(format);
+ }
+ }
+
+ /**
+ * Ensures that the format is publicly user-defined in either ImageFormat or PixelFormat.
+ *
+ * <p>If a format has a different public representation than the internal representation,
+ * passing in the internal representation here will fail.</p>
+ *
+ * <p>For example if trying to use {@code HAL_PIXEL_FORMAT_BLOB}:
+ * it has a different internal representation than the public representation
+ * {@link ImageFormat#JPEG}, this check will fail.</p>
+ *
+ * <p>Any invalid/undefined formats will raise an exception, including implementation-defined.
+ * </p>
+ *
+ * <p>Note that {@code @hide} and deprecated formats will not pass this check.</p>
+ *
+ * @param format image format
+ * @return the format
+ *
+ * @throws IllegalArgumentException if the format was not user-defined
+ */
+ static int checkArgumentFormat(int format) {
+ if (!ImageFormat.isPublicFormat(format) && !PixelFormat.isPublicFormat(format)) {
+ throw new IllegalArgumentException(String.format(
+ "format 0x%x was not defined in either ImageFormat or PixelFormat", format));
+ }
+
+ return format;
+ }
+
+ /**
+ * Convert an internal format compatible with {@code graphics.h} into public-visible
+ * {@code ImageFormat}. This assumes the dataspace of the format is not HAL_DATASPACE_DEPTH.
+ *
+ * <p>In particular these formats are converted:
+ * <ul>
+ * <li>HAL_PIXEL_FORMAT_BLOB => ImageFormat.JPEG</li>
+ * </ul>
+ * </p>
+ *
+ * <p>Passing in a format which has no public equivalent will fail;
+ * as will passing in a public format which has a different internal format equivalent.
+ * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
+ *
+ * <p>All other formats are returned as-is, no further invalid check is performed.</p>
+ *
+ * <p>This function is the dual of {@link #imageFormatToInternal} for dataspaces other than
+ * HAL_DATASPACE_DEPTH.</p>
+ *
+ * @param format image format from {@link ImageFormat} or {@link PixelFormat}
+     * @return the converted public image format
+ *
+ * @throws IllegalArgumentException
+ * if {@code format} is {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED} or
+ * {@link ImageFormat#JPEG}
+ *
+ * @see ImageFormat
+ * @see PixelFormat
+ * @see #checkArgumentFormat
+ */
+ static int imageFormatToPublic(int format) {
+ switch (format) {
+ case HAL_PIXEL_FORMAT_BLOB:
+ return ImageFormat.JPEG;
+ case ImageFormat.JPEG:
+ throw new IllegalArgumentException(
+ "ImageFormat.JPEG is an unknown internal format");
+ default:
+ return format;
+ }
+ }
+
+ /**
+ * Convert an internal format compatible with {@code graphics.h} into public-visible
+ * {@code ImageFormat}. This assumes the dataspace of the format is HAL_DATASPACE_DEPTH.
+ *
+ * <p>In particular these formats are converted:
+ * <ul>
+ * <li>HAL_PIXEL_FORMAT_BLOB => ImageFormat.DEPTH_POINT_CLOUD
+ * <li>HAL_PIXEL_FORMAT_Y16 => ImageFormat.DEPTH16
+ * </ul>
+ * </p>
+ *
+ * <p>Passing in an implementation-defined format which has no public equivalent will fail;
+ * as will passing in a public format which has a different internal format equivalent.
+ * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
+ *
+ * <p>All other formats are returned as-is, no further invalid check is performed.</p>
+ *
+ * <p>This function is the dual of {@link #imageFormatToInternal} for formats associated with
+ * HAL_DATASPACE_DEPTH.</p>
+ *
+ * @param format image format from {@link ImageFormat} or {@link PixelFormat}
+     * @return the converted public image format
+ *
+ * @throws IllegalArgumentException
+ * if {@code format} is {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED} or
+ * {@link ImageFormat#JPEG}
+ *
+ * @see ImageFormat
+ * @see PixelFormat
+ * @see #checkArgumentFormat
+ */
+ static int depthFormatToPublic(int format) {
+ switch (format) {
+ case HAL_PIXEL_FORMAT_BLOB:
+ return ImageFormat.DEPTH_POINT_CLOUD;
+ case HAL_PIXEL_FORMAT_Y16:
+ return ImageFormat.DEPTH16;
+ case HAL_PIXEL_FORMAT_RAW16:
+ return ImageFormat.RAW_DEPTH;
+ case ImageFormat.JPEG:
+ throw new IllegalArgumentException(
+ "ImageFormat.JPEG is an unknown internal format");
+ case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
+ throw new IllegalArgumentException(
+ "IMPLEMENTATION_DEFINED must not leak to public API");
+ default:
+ throw new IllegalArgumentException(
+ "Unknown DATASPACE_DEPTH format " + format);
+ }
+ }
+
+ /**
+ * Convert image formats from internal to public formats (in-place).
+ *
+ * @param formats an array of image formats
+ * @return {@code formats}
+ *
+ * @see #imageFormatToPublic
+ */
+ static int[] imageFormatToPublic(int[] formats) {
+ if (formats == null) {
+ return null;
+ }
+
+ for (int i = 0; i < formats.length; ++i) {
+ formats[i] = imageFormatToPublic(formats[i]);
+ }
+
+ return formats;
+ }
+
+ /**
+ * Convert a public format compatible with {@code ImageFormat} to an internal format
+ * from {@code graphics.h}.
+ *
+ * <p>In particular these formats are converted:
+ * <ul>
+ * <li>ImageFormat.JPEG => HAL_PIXEL_FORMAT_BLOB
+ * <li>ImageFormat.DEPTH_POINT_CLOUD => HAL_PIXEL_FORMAT_BLOB
+ * <li>ImageFormat.DEPTH16 => HAL_PIXEL_FORMAT_Y16
+ * </ul>
+ * </p>
+ *
+ * <p>Passing in an internal format which has a different public format equivalent will fail.
+ * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
+ *
+ * <p>All other formats are returned as-is, no invalid check is performed.</p>
+ *
+ * <p>This function is the dual of {@link #imageFormatToPublic}.</p>
+ *
+ * @param format public image format from {@link ImageFormat} or {@link PixelFormat}
+     * @return the converted internal format
+ *
+ * @see ImageFormat
+ * @see PixelFormat
+ *
+ * @throws IllegalArgumentException
+ * if {@code format} was {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}
+ */
+ static int imageFormatToInternal(int format) {
+ switch (format) {
+ case ImageFormat.JPEG:
+ case ImageFormat.DEPTH_POINT_CLOUD:
+ return HAL_PIXEL_FORMAT_BLOB;
+ case ImageFormat.DEPTH16:
+ return HAL_PIXEL_FORMAT_Y16;
+ case ImageFormat.RAW_DEPTH:
+ return HAL_PIXEL_FORMAT_RAW16;
+ default:
+ return format;
+ }
+ }
+
+ /**
+ * Convert a public format compatible with {@code ImageFormat} to an internal dataspace
+ * from {@code graphics.h}.
+ *
+ * <p>In particular these formats are converted:
+ * <ul>
+ * <li>ImageFormat.JPEG => HAL_DATASPACE_V0_JFIF
+ * <li>ImageFormat.DEPTH_POINT_CLOUD => HAL_DATASPACE_DEPTH
+ * <li>ImageFormat.DEPTH16 => HAL_DATASPACE_DEPTH
+ * <li>others => HAL_DATASPACE_UNKNOWN
+ * </ul>
+ * </p>
+ *
+ * <p>Passing in an implementation-defined format here will fail (it's not a public format);
+ * as will passing in an internal format which has a different public format equivalent.
+ * See {@link #checkArgumentFormat} for more details about a legal public format.</p>
+ *
+ * <p>All other formats are returned as-is, no invalid check is performed.</p>
+ *
+ * <p>This function is the dual of {@link #imageFormatToPublic}.</p>
+ *
+ * @param format public image format from {@link ImageFormat} or {@link PixelFormat}
+     * @return the internal dataspace for the given public format
+ *
+ * @see ImageFormat
+ * @see PixelFormat
+ *
+ * @throws IllegalArgumentException
+ * if {@code format} was {@code HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED}
+ */
+ static int imageFormatToDataspace(int format) {
+ switch (format) {
+ case ImageFormat.JPEG:
+ return HAL_DATASPACE_V0_JFIF;
+ case ImageFormat.DEPTH_POINT_CLOUD:
+ case ImageFormat.DEPTH16:
+ case ImageFormat.RAW_DEPTH:
+ return HAL_DATASPACE_DEPTH;
+ default:
+ return HAL_DATASPACE_UNKNOWN;
+ }
+ }
+
+ /**
+ * Convert image formats from public to internal formats (in-place).
+ *
+ * @param formats an array of image formats
+ * @return {@code formats}
+ *
+ * @see #imageFormatToInternal
+ *
+ * @hide
+ */
+ public static int[] imageFormatToInternal(int[] formats) {
+ if (formats == null) {
+ return null;
+ }
+
+ for (int i = 0; i < formats.length; ++i) {
+ formats[i] = imageFormatToInternal(formats[i]);
+ }
+
+ return formats;
+ }
+
+ private Size[] getPublicFormatSizes(int format, boolean output, boolean highRes) {
+ try {
+ checkArgumentFormatSupported(format, output);
+ } catch (IllegalArgumentException e) {
+ return null;
+ }
+
+ int internalFormat = imageFormatToInternal(format);
+ int dataspace = imageFormatToDataspace(format);
+
+ return getInternalFormatSizes(internalFormat, dataspace, output, highRes);
+ }
+
+ private Size[] getInternalFormatSizes(int format, int dataspace,
+ boolean output, boolean highRes) {
+ // All depth formats are non-high-res.
+ if (dataspace == HAL_DATASPACE_DEPTH && highRes) {
+ return new Size[0];
+ }
+
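+        // Pick the size-count map that matches this query: input formats, depth outputs,
+        // slow high-res outputs, or regular outputs.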
+ SparseIntArray formatsMap =
+ !output ? mInputFormats :
+ dataspace == HAL_DATASPACE_DEPTH ? mDepthOutputFormats :
+ highRes ? mHighResOutputFormats :
+ mOutputFormats;
+
+ int sizesCount = formatsMap.get(format);
+ if ( ((!output || dataspace == HAL_DATASPACE_DEPTH) && sizesCount == 0) ||
+ (output && dataspace != HAL_DATASPACE_DEPTH && mAllOutputFormats.get(format) == 0)) {
+ // Only throw if this is really not supported at all
+ throw new IllegalArgumentException("format not available");
+ }
+
+ Size[] sizes = new Size[sizesCount];
+ int sizeIndex = 0;
+
+ StreamConfiguration[] configurations =
+ (dataspace == HAL_DATASPACE_DEPTH) ? mDepthConfigurations : mConfigurations;
+ StreamConfigurationDuration[] minFrameDurations =
+ (dataspace == HAL_DATASPACE_DEPTH) ? mDepthMinFrameDurations : mMinFrameDurations;
+
+ for (StreamConfiguration config : configurations) {
+ int fmt = config.getFormat();
+ if (fmt == format && config.isOutput() == output) {
+ if (output && mListHighResolution) {
+ // Filter slow high-res output formats; include for
+ // highRes, remove for !highRes
+ long duration = 0;
+ for (int i = 0; i < minFrameDurations.length; i++) {
+ StreamConfigurationDuration d = minFrameDurations[i];
+ if (d.getFormat() == fmt &&
+ d.getWidth() == config.getSize().getWidth() &&
+ d.getHeight() == config.getSize().getHeight()) {
+ duration = d.getDuration();
+ break;
+ }
+ }
+ if (dataspace != HAL_DATASPACE_DEPTH &&
+ highRes != (duration > DURATION_20FPS_NS)) {
+ continue;
+ }
+ }
+ sizes[sizeIndex++] = config.getSize();
+ }
+ }
+
+ if (sizeIndex != sizesCount) {
+ throw new AssertionError(
+ "Too few sizes (expected " + sizesCount + ", actual " + sizeIndex + ")");
+ }
+
+ return sizes;
+ }
+
+    /** Get the list of publicly visible output formats; does not include IMPL_DEFINED */
+ private int[] getPublicFormats(boolean output) {
+ int[] formats = new int[getPublicFormatCount(output)];
+
+ int i = 0;
+
+ SparseIntArray map = getFormatsMap(output);
+ for (int j = 0; j < map.size(); j++) {
+ int format = map.keyAt(j);
+ formats[i++] = imageFormatToPublic(format);
+ }
+ if (output) {
+ for (int j = 0; j < mDepthOutputFormats.size(); j++) {
+ formats[i++] = depthFormatToPublic(mDepthOutputFormats.keyAt(j));
+ }
+ }
+ if (formats.length != i) {
+ throw new AssertionError("Too few formats " + i + ", expected " + formats.length);
+ }
+
+ return formats;
+ }
+
+ /** Get the format -> size count map for either output or input formats */
+ private SparseIntArray getFormatsMap(boolean output) {
+ return output ? mAllOutputFormats : mInputFormats;
+ }
+
+ private long getInternalFormatDuration(int format, int dataspace, Size size, int duration) {
+        // Assume the format is already checked, since it is internal.
+
+ if (!isSupportedInternalConfiguration(format, dataspace, size)) {
+ throw new IllegalArgumentException("size was not supported");
+ }
+
+ StreamConfigurationDuration[] durations = getDurations(duration, dataspace);
+
+ for (StreamConfigurationDuration configurationDuration : durations) {
+ if (configurationDuration.getFormat() == format &&
+ configurationDuration.getWidth() == size.getWidth() &&
+ configurationDuration.getHeight() == size.getHeight()) {
+ return configurationDuration.getDuration();
+ }
+ }
+ // Default duration is '0' (unsupported/no extra stall)
+ return 0;
+ }
+
+ /**
+ * Get the durations array for the kind of duration
+ *
+ * @see #DURATION_MIN_FRAME
+ * @see #DURATION_STALL
+ * */
+ private StreamConfigurationDuration[] getDurations(int duration, int dataspace) {
+ switch (duration) {
+ case DURATION_MIN_FRAME:
+ return (dataspace == HAL_DATASPACE_DEPTH) ?
+ mDepthMinFrameDurations : mMinFrameDurations;
+ case DURATION_STALL:
+ return (dataspace == HAL_DATASPACE_DEPTH) ?
+ mDepthStallDurations : mStallDurations;
+ default:
+ throw new IllegalArgumentException("duration was invalid");
+ }
+ }
+
+ /** Count the number of publicly-visible output formats */
+ private int getPublicFormatCount(boolean output) {
+ SparseIntArray formatsMap = getFormatsMap(output);
+ int size = formatsMap.size();
+ if (output) {
+ size += mDepthOutputFormats.size();
+ }
+
+ return size;
+ }
+
+ private static <T> boolean arrayContains(T[] array, T element) {
+ if (array == null) {
+ return false;
+ }
+
+ for (T el : array) {
+ if (Objects.equals(el, element)) {
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ private boolean isSupportedInternalConfiguration(int format, int dataspace,
+ Size size) {
+ StreamConfiguration[] configurations =
+ (dataspace == HAL_DATASPACE_DEPTH) ? mDepthConfigurations : mConfigurations;
+
+ for (int i = 0; i < configurations.length; i++) {
+ if (configurations[i].getFormat() == format &&
+ configurations[i].getSize().equals(size)) {
+ return true;
+ }
+ }
+
+ return false;
+ }
+
+ /**
+ * Return this {@link StreamConfigurationMap} as a string representation.
+ *
+ * <p>{@code "StreamConfigurationMap(Outputs([w:%d, h:%d, format:%s(%d), min_duration:%d,
+ * stall:%d], ... [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d]), Inputs([w:%d, h:%d,
+ * format:%s(%d)], ... [w:%d, h:%d, format:%s(%d)]), ValidOutputFormatsForInput(
+ * [in:%d, out:%d, ... %d], ... [in:%d, out:%d, ... %d]), HighSpeedVideoConfigurations(
+ * [w:%d, h:%d, min_fps:%d, max_fps:%d], ... [w:%d, h:%d, min_fps:%d, max_fps:%d]))"}.</p>
+ *
+ * <p>{@code Outputs([w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d], ...
+ * [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d])}, where
+ * {@code [w:%d, h:%d, format:%s(%d), min_duration:%d, stall:%d]} represents an output
+     * configuration's width, height, format, minimum frame duration in nanoseconds, and stall
+ * duration in nanoseconds.</p>
+ *
+ * <p>{@code Inputs([w:%d, h:%d, format:%s(%d)], ... [w:%d, h:%d, format:%s(%d)])}, where
+ * {@code [w:%d, h:%d, format:%s(%d)]} represents an input configuration's width, height, and
+ * format.</p>
+ *
+ * <p>{@code ValidOutputFormatsForInput([in:%s(%d), out:%s(%d), ... %s(%d)],
+ * ... [in:%s(%d), out:%s(%d), ... %s(%d)])}, where {@code [in:%s(%d), out:%s(%d), ... %s(%d)]}
+     * represents an input format and its valid output formats.</p>
+ *
+ * <p>{@code HighSpeedVideoConfigurations([w:%d, h:%d, min_fps:%d, max_fps:%d],
+ * ... [w:%d, h:%d, min_fps:%d, max_fps:%d])}, where
+ * {@code [w:%d, h:%d, min_fps:%d, max_fps:%d]} represents a high speed video output
+     * configuration's width, height, minimum frame rate, and maximum frame rate.</p>
+ *
+ * @return string representation of {@link StreamConfigurationMap}
+ */
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("StreamConfiguration(");
+ appendOutputsString(sb);
+ sb.append(", ");
+ appendHighResOutputsString(sb);
+ sb.append(", ");
+ appendInputsString(sb);
+ sb.append(", ");
+ appendValidOutputFormatsForInputString(sb);
+ sb.append(", ");
+ appendHighSpeedVideoConfigurationsString(sb);
+ sb.append(")");
+
+ return sb.toString();
+ }
+
+ private void appendOutputsString(StringBuilder sb) {
+ sb.append("Outputs(");
+ int[] formats = getOutputFormats();
+ for (int format : formats) {
+ Size[] sizes = getOutputSizes(format);
+ for (Size size : sizes) {
+ long minFrameDuration = getOutputMinFrameDuration(format, size);
+ long stallDuration = getOutputStallDuration(format, size);
+ sb.append(String.format("[w:%d, h:%d, format:%s(%d), min_duration:%d, " +
+ "stall:%d], ", size.getWidth(), size.getHeight(), formatToString(format),
+ format, minFrameDuration, stallDuration));
+ }
+ }
+ // Remove the pending ", "
+ if (sb.charAt(sb.length() - 1) == ' ') {
+ sb.delete(sb.length() - 2, sb.length());
+ }
+ sb.append(")");
+ }
+
+ private void appendHighResOutputsString(StringBuilder sb) {
+ sb.append("HighResolutionOutputs(");
+ int[] formats = getOutputFormats();
+ for (int format : formats) {
+ Size[] sizes = getHighResolutionOutputSizes(format);
+ if (sizes == null) continue;
+ for (Size size : sizes) {
+ long minFrameDuration = getOutputMinFrameDuration(format, size);
+ long stallDuration = getOutputStallDuration(format, size);
+ sb.append(String.format("[w:%d, h:%d, format:%s(%d), min_duration:%d, " +
+ "stall:%d], ", size.getWidth(), size.getHeight(), formatToString(format),
+ format, minFrameDuration, stallDuration));
+ }
+ }
+ // Remove the pending ", "
+ if (sb.charAt(sb.length() - 1) == ' ') {
+ sb.delete(sb.length() - 2, sb.length());
+ }
+ sb.append(")");
+ }
+
+ private void appendInputsString(StringBuilder sb) {
+ sb.append("Inputs(");
+ int[] formats = getInputFormats();
+ for (int format : formats) {
+ Size[] sizes = getInputSizes(format);
+ for (Size size : sizes) {
+ sb.append(String.format("[w:%d, h:%d, format:%s(%d)], ", size.getWidth(),
+ size.getHeight(), formatToString(format), format));
+ }
+ }
+ // Remove the pending ", "
+ if (sb.charAt(sb.length() - 1) == ' ') {
+ sb.delete(sb.length() - 2, sb.length());
+ }
+ sb.append(")");
+ }
+
+ private void appendValidOutputFormatsForInputString(StringBuilder sb) {
+ sb.append("ValidOutputFormatsForInput(");
+ int[] inputFormats = getInputFormats();
+ for (int inputFormat : inputFormats) {
+ sb.append(String.format("[in:%s(%d), out:", formatToString(inputFormat), inputFormat));
+ int[] outputFormats = getValidOutputFormatsForInput(inputFormat);
+ for (int i = 0; i < outputFormats.length; i++) {
+ sb.append(String.format("%s(%d)", formatToString(outputFormats[i]),
+ outputFormats[i]));
+ if (i < outputFormats.length - 1) {
+ sb.append(", ");
+ }
+ }
+ sb.append("], ");
+ }
+ // Remove the pending ", "
+ if (sb.charAt(sb.length() - 1) == ' ') {
+ sb.delete(sb.length() - 2, sb.length());
+ }
+ sb.append(")");
+ }
+
+ private void appendHighSpeedVideoConfigurationsString(StringBuilder sb) {
+ sb.append("HighSpeedVideoConfigurations(");
+ Size[] sizes = getHighSpeedVideoSizes();
+ for (Size size : sizes) {
+ Range<Integer>[] ranges = getHighSpeedVideoFpsRangesFor(size);
+ for (Range<Integer> range : ranges) {
+ sb.append(String.format("[w:%d, h:%d, min_fps:%d, max_fps:%d], ", size.getWidth(),
+ size.getHeight(), range.getLower(), range.getUpper()));
+ }
+ }
+ // Remove the pending ", "
+ if (sb.charAt(sb.length() - 1) == ' ') {
+ sb.delete(sb.length() - 2, sb.length());
+ }
+ sb.append(")");
+ }
+
+ private String formatToString(int format) {
+ switch (format) {
+ case ImageFormat.YV12:
+ return "YV12";
+ case ImageFormat.YUV_420_888:
+ return "YUV_420_888";
+ case ImageFormat.NV21:
+ return "NV21";
+ case ImageFormat.NV16:
+ return "NV16";
+ case PixelFormat.RGB_565:
+ return "RGB_565";
+ case PixelFormat.RGBA_8888:
+ return "RGBA_8888";
+ case PixelFormat.RGBX_8888:
+ return "RGBX_8888";
+ case PixelFormat.RGB_888:
+ return "RGB_888";
+ case ImageFormat.JPEG:
+ return "JPEG";
+ case ImageFormat.YUY2:
+ return "YUY2";
+ case ImageFormat.Y8:
+ return "Y8";
+ case ImageFormat.Y16:
+ return "Y16";
+ case ImageFormat.RAW_SENSOR:
+ return "RAW_SENSOR";
+ case ImageFormat.RAW_PRIVATE:
+ return "RAW_PRIVATE";
+ case ImageFormat.RAW10:
+ return "RAW10";
+ case ImageFormat.DEPTH16:
+ return "DEPTH16";
+ case ImageFormat.DEPTH_POINT_CLOUD:
+ return "DEPTH_POINT_CLOUD";
+ case ImageFormat.RAW_DEPTH:
+ return "RAW_DEPTH";
+ case ImageFormat.PRIVATE:
+ return "PRIVATE";
+ default:
+ return "UNKNOWN";
+ }
+ }
+
+ // from system/core/include/system/graphics.h
+ private static final int HAL_PIXEL_FORMAT_RAW16 = 0x20;
+ private static final int HAL_PIXEL_FORMAT_BLOB = 0x21;
+ private static final int HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED = 0x22;
+ private static final int HAL_PIXEL_FORMAT_YCbCr_420_888 = 0x23;
+ private static final int HAL_PIXEL_FORMAT_RAW_OPAQUE = 0x24;
+ private static final int HAL_PIXEL_FORMAT_RAW10 = 0x25;
+ private static final int HAL_PIXEL_FORMAT_RAW12 = 0x26;
+ private static final int HAL_PIXEL_FORMAT_Y16 = 0x20363159;
+
+
+ private static final int HAL_DATASPACE_STANDARD_SHIFT = 16;
+ private static final int HAL_DATASPACE_TRANSFER_SHIFT = 22;
+ private static final int HAL_DATASPACE_RANGE_SHIFT = 27;
+
+ private static final int HAL_DATASPACE_UNKNOWN = 0x0;
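+    // Assumed field meanings from the HAL dataspace encoding: BT.601 625-line standard,
+    // SMPTE 170M transfer, full range; numerically 0x8C20000.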
+ private static final int HAL_DATASPACE_V0_JFIF =
+ (2 << HAL_DATASPACE_STANDARD_SHIFT) |
+ (3 << HAL_DATASPACE_TRANSFER_SHIFT) |
+ (1 << HAL_DATASPACE_RANGE_SHIFT);
+
+ private static final int HAL_DATASPACE_DEPTH = 0x1000;
+
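+    /** Frame duration threshold of 50 ms (20 FPS); outputs slower than this are treated as
+     * slow high-resolution outputs */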
+ private static final long DURATION_20FPS_NS = 50000000L;
+ /**
+ * @see #getDurations(int, int)
+ */
+ private static final int DURATION_MIN_FRAME = 0;
+ private static final int DURATION_STALL = 1;
+
+ private final StreamConfiguration[] mConfigurations;
+ private final StreamConfigurationDuration[] mMinFrameDurations;
+ private final StreamConfigurationDuration[] mStallDurations;
+
+ private final StreamConfiguration[] mDepthConfigurations;
+ private final StreamConfigurationDuration[] mDepthMinFrameDurations;
+ private final StreamConfigurationDuration[] mDepthStallDurations;
+
+ private final HighSpeedVideoConfiguration[] mHighSpeedVideoConfigurations;
+ private final ReprocessFormatsMap mInputOutputFormatsMap;
+
+ private final boolean mListHighResolution;
+
+ /** internal format -> num output sizes mapping, not including slow high-res sizes, for
+ * non-depth dataspaces */
+ private final SparseIntArray mOutputFormats = new SparseIntArray();
+ /** internal format -> num output sizes mapping for slow high-res sizes, for non-depth
+ * dataspaces */
+ private final SparseIntArray mHighResOutputFormats = new SparseIntArray();
+ /** internal format -> num output sizes mapping for all non-depth dataspaces */
+ private final SparseIntArray mAllOutputFormats = new SparseIntArray();
+ /** internal format -> num input sizes mapping, for input reprocessing formats */
+ private final SparseIntArray mInputFormats = new SparseIntArray();
+ /** internal format -> num depth output sizes mapping, for HAL_DATASPACE_DEPTH */
+ private final SparseIntArray mDepthOutputFormats = new SparseIntArray();
+ /** High speed video Size -> FPS range count mapping*/
+ private final HashMap</*HighSpeedVideoSize*/Size, /*Count*/Integer> mHighSpeedVideoSizeMap =
+ new HashMap<Size, Integer>();
+ /** High speed video FPS range -> Size count mapping*/
+ private final HashMap</*HighSpeedVideoFpsRange*/Range<Integer>, /*Count*/Integer>
+ mHighSpeedVideoFpsRangeMap = new HashMap<Range<Integer>, Integer>();
+
+}
diff --git a/android/hardware/camera2/params/TonemapCurve.java b/android/hardware/camera2/params/TonemapCurve.java
new file mode 100644
index 00000000..2d7bbaa2
--- /dev/null
+++ b/android/hardware/camera2/params/TonemapCurve.java
@@ -0,0 +1,351 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.params;
+
+import static com.android.internal.util.Preconditions.*;
+
+import android.graphics.PointF;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.utils.HashCodeHelpers;
+
+import java.util.Arrays;
+
+/**
+ * Immutable class for describing a {@code 2 x M x 3} tonemap curve of floats.
+ *
+ * <p>This defines red, green, and blue curves that the {@link CameraDevice} will
+ * use as the tonemapping/contrast/gamma curve when {@link CaptureRequest#TONEMAP_MODE} is
+ * set to {@link CameraMetadata#TONEMAP_MODE_CONTRAST_CURVE}.</p>
+ *
+ * <p>The total number of points {@code (Pin, Pout)} for each color channel can be no more than
+ * {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS}.</p>
+ *
+ * <p>The coordinate system for each point is within the inclusive range
+ * [{@value #LEVEL_BLACK}, {@value #LEVEL_WHITE}].</p>
+ *
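+ * <p>For example, an identity (linear) curve for all three channels can be built from two
+ * points per channel (a minimal sketch; real curves typically use more points):</p>
+ *
+ * <pre>{@code
+ * float[] linear = { TonemapCurve.LEVEL_BLACK, TonemapCurve.LEVEL_BLACK,
+ *                    TonemapCurve.LEVEL_WHITE, TonemapCurve.LEVEL_WHITE };
+ * TonemapCurve curve = new TonemapCurve(linear, linear, linear);
+ * }</pre>
+ *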
+ * @see CaptureRequest#TONEMAP_CURVE_BLUE
+ * @see CaptureRequest#TONEMAP_CURVE_GREEN
+ * @see CaptureRequest#TONEMAP_CURVE_RED
+ * @see CameraMetadata#TONEMAP_MODE_CONTRAST_CURVE
+ * @see CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS
+ */
+public final class TonemapCurve {
+ /**
+ * Lower bound tonemap value corresponding to pure black for a single color channel.
+ */
+ public static final float LEVEL_BLACK = 0.0f;
+
+ /**
+ * Upper bound tonemap value corresponding to a pure white for a single color channel.
+ */
+ public static final float LEVEL_WHITE = 1.0f;
+
+ /**
+ * Number of elements in a {@code (Pin, Pout)} point;
+ */
+ public static final int POINT_SIZE = 2;
+
+ /**
+ * Index of the red color channel curve.
+ */
+ public static final int CHANNEL_RED = 0;
+ /**
+ * Index of the green color channel curve.
+ */
+ public static final int CHANNEL_GREEN = 1;
+ /**
+ * Index of the blue color channel curve.
+ */
+ public static final int CHANNEL_BLUE = 2;
+
+ /**
+ * Create a new immutable TonemapCurve instance.
+ *
+ * <p>Values are stored as a contiguous array of {@code (Pin, Pout)} points.</p>
+ *
+     * <p>Each array may have an independent length, but should contain at most
+     * {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS} * {@value #POINT_SIZE} elements and
+     * must contain at least 2 * {@value #POINT_SIZE} elements.</p>
+ *
+ * <p>All sub-elements must be in the inclusive range of
+ * [{@value #LEVEL_BLACK}, {@value #LEVEL_WHITE}].</p>
+ *
+ * <p>This constructor copies the array contents and does not retain ownership of the array.</p>
+ *
+ * @param red An array of elements whose length is divisible by {@value #POINT_SIZE}
+ * @param green An array of elements whose length is divisible by {@value #POINT_SIZE}
+ * @param blue An array of elements whose length is divisible by {@value #POINT_SIZE}
+ *
+ * @throws IllegalArgumentException
+ * if any of input array length is invalid,
+ * or if any of the elements in the array are not in the range of
+ * [{@value #LEVEL_BLACK}, {@value #LEVEL_WHITE}]
+ * @throws NullPointerException
+ * if any of the parameters are {@code null}
+ */
+ public TonemapCurve(float[] red, float[] green, float[] blue) {
+ // TODO: maxCurvePoints check?
+
+ checkNotNull(red, "red must not be null");
+ checkNotNull(green, "green must not be null");
+ checkNotNull(blue, "blue must not be null");
+
+ checkArgumentArrayLengthDivisibleBy(red, POINT_SIZE, "red");
+ checkArgumentArrayLengthDivisibleBy(green, POINT_SIZE, "green");
+ checkArgumentArrayLengthDivisibleBy(blue, POINT_SIZE, "blue");
+
+ checkArgumentArrayLengthNoLessThan(red, MIN_CURVE_LENGTH, "red");
+ checkArgumentArrayLengthNoLessThan(green, MIN_CURVE_LENGTH, "green");
+ checkArgumentArrayLengthNoLessThan(blue, MIN_CURVE_LENGTH, "blue");
+
+ checkArrayElementsInRange(red, LEVEL_BLACK, LEVEL_WHITE, "red");
+ checkArrayElementsInRange(green, LEVEL_BLACK, LEVEL_WHITE, "green");
+ checkArrayElementsInRange(blue, LEVEL_BLACK, LEVEL_WHITE, "blue");
+
+ mRed = Arrays.copyOf(red, red.length);
+ mGreen = Arrays.copyOf(green, green.length);
+ mBlue = Arrays.copyOf(blue, blue.length);
+ }
+
+ private static void checkArgumentArrayLengthDivisibleBy(float[] array,
+ int divisible, String arrayName) {
+ if (array.length % divisible != 0) {
+ throw new IllegalArgumentException(arrayName + " size must be divisible by "
+ + divisible);
+ }
+ }
+
+ private static int checkArgumentColorChannel(int colorChannel) {
+ switch (colorChannel) {
+ case CHANNEL_RED:
+ case CHANNEL_GREEN:
+ case CHANNEL_BLUE:
+ break;
+ default:
+ throw new IllegalArgumentException("colorChannel out of range");
+ }
+
+ return colorChannel;
+ }
+
+ private static void checkArgumentArrayLengthNoLessThan(float[] array, int minLength,
+ String arrayName) {
+ if (array.length < minLength) {
+ throw new IllegalArgumentException(arrayName + " size must be at least "
+ + minLength);
+ }
+ }
+
+ /**
+ * Get the number of points stored in this tonemap curve for the specified color channel.
+ *
+ * @param colorChannel one of {@link #CHANNEL_RED}, {@link #CHANNEL_GREEN}, {@link #CHANNEL_BLUE}
+ * @return number of points stored in this tonemap for that color's curve (>= 0)
+ *
+ * @throws IllegalArgumentException if {@code colorChannel} was out of range
+ */
+ public int getPointCount(int colorChannel) {
+ checkArgumentColorChannel(colorChannel);
+
+ return getCurve(colorChannel).length / POINT_SIZE;
+ }
+
+ /**
+ * Get the point for a color channel at a specified index.
+ *
+     * <p>The index must be at least 0 and less than {@link #getPointCount(int)} for
+     * that {@code colorChannel}.</p>
+ *
+ * <p>All returned coordinates in the point are between the range of
+ * [{@value #LEVEL_BLACK}, {@value #LEVEL_WHITE}].</p>
+ *
+ * @param colorChannel {@link #CHANNEL_RED}, {@link #CHANNEL_GREEN}, or {@link #CHANNEL_BLUE}
+     * @param index at least 0 and less than {@code getPointCount(colorChannel)}
+ * @return the {@code (Pin, Pout)} pair mapping the tone for that index
+ *
+ * @throws IllegalArgumentException if {@code colorChannel} or {@code index} was out of range
+ *
+ * @see #LEVEL_BLACK
+ * @see #LEVEL_WHITE
+ */
+ public PointF getPoint(int colorChannel, int index) {
+ checkArgumentColorChannel(colorChannel);
+ if (index < 0 || index >= getPointCount(colorChannel)) {
+ throw new IllegalArgumentException("index out of range");
+ }
+
+ final float[] curve = getCurve(colorChannel);
+
+ final float pIn = curve[index * POINT_SIZE + OFFSET_POINT_IN];
+ final float pOut = curve[index * POINT_SIZE + OFFSET_POINT_OUT];
+
+ return new PointF(pIn, pOut);
+ }
+
+ /**
+ * Copy the color curve for a single color channel from this tonemap curve into the destination.
+ *
+ * <p>
+ * <!--The output is encoded the same as in the constructor -->
+     * Values are stored as packed {@code (Pin, Pout)} points, and there are a total of
+ * {@link #getPointCount} points for that respective channel.</p>
+ *
+ * <p>All returned coordinates are between the range of
+ * [{@value #LEVEL_BLACK}, {@value #LEVEL_WHITE}].</p>
+ *
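+     * <p>A short usage sketch (the channel choice is illustrative):</p>
+     *
+     * <pre>{@code
+     * float[] red = new float[
+     *         curve.getPointCount(TonemapCurve.CHANNEL_RED) * TonemapCurve.POINT_SIZE];
+     * curve.copyColorCurve(TonemapCurve.CHANNEL_RED, red, 0); // offset 0
+     * }</pre>
+     *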
+ * @param destination
+ * an array big enough to hold at least {@link #getPointCount} {@code *}
+ * {@link #POINT_SIZE} elements after the {@code offset}
+ * @param offset
+ * a non-negative offset into the array
+ * @throws NullPointerException
+ * If {@code destination} was {@code null}
+ * @throws IllegalArgumentException
+ * If offset was negative
+ * @throws ArrayIndexOutOfBoundsException
+ * If there's not enough room to write the elements at the specified destination and
+ * offset.
+ *
+ * @see CaptureRequest#TONEMAP_CURVE_BLUE
+ * @see CaptureRequest#TONEMAP_CURVE_RED
+ * @see CaptureRequest#TONEMAP_CURVE_GREEN
+ * @see #LEVEL_BLACK
+ * @see #LEVEL_WHITE
+ */
+ public void copyColorCurve(int colorChannel, float[] destination,
+ int offset) {
+ checkArgumentNonnegative(offset, "offset must not be negative");
+ checkNotNull(destination, "destination must not be null");
+
+        // Ensure there is enough room after the offset for the whole curve
+        if (destination.length - offset < getPointCount(colorChannel) * POINT_SIZE) {
+ throw new ArrayIndexOutOfBoundsException("destination too small to fit elements");
+ }
+
+ float[] curve = getCurve(colorChannel);
+ System.arraycopy(curve, /*srcPos*/0, destination, offset, curve.length);
+ }
+
+ /**
+ * Check if this TonemapCurve is equal to another TonemapCurve.
+ *
+     * <p>Two curves are equal if and only if all of their elements are
+     * {@link Object#equals equal}.</p>
+ *
+ * @return {@code true} if the objects were equal, {@code false} otherwise
+ */
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == null) {
+ return false;
+ }
+ if (this == obj) {
+ return true;
+ }
+ if (obj instanceof TonemapCurve) {
+ final TonemapCurve other = (TonemapCurve) obj;
+ return Arrays.equals(mRed, other.mRed) &&
+ Arrays.equals(mGreen, other.mGreen) &&
+ Arrays.equals(mBlue, other.mBlue);
+ }
+ return false;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public int hashCode() {
+ if (mHashCalculated) {
+ // Avoid re-calculating hash. Data is immutable so this is both legal and faster.
+ return mHashCode;
+ }
+
+ mHashCode = HashCodeHelpers.hashCodeGeneric(mRed, mGreen, mBlue);
+ mHashCalculated = true;
+
+ return mHashCode;
+ }
+
+ /**
+ * Return the TonemapCurve as a string representation.
+ *
+ * <p> {@code "TonemapCurve{R:[(%f, %f), (%f, %f) ... (%f, %f)], G:[(%f, %f), (%f, %f) ...
+ * (%f, %f)], B:[(%f, %f), (%f, %f) ... (%f, %f)]}"},
+ * where each {@code (%f, %f)} respectively represents one point of the corresponding
+ * tonemap curve. </p>
+ *
+ * @return string representation of {@link TonemapCurve}
+ */
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("TonemapCurve{");
+ sb.append("R:");
+ sb.append(curveToString(CHANNEL_RED));
+ sb.append(", G:");
+ sb.append(curveToString(CHANNEL_GREEN));
+ sb.append(", B:");
+ sb.append(curveToString(CHANNEL_BLUE));
+ sb.append("}");
+ return sb.toString();
+ }
+
+ private String curveToString(int colorChannel) {
+ checkArgumentColorChannel(colorChannel);
+ StringBuilder sb = new StringBuilder("[");
+ float[] curve = getCurve(colorChannel);
+ int pointCount = curve.length / POINT_SIZE;
+ for (int i = 0, j = 0; i < pointCount; i++, j += 2) {
+ sb.append("(");
+ sb.append(curve[j]);
+ sb.append(", ");
+ sb.append(curve[j+1]);
+ sb.append("), ");
+ }
+ // trim extra ", " at the end. Guaranteed to work because pointCount >= 2
+ sb.setLength(sb.length() - 2);
+ sb.append("]");
+ return sb.toString();
+ }
+
+ private float[] getCurve(int colorChannel) {
+ switch (colorChannel) {
+ case CHANNEL_RED:
+ return mRed;
+ case CHANNEL_GREEN:
+ return mGreen;
+ case CHANNEL_BLUE:
+ return mBlue;
+ default:
+ throw new AssertionError("colorChannel out of range");
+ }
+ }
+
+ private final static int OFFSET_POINT_IN = 0;
+ private final static int OFFSET_POINT_OUT = 1;
+ private final static int TONEMAP_MIN_CURVE_POINTS = 2;
+ private final static int MIN_CURVE_LENGTH = TONEMAP_MIN_CURVE_POINTS * POINT_SIZE;
+
+ private final float[] mRed;
+ private final float[] mGreen;
+ private final float[] mBlue;
+
+ private int mHashCode;
+ private boolean mHashCalculated = false;
+}
diff --git a/android/hardware/camera2/params/VendorTagDescriptor.java b/android/hardware/camera2/params/VendorTagDescriptor.java
new file mode 100644
index 00000000..ea424e59
--- /dev/null
+++ b/android/hardware/camera2/params/VendorTagDescriptor.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.params;
+
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.util.Log;
+
+/**
+ * A class for describing the vendor tags declared by a camera HAL module.
+ * Generally only used by the native side of
+ * android.hardware.camera2.impl.CameraMetadataNative
+ *
+ * @hide
+ */
+public final class VendorTagDescriptor implements Parcelable {
+
+ private VendorTagDescriptor(Parcel source) {
+ }
+
+ public static final Parcelable.Creator<VendorTagDescriptor> CREATOR =
+ new Parcelable.Creator<VendorTagDescriptor>() {
+ @Override
+ public VendorTagDescriptor createFromParcel(Parcel source) {
+ try {
+ VendorTagDescriptor vendorDescriptor = new VendorTagDescriptor(source);
+ return vendorDescriptor;
+ } catch (Exception e) {
+ Log.e(TAG, "Exception creating VendorTagDescriptor from parcel", e);
+ return null;
+ }
+ }
+
+ @Override
+ public VendorTagDescriptor[] newArray(int size) {
+ return new VendorTagDescriptor[size];
+ }
+ };
+
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ @Override
+ public void writeToParcel(Parcel dest, int flags) {
+ if (dest == null) {
+ throw new IllegalArgumentException("dest must not be null");
+ }
+ }
+
+ private static final String TAG = "VendorTagDescriptor";
+}
diff --git a/android/hardware/camera2/params/VendorTagDescriptorCache.java b/android/hardware/camera2/params/VendorTagDescriptorCache.java
new file mode 100644
index 00000000..1f92f6d9
--- /dev/null
+++ b/android/hardware/camera2/params/VendorTagDescriptorCache.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2017 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.params;
+
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.util.Log;
+
+/**
+ * A class for describing the vendor tag cache declared by a camera HAL module.
+ * Generally only used by the native side of
+ * android.hardware.camera2.impl.CameraMetadataNative
+ *
+ * @hide
+ */
+public final class VendorTagDescriptorCache implements Parcelable {
+
+ private VendorTagDescriptorCache(Parcel source) {
+ }
+
+ public static final Parcelable.Creator<VendorTagDescriptorCache> CREATOR =
+ new Parcelable.Creator<VendorTagDescriptorCache>() {
+ @Override
+ public VendorTagDescriptorCache createFromParcel(Parcel source) {
+ try {
+ VendorTagDescriptorCache vendorDescriptorCache = new VendorTagDescriptorCache(source);
+ return vendorDescriptorCache;
+ } catch (Exception e) {
+ Log.e(TAG, "Exception creating VendorTagDescriptorCache from parcel", e);
+ return null;
+ }
+ }
+
+ @Override
+ public VendorTagDescriptorCache[] newArray(int size) {
+ return new VendorTagDescriptorCache[size];
+ }
+ };
+
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ @Override
+ public void writeToParcel(Parcel dest, int flags) {
+ if (dest == null) {
+ throw new IllegalArgumentException("dest must not be null");
+ }
+ }
+
+ private static final String TAG = "VendorTagDescriptorCache";
+}
diff --git a/android/hardware/camera2/utils/ArrayUtils.java b/android/hardware/camera2/utils/ArrayUtils.java
new file mode 100644
index 00000000..99ddf6e8
--- /dev/null
+++ b/android/hardware/camera2/utils/ArrayUtils.java
@@ -0,0 +1,184 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.utils;
+
+import android.util.Log;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * Various assortment of array utilities.
+ */
+public class ArrayUtils {
+
+ private static final String TAG = "ArrayUtils";
+ private static final boolean DEBUG = false;
+
+ /** Return the index of {@code needle} in the {@code array}, or else {@code -1} */
+ public static <T> int getArrayIndex(T[] array, T needle) {
+ if (array == null) {
+ return -1;
+ }
+
+ int index = 0;
+ for (T elem : array) {
+ if (Objects.equals(elem, needle)) {
+ return index;
+ }
+ index++;
+ }
+
+ return -1;
+ }
+
+ /** Return the index of {@code needle} in the {@code array}, or else {@code -1} */
+ public static int getArrayIndex(int[] array, int needle) {
+ if (array == null) {
+ return -1;
+ }
+ for (int i = 0; i < array.length; ++i) {
+ if (array[i] == needle) {
+ return i;
+ }
+ }
+ return -1;
+ }
+
+ /**
+ * Create an {@code int[]} from the {@code List<>} by using {@code convertFrom} and
+ * {@code convertTo} as a one-to-one map (via the index).
+ *
+     * <p>Strings not appearing in {@code convertFrom} are ignored (with a verbose log when
+     * {@code DEBUG} is enabled); strings appearing in {@code convertFrom} but not
+     * {@code convertTo} are silently dropped.</p>
+ *
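+     * <p>An illustrative sketch (the mode names and codes are hypothetical):</p>
+     *
+     * <pre>{@code
+     * String[] names = { "off", "on" };
+     * int[] codes = { 0, 1 };
+     * // "auto" is absent from names, so it is skipped
+     * int[] result = ArrayUtils.convertStringListToIntArray(
+     *         Arrays.asList("on", "auto", "off"), names, codes);
+     * // result == { 1, 0 }
+     * }</pre>
+     *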
+ * @param list Source list of strings
+ * @param convertFrom Conversion list of strings
+ * @param convertTo Conversion list of ints
+ * @return An array of ints where the values correspond to the ones in {@code convertTo}
+ * or {@code null} if {@code list} was {@code null}
+ */
+ public static int[] convertStringListToIntArray(
+ List<String> list, String[] convertFrom, int[] convertTo) {
+ if (list == null) {
+ return null;
+ }
+
+ List<Integer> convertedList = convertStringListToIntList(list, convertFrom, convertTo);
+
+ int[] returnArray = new int[convertedList.size()];
+ for (int i = 0; i < returnArray.length; ++i) {
+ returnArray[i] = convertedList.get(i);
+ }
+
+ return returnArray;
+ }
+
+ /**
+ * Create an {@code List<Integer>} from the {@code List<>} by using {@code convertFrom} and
+ * {@code convertTo} as a one-to-one map (via the index).
+ *
+     * <p>Strings not appearing in {@code convertFrom} are ignored (with a verbose log when
+     * {@code DEBUG} is enabled); strings appearing in {@code convertFrom} but not
+     * {@code convertTo} are silently dropped.</p>
+ *
+ * @param list Source list of strings
+ * @param convertFrom Conversion list of strings
+ * @param convertTo Conversion list of ints
+ * @return A list of ints where the values correspond to the ones in {@code convertTo}
+ * or {@code null} if {@code list} was {@code null}
+ */
+ public static List<Integer> convertStringListToIntList(
+ List<String> list, String[] convertFrom, int[] convertTo) {
+ if (list == null) {
+ return null;
+ }
+
+ List<Integer> convertedList = new ArrayList<>(list.size());
+
+ for (String str : list) {
+ int strIndex = getArrayIndex(convertFrom, str);
+
+ // Guard against unexpected values
+ if (strIndex < 0) {
+ if (DEBUG) Log.v(TAG, "Ignoring invalid value " + str);
+ continue;
+ }
+
+ // Ignore values we can't map into (intentional)
+ if (strIndex < convertTo.length) {
+ convertedList.add(convertTo[strIndex]);
+ }
+ }
+
+ return convertedList;
+ }
+
+ /**
+ * Convert the list of integers in {@code list} to an {@code int} array.
+ *
+ * <p>Every element in {@code list} must be non-{@code null}.</p>
+ *
+ * @param list a list of non-{@code null} integers
+ *
+ * @return a new int array containing all the elements from {@code list}
+ *
+ * @throws NullPointerException if any of the elements in {@code list} were {@code null}
+ */
+ public static int[] toIntArray(List<Integer> list) {
+ if (list == null) {
+ return null;
+ }
+
+ int[] arr = new int[list.size()];
+ int i = 0;
+ for (int elem : list) {
+ arr[i] = elem;
+ i++;
+ }
+
+ return arr;
+ }
+
+ /**
+ * Returns true if the given {@code array} contains the given element.
+ *
+ * @param array {@code array} to check for {@code elem}
+ * @param elem {@code elem} to test for
+ * @return {@code true} if the given element is contained
+ */
+ public static boolean contains(int[] array, int elem) {
+ return getArrayIndex(array, elem) != -1;
+ }
+
+ /**
+ * Returns true if the given {@code array} contains the given element.
+ *
+ * @param array {@code array} to check for {@code elem}
+ * @param elem {@code elem} to test for
+ * @return {@code true} if the given element is contained
+ */
+ public static <T> boolean contains(T[] array, T elem) {
+ return getArrayIndex(array, elem) != -1;
+ }
+
+ private ArrayUtils() {
+ throw new AssertionError();
+ }
+}
diff --git a/android/hardware/camera2/utils/CloseableLock.java b/android/hardware/camera2/utils/CloseableLock.java
new file mode 100644
index 00000000..9ac89c82
--- /dev/null
+++ b/android/hardware/camera2/utils/CloseableLock.java
@@ -0,0 +1,346 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.utils;
+
+import android.util.Log;
+
+import java.util.concurrent.locks.Condition;
+import java.util.concurrent.locks.ReentrantLock;
+
+/**
+ * Implement a shared/exclusive lock that can be closed.
+ *
+ * <p>A shared lock can be acquired while other shared locks are also held. An
+ * exclusive lock acquire will block until all shared locks have been released.</p>
+ *
+ * <p>Locks are re-entrant; trying to acquire another lock (of the same type)
+ * while a lock is already held will immediately succeed.</p>
+ *
+ * <p>Attempting to acquire a shared lock while holding an exclusive lock, or vice versa, is not
+ * supported; attempting it will throw an {@link IllegalStateException}.</p>
+ *
+ * <p>If the lock is closed, all future and current acquires will immediately return {@code null}.
+ * </p>
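+ *
+ * <p>A typical shared-lock use with try-with-resources (a sketch; the name is illustrative):</p>
+ *
+ * <pre>{@code
+ * CloseableLock lock = new CloseableLock("device");
+ *
+ * try (CloseableLock.ScopedLock scoped = lock.acquireLock()) {
+ *     if (scoped == null) {
+ *         return; // the lock was already closed
+ *     }
+ *     // ... access state guarded by the lock ...
+ * } // releaseLock() runs automatically via ScopedLock#close
+ * }</pre>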
+ */
+public class CloseableLock implements AutoCloseable {
+
+ private static final boolean VERBOSE = false;
+
+ private final String TAG = "CloseableLock";
+ private final String mName;
+
+ private volatile boolean mClosed = false;
+
+ /** If an exclusive lock is acquired by some thread. */
+ private boolean mExclusive = false;
+ /**
+ * How many shared locks are acquired by any thread:
+ *
+ * <p>Reentrant locking increments this. If an exclusive lock is held,
+ * this value will stay at 0.</p>
+ */
+ private int mSharedLocks = 0;
+
+ private final ReentrantLock mLock = new ReentrantLock();
+    /** This condition automatically releases mLock while waiting and re-acquires it after being
+     * signalled */
+ private final Condition mCondition = mLock.newCondition();
+
+ /** How many times the current thread is holding the lock */
+ private final ThreadLocal<Integer> mLockCount =
+ new ThreadLocal<Integer>() {
+ @Override protected Integer initialValue() {
+ return 0;
+ }
+ };
+
+ /**
+ * Helper class to release a lock at the end of a try-with-resources statement.
+ */
+ public class ScopedLock implements AutoCloseable {
+ private ScopedLock() {}
+
+ /** Release the lock with {@link CloseableLock#releaseLock}. */
+ @Override
+ public void close() {
+ releaseLock();
+ }
+ }
+
+ /**
+ * Create a new instance; starts out with 0 locks acquired.
+ */
+ public CloseableLock() {
+ mName = "";
+ }
+
+ /**
+ * Create a new instance; starts out with 0 locks acquired.
+ *
+ * @param name set an optional name for logging functionality
+ */
+ public CloseableLock(String name) {
+ mName = name;
+ }
+
+ /**
+ * Acquires the lock exclusively (blocking), marks it as closed, then releases the lock.
+ *
+ * <p>Marking a lock as closed will fail all further acquisition attempts;
+ * it will also immediately unblock all other threads currently trying to acquire a lock.</p>
+ *
+ * <p>This operation is idempotent; calling it more than once has no effect.</p>
+ *
+ * @throws IllegalStateException
+ * if an attempt is made to {@code close} while this thread has a lock acquired
+ */
+ @Override
+ public void close() {
+ if (mClosed) {
+ if (VERBOSE) {
+ log("close - already closed; ignoring");
+ }
+ return;
+ }
+
+ ScopedLock scoper = acquireExclusiveLock();
+ // Already closed by another thread?
+ if (scoper == null) {
+ return;
+ } else if (mLockCount.get() != 1) {
+ // Future: may want to add a #releaseAndClose to allow this.
+ throw new IllegalStateException(
+ "Cannot close while one or more acquired locks are being held by this " +
+ "thread; release all other locks first");
+ }
+
+ try {
+ mLock.lock();
+
+ mClosed = true;
+ mExclusive = false;
+ mSharedLocks = 0;
+ mLockCount.remove();
+
+ // Notify all threads that are waiting to unblock and return immediately
+ mCondition.signalAll();
+ } finally {
+ mLock.unlock();
+ }
+
+ if (VERBOSE) {
+ log("close - completed");
+ }
+ }
+
+ /**
+ * Try to acquire the lock non-exclusively, blocking until the operation completes.
+ *
+     * <p>If the lock has already been closed, or is closed before this operation returns,
+     * the call will immediately return {@code null}.</p>
+ *
+ * <p>If other threads hold a non-exclusive lock (and the lock is not yet closed),
+ * this operation will return immediately. If another thread holds an exclusive lock,
+ * this thread will block until the exclusive lock has been released.</p>
+ *
+ * <p>This lock is re-entrant; acquiring more than one non-exclusive lock per thread is
+ * supported, and must be matched by an equal number of {@link #releaseLock} calls.</p>
+ *
+ * @return {@code ScopedLock} instance if the lock was acquired, or {@code null} if the lock
+ * was already closed.
+ *
+ * @throws IllegalStateException if this thread is already holding an exclusive lock
+ */
+ public ScopedLock acquireLock() {
+
+ int ownedLocks;
+
+ try {
+ mLock.lock();
+
+ // Lock is already closed, all further acquisitions will fail
+ if (mClosed) {
+ if (VERBOSE) {
+ log("acquire lock early aborted (already closed)");
+ }
+ return null;
+ }
+
+ ownedLocks = mLockCount.get();
+
+ // This thread is already holding an exclusive lock
+ if (mExclusive && ownedLocks > 0) {
+ throw new IllegalStateException(
+ "Cannot acquire shared lock while holding exclusive lock");
+ }
+
+ // Is another thread holding the exclusive lock? Block until we can get in.
+ while (mExclusive) {
+ mCondition.awaitUninterruptibly();
+
+ // Did another thread #close while we were waiting? Unblock immediately.
+ if (mClosed) {
+ if (VERBOSE) {
+ log("acquire lock unblocked aborted (already closed)");
+ }
+ return null;
+ }
+ }
+
+ mSharedLocks++;
+
+ ownedLocks = mLockCount.get() + 1;
+ mLockCount.set(ownedLocks);
+ } finally {
+ mLock.unlock();
+ }
+
+ if (VERBOSE) {
+ log("acquired lock (local own count = " + ownedLocks + ")");
+ }
+ return new ScopedLock();
+ }
+
+ /**
+ * Try to acquire the lock exclusively, blocking until all other threads release their locks.
+ *
+ * <p>If the lock has already been closed, or is closed before this operation returns,
+ * the call will immediately return {@code null}.</p>
+ *
+ * <p>If any other threads are holding a lock, this thread will block until all
+ * other locks are released.</p>
+ *
+ * <p>This lock is re-entrant; acquiring more than one exclusive lock per thread is supported,
+ * and must be matched by an equal number of {@link #releaseLock} calls.</p>
+ *
+ * @return {@code ScopedLock} instance if the lock was acquired, or {@code null} if the lock
+ * was already closed.
+ *
+ * @throws IllegalStateException
+ * if an attempt is made to acquire an exclusive lock while already holding a lock
+ */
+ public ScopedLock acquireExclusiveLock() {
+
+ int ownedLocks;
+
+ try {
+ mLock.lock();
+
+ // Lock is already closed, all further acquisitions will fail
+ if (mClosed) {
+ if (VERBOSE) {
+ log("acquire exclusive lock early aborted (already closed)");
+ }
+ return null;
+ }
+
+ ownedLocks = mLockCount.get();
+
+ // This thread is already holding a shared lock
+ if (!mExclusive && ownedLocks > 0) {
+ throw new IllegalStateException(
+ "Cannot acquire exclusive lock while holding shared lock");
+ }
+
+ /*
+ * Is another thread holding the lock? Block until we can get in.
+ *
+ * If we are already holding the lock, always let it through since
+ * we are just reentering the exclusive lock.
+ */
+ while (ownedLocks == 0 && (mExclusive || mSharedLocks > 0)) {
+ mCondition.awaitUninterruptibly();
+
+ // Did another thread #close while we were waiting? Unblock immediately.
+ if (mClosed) {
+ if (VERBOSE) {
+ log("acquire exclusive lock unblocked aborted (already closed)");
+ }
+ return null;
+ }
+ }
+
+ mExclusive = true;
+
+ ownedLocks = mLockCount.get() + 1;
+ mLockCount.set(ownedLocks);
+ } finally {
+ mLock.unlock();
+ }
+
+ if (VERBOSE) {
+ log("acquired exclusive lock (local own count = " + ownedLocks + ")");
+ }
+ return new ScopedLock();
+ }
+
+ /**
+ * Release a single lock that was acquired.
+ *
+ * <p>Any other thread that is blocked trying to acquire a lock will get a chance
+ * to acquire it.</p>
+ *
+ * @throws IllegalStateException if no locks were acquired, or if the lock was already closed
+ */
+ public void releaseLock() {
+ if (mLockCount.get() <= 0) {
+ throw new IllegalStateException(
+ "Cannot release lock that was not acquired by this thread");
+ }
+
+ int ownedLocks;
+
+ try {
+ mLock.lock();
+
+ // Lock is already closed, it couldn't have been acquired in the first place
+ if (mClosed) {
+ throw new IllegalStateException("Do not release after the lock has been closed");
+ }
+
+ if (!mExclusive) {
+ mSharedLocks--;
+ } else {
+ if (mSharedLocks != 0) {
+ throw new AssertionError("Too many shared locks " + mSharedLocks);
+ }
+ }
+
+ ownedLocks = mLockCount.get() - 1;
+ mLockCount.set(ownedLocks);
+
+ if (ownedLocks == 0 && mExclusive) {
+ // Wake up any threads that might be waiting for the exclusive lock to be released
+ mExclusive = false;
+ mCondition.signalAll();
+ } else if (ownedLocks == 0 && mSharedLocks == 0) {
+ // Wake up any threads that might be trying to get the exclusive lock
+ mCondition.signalAll();
+ }
+ } finally {
+ mLock.unlock();
+ }
+
+ if (VERBOSE) {
+ log("released lock (local lock count " + ownedLocks + ")");
+ }
+ }
+
+ private void log(String what) {
+ Log.v(TAG + "[" + mName + "]", what);
+ }
+
+}
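For illustration, a minimal usage sketch of the class above. The FrameProcessor
owner class is hypothetical; only CloseableLock and ScopedLock are from this
commit. It shows the try-with-resources pattern the ScopedLock is designed for,
where a null return from acquireLock() signals that the lock was already closed.

    class FrameProcessor implements AutoCloseable {
        private final CloseableLock mLock = new CloseableLock("FrameProcessor");

        /** Returns false if the lock (and thus the processor) was already closed. */
        boolean processOneFrame() {
            try (CloseableLock.ScopedLock scoped = mLock.acquireLock()) {
                if (scoped == null) {
                    return false; // closed; no further work is possible
                }
                // ... read shared state; other shared holders may run concurrently ...
                return true;
            } // try-with-resources calls ScopedLock#close, which calls releaseLock()
        }

        @Override
        public void close() {
            // Blocks until all outstanding locks are released, then makes all
            // future acquire attempts return null.
            mLock.close();
        }
    }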
diff --git a/android/hardware/camera2/utils/HashCodeHelpers.java b/android/hardware/camera2/utils/HashCodeHelpers.java
new file mode 100644
index 00000000..731da8b6
--- /dev/null
+++ b/android/hardware/camera2/utils/HashCodeHelpers.java
@@ -0,0 +1,100 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.utils;
+
+/**
+ * Provide hashing functions using the Modified Bernstein hash.
+ */
+public final class HashCodeHelpers {
+
+ /**
+ * Hash every element uniformly using the Modified Bernstein hash.
+ *
+ * <p>Useful for implementing an {@link Object#hashCode} for uniformly distributed data.</p>
+ *
+ * @param array an array of integers; {@code null} hashes to {@code 0}
+ *
+ * @return the numeric hash code
+ */
+ public static int hashCode(int... array) {
+ if (array == null) {
+ return 0;
+ }
+
+ /*
+ * Note that we use 31 here instead of 33 since it's preferred in Effective Java
+ * and used elsewhere in the runtime (e.g. Arrays#hashCode)
+ *
+ * That being said 33 and 31 are nearly identical in terms of their usefulness
+ * according to http://svn.apache.org/repos/asf/apr/apr/trunk/tables/apr_hash.c
+ */
+ int h = 1;
+ for (int x : array) {
+ // Strength reduction; in case the compiler has illusions about divisions being faster
+ h = ((h << 5) - h) ^ x; // (h * 31) XOR x
+ }
+
+ return h;
+ }
+
+ /**
+ * Hash every element uniformly using the Modified Bernstein hash.
+ *
+ * <p>Useful for implementing an {@link Object#hashCode} for uniformly distributed data.</p>
+ *
+ * @param array an array of floats; {@code null} hashes to {@code 0}
+ *
+ * @return the numeric hash code
+ */
+ public static int hashCode(float... array) {
+ if (array == null) {
+ return 0;
+ }
+
+ int h = 1;
+ for (float f : array) {
+ int x = Float.floatToIntBits(f);
+ h = ((h << 5) - h) ^ x; // (h * 31) XOR x
+ }
+
+ return h;
+ }
+
+ /**
+ * Hash every element uniformly using the Modified Bernstein hash.
+ *
+ * <p>Useful for implementing an {@link Object#hashCode} for uniformly distributed data.</p>
+ *
+ * @param array an array of objects; {@code null} hashes to {@code 0}
+ *
+ * @return the numeric hash code
+ */
+ public static <T> int hashCodeGeneric(T... array) {
+ if (array == null) {
+ return 0;
+ }
+
+ int h = 1;
+ for (T o : array) {
+ int x = (o == null) ? 0 : o.hashCode();
+ h = ((h << 5) - h) ^ x; // (h * 31) XOR x
+ }
+
+ return h;
+ }
+
+}
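To show how these helpers are meant to be used, here is a sketch of a made-up
value class (PixelRegion is hypothetical; camera2's own parameter types such as
MeteringRectangle combine their fields in the same way):

    import android.hardware.camera2.utils.HashCodeHelpers;

    final class PixelRegion {
        private final int mX, mY, mWidth, mHeight;

        PixelRegion(int x, int y, int width, int height) {
            mX = x; mY = y; mWidth = width; mHeight = height;
        }

        @Override
        public boolean equals(Object o) {
            if (!(o instanceof PixelRegion)) return false;
            PixelRegion other = (PixelRegion) o;
            return mX == other.mX && mY == other.mY
                    && mWidth == other.mWidth && mHeight == other.mHeight;
        }

        @Override
        public int hashCode() {
            // Each step computes (h * 31) XOR x via ((h << 5) - h) ^ x
            return HashCodeHelpers.hashCode(mX, mY, mWidth, mHeight);
        }
    }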
diff --git a/android/hardware/camera2/utils/ListUtils.java b/android/hardware/camera2/utils/ListUtils.java
new file mode 100644
index 00000000..99cb0356
--- /dev/null
+++ b/android/hardware/camera2/utils/ListUtils.java
@@ -0,0 +1,104 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.utils;
+
+import java.util.List;
+
+/**
+ * Various assortment of list utilities.
+ *
+ * <p>Using a {@code null} list is supported and will almost always return the default value
+ * (e.g. {@code false}, or {@code null}).</p>
+ */
+public class ListUtils {
+
+ /** Return {@code true} if the {@code list} contains the {@code needle}. */
+ public static <T> boolean listContains(List<T> list, T needle) {
+ if (list == null) {
+ return false;
+ } else {
+ return list.contains(needle);
+ }
+ }
+
+ /**
+ * Return {@code true} if the {@code list} contains exactly one element and
+ * that element is equal to {@code single}.
+ */
+ public static <T> boolean listElementsEqualTo(List<T> list, T single) {
+ if (list == null) {
+ return false;
+ }
+
+ return (list.size() == 1 && list.contains(single));
+ }
+
+ /**
+ * Return a human-readable representation of a list (non-recursively).
+ */
+ public static <T> String listToString(List<T> list) {
+ if (list == null) {
+ return null;
+ }
+
+ StringBuilder sb = new StringBuilder();
+ sb.append('[');
+
+ int size = list.size();
+ int i = 0;
+ for (T elem : list) {
+ sb.append(elem);
+
+ if (i != size - 1) {
+ sb.append(',');
+ }
+ i++;
+ }
+ sb.append(']');
+
+ return sb.toString();
+ }
+
+ /**
+ * Return the first item from {@code choices} that is contained in the {@code list}.
+ *
+ * <p>Choices with an index closer to 0 get higher priority. If none of the {@code choices}
+ * are in the {@code list}, then {@code null} is returned.</p>
+ *
+ * @param list a list of objects which may or may not contain one or more of the choices
+ * @param choices an array of objects from which to select an item
+ *
+ * @return the first item from {@code choices} contained in {@code list}, otherwise {@code null}
+ */
+ public static <T> T listSelectFirstFrom(List<T> list, T[] choices) {
+ if (list == null) {
+ return null;
+ }
+
+ for (T choice : choices) {
+ if (list.contains(choice)) {
+ return choice;
+ }
+ }
+
+ return null;
+ }
+
+ private ListUtils() {
+ throw new AssertionError();
+ }
+}
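A brief sketch of the null-tolerant behavior, with made-up values:

    import android.hardware.camera2.utils.ListUtils;
    import java.util.Arrays;
    import java.util.List;

    List<String> modes = Arrays.asList("auto", "macro");

    ListUtils.listContains(modes, "auto");         // true
    ListUtils.listContains(null, "auto");          // false, not a NullPointerException
    ListUtils.listElementsEqualTo(modes, "auto");  // false: size is 2, not 1
    ListUtils.listToString(modes);                 // "[auto,macro]"

    // Earlier choices win: "macro" comes before "edof" in the choices array.
    ListUtils.listSelectFirstFrom(modes, new String[] {"macro", "edof"}); // "macro"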
diff --git a/android/hardware/camera2/utils/LongParcelable.java b/android/hardware/camera2/utils/LongParcelable.java
new file mode 100644
index 00000000..c89b3394
--- /dev/null
+++ b/android/hardware/camera2/utils/LongParcelable.java
@@ -0,0 +1,74 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.utils;
+
+import android.os.Parcel;
+import android.os.Parcelable;
+
+/**
+ * @hide
+ */
+public class LongParcelable implements Parcelable {
+ private long number;
+
+ public LongParcelable() {
+ this.number = 0;
+ }
+
+ public LongParcelable(long number) {
+ this.number = number;
+ }
+
+ public static final Parcelable.Creator<LongParcelable> CREATOR =
+ new Parcelable.Creator<LongParcelable>() {
+ @Override
+ public LongParcelable createFromParcel(Parcel in) {
+ return new LongParcelable(in);
+ }
+
+ @Override
+ public LongParcelable[] newArray(int size) {
+ return new LongParcelable[size];
+ }
+ };
+
+ private LongParcelable(Parcel in) {
+ readFromParcel(in);
+ }
+
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ @Override
+ public void writeToParcel(Parcel dest, int flags) {
+ dest.writeLong(number);
+ }
+
+ public void readFromParcel(Parcel in) {
+ number = in.readLong();
+ }
+
+ public long getNumber() {
+ return number;
+ }
+
+ public void setNumber(long number) {
+ this.number = number;
+ }
+
+}
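A sketch of a write/read round trip through a Parcel; this requires an Android
runtime, since Parcel is not available on a plain JVM:

    Parcel parcel = Parcel.obtain();
    try {
        new LongParcelable(42L).writeToParcel(parcel, /*flags*/ 0);
        parcel.setDataPosition(0); // rewind before reading
        LongParcelable out = LongParcelable.CREATOR.createFromParcel(parcel);
        // out.getNumber() == 42L
    } finally {
        parcel.recycle();
    }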
diff --git a/android/hardware/camera2/utils/ParamsUtils.java b/android/hardware/camera2/utils/ParamsUtils.java
new file mode 100644
index 00000000..976fa2e3
--- /dev/null
+++ b/android/hardware/camera2/utils/ParamsUtils.java
@@ -0,0 +1,200 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.utils;
+
+import android.graphics.Matrix;
+import android.graphics.Rect;
+import android.graphics.RectF;
+import android.hardware.camera2.CaptureRequest;
+import android.util.Rational;
+import android.util.Size;
+
+import static com.android.internal.util.Preconditions.*;
+
+/**
+ * Various assortment of params utilities.
+ */
+public class ParamsUtils {
+
+ /** Arbitrary denominator used to estimate floats as rationals */
+ private static final int RATIONAL_DENOMINATOR = 1000000; // 1million
+
+ /**
+ * Create a {@link Rect} from a {@code Size} by creating a new rectangle with
+ * left, top = {@code (0, 0)} and right, bottom = {@code (width, height)}
+ *
+ * @param size a non-{@code null} size
+ *
+ * @return a {@code non-null} rectangle
+ *
+ * @throws NullPointerException if {@code size} was {@code null}
+ */
+ public static Rect createRect(Size size) {
+ checkNotNull(size, "size must not be null");
+
+ return new Rect(/*left*/0, /*top*/0, size.getWidth(), size.getHeight());
+ }
+
+ /**
+ * Create a {@link Rect} from a {@code RectF} by creating a new rectangle with
+ * each corner (left, top, right, bottom) rounded towards the nearest integer bounding box.
+ *
+ * <p>In particular (left, top) is floored, and (right, bottom) is ceiled.</p>
+ *
+ * @param rect a non-{@code null} rectangle
+ *
+ * @return a {@code non-null} rectangle
+ *
+ * @throws NullPointerException if {@code rect} was {@code null}
+ */
+ public static Rect createRect(RectF rect) {
+ checkNotNull(rect, "rect must not be null");
+
+ Rect r = new Rect();
+ rect.roundOut(r);
+
+ return r;
+ }
+
+ /**
+ * Map the rectangle in {@code rect} with the transform in {@code transform} into
+ * a new rectangle, with each corner (left, top, right, bottom) rounded towards the nearest
+ * integer bounding box.
+ *
+ * <p>None of the arguments are mutated.</p>
+ *
+ * @param transform a non-{@code null} transformation matrix
+ * @param rect a non-{@code null} rectangle
+ * @return a new rectangle that was transformed by {@code transform}
+ *
+ * @throws NullPointerException if any of the args were {@code null}
+ */
+ public static Rect mapRect(Matrix transform, Rect rect) {
+ checkNotNull(transform, "transform must not be null");
+ checkNotNull(rect, "rect must not be null");
+
+ RectF rectF = new RectF(rect);
+ transform.mapRect(rectF);
+ return createRect(rectF);
+ }
+
+ /**
+ * Create a {@link Size} from a {@code Rect} by creating a new size whose width
+ * and height are the same as the rectangle's width and height.
+ *
+ * @param rect a non-{@code null} rectangle
+ *
+ * @return a {@code non-null} size
+ *
+ * @throws NullPointerException if {@code rect} was {@code null}
+ */
+ public static Size createSize(Rect rect) {
+ checkNotNull(rect, "rect must not be null");
+
+ return new Size(rect.width(), rect.height());
+ }
+
+ /**
+ * Create a {@link Rational} value by approximating the float value as a rational.
+ *
+ * <p>Floating-point values too large to be represented as an integer will be converted to
+ * {@link Integer#MAX_VALUE}; floating-point values too small to be represented as an integer
+ * will be converted to {@link Integer#MIN_VALUE}.</p>
+ *
+ * @param value a floating point value
+ * @return the rational representation of the float
+ */
+ public static Rational createRational(float value) {
+ if (Float.isNaN(value)) {
+ return Rational.NaN;
+ } else if (value == Float.POSITIVE_INFINITY) {
+ return Rational.POSITIVE_INFINITY;
+ } else if (value == Float.NEGATIVE_INFINITY) {
+ return Rational.NEGATIVE_INFINITY;
+ } else if (value == 0.0f) {
+ return Rational.ZERO;
+ }
+
+ // normal finite value: approximate it
+
+ /*
+ * Start out trying to approximate with denominator = 1million,
+ * but if the numerator doesn't fit into an Int then keep making the denominator
+ * smaller until it does.
+ */
+ int den = RATIONAL_DENOMINATOR;
+ float numF;
+ do {
+ numF = value * den;
+
+ if ((numF > Integer.MIN_VALUE && numF < Integer.MAX_VALUE) || (den == 1)) {
+ break;
+ }
+
+ den /= 10;
+ } while (true);
+
+ /*
+ * By float -> int narrowing conversion in JLS 5.1.3, this will automatically become
+ * MIN_VALUE or MAX_VALUE if numF is too small/large to be represented by an integer
+ */
+ int num = (int) numF;
+
+ return new Rational(num, den);
+ }
+
+ /**
+ * Convert an integral rectangle ({@code source}) to a floating point rectangle
+ * ({@code destination}) in-place.
+ *
+ * @param source the originating integer rectangle will be read from here
+ * @param destination the resulting floating point rectangle will be written out to here
+ *
+ * @throws NullPointerException if {@code source} or {@code destination} was {@code null}
+ */
+ public static void convertRectF(Rect source, RectF destination) {
+ checkNotNull(source, "source must not be null");
+ checkNotNull(destination, "destination must not be null");
+
+ destination.left = source.left;
+ destination.right = source.right;
+ destination.bottom = source.bottom;
+ destination.top = source.top;
+ }
+
+ /**
+ * Return the value set by the key, or the {@code defaultValue} if no value was set.
+ *
+ * @throws NullPointerException if any of the args were {@code null}
+ */
+ public static <T> T getOrDefault(CaptureRequest r, CaptureRequest.Key<T> key, T defaultValue) {
+ checkNotNull(r, "r must not be null");
+ checkNotNull(key, "key must not be null");
+ checkNotNull(defaultValue, "defaultValue must not be null");
+
+ T value = r.get(key);
+ if (value == null) {
+ return defaultValue;
+ } else {
+ return value;
+ }
+ }
+
+ private ParamsUtils() {
+ throw new AssertionError();
+ }
+}
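Worked examples of createRational's approximation strategy, with values computed
by hand from the loop above (note that android.util.Rational reduces the
numerator and denominator by their gcd on construction):

    ParamsUtils.createRational(0.5f);
    // den = 1000000, num = 500000 -> new Rational(500000, 1000000), i.e. 1/2

    ParamsUtils.createRational(3000f);
    // 3000 * 1000000 overflows int, so den drops to 100000:
    // new Rational(300000000, 100000), i.e. 3000/1

    ParamsUtils.createRational(Float.NaN);               // Rational.NaN
    ParamsUtils.createRational(Float.POSITIVE_INFINITY); // Rational.POSITIVE_INFINITY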
diff --git a/android/hardware/camera2/utils/SizeAreaComparator.java b/android/hardware/camera2/utils/SizeAreaComparator.java
new file mode 100644
index 00000000..4fde6361
--- /dev/null
+++ b/android/hardware/camera2/utils/SizeAreaComparator.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.utils;
+
+import android.util.Size;
+
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+
+import static com.android.internal.util.Preconditions.*;
+
+/**
+ * Comparator for {@link Size} objects by the area.
+ *
+ * <p>This comparator totally orders by rectangle area. Tie-breaks on width.</p>
+ */
+public class SizeAreaComparator implements Comparator<Size> {
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public int compare(Size size, Size size2) {
+ checkNotNull(size, "size must not be null");
+ checkNotNull(size2, "size2 must not be null");
+
+ if (size.equals(size2)) {
+ return 0;
+ }
+
+ long width = size.getWidth();
+ long width2 = size2.getWidth();
+ long area = width * size.getHeight();
+ long area2 = width2 * size2.getHeight();
+
+ if (area == area2) {
+ return (width > width2) ? 1 : -1;
+ }
+
+ return (area > area2) ? 1 : -1;
+ }
+
+ /**
+ * Get the largest {@code Size} from the list by comparing sizes' areas
+ * against each other using {@link SizeAreaComparator}.
+ *
+ * @param sizes a non-{@code null} list of non-{@code null} sizes
+ * @return a non-{@code null} size
+ *
+ * @throws NullPointerException if {@code sizes} or any elements in it were {@code null}
+ */
+ public static Size findLargestByArea(List<Size> sizes) {
+ checkNotNull(sizes, "sizes must not be null");
+
+ return Collections.max(sizes, new SizeAreaComparator());
+ }
+}
\ No newline at end of file
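A short sketch of the ordering, assuming an Android runtime for android.util.Size:

    List<Size> sizes = new ArrayList<>(Arrays.asList(
            new Size(640, 480), new Size(1920, 1080), new Size(480, 640)));

    Collections.sort(sizes, new SizeAreaComparator());
    // 640x480 and 480x640 have equal area (307200), so the smaller width
    // sorts first: [480x640, 640x480, 1920x1080]

    Size largest = SizeAreaComparator.findLargestByArea(sizes); // 1920x1080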
diff --git a/android/hardware/camera2/utils/SubmitInfo.java b/android/hardware/camera2/utils/SubmitInfo.java
new file mode 100644
index 00000000..d1692b59
--- /dev/null
+++ b/android/hardware/camera2/utils/SubmitInfo.java
@@ -0,0 +1,106 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.utils;
+
+import android.os.Parcel;
+import android.os.Parcelable;
+import android.hardware.camera2.ICameraDeviceUser;
+
+/**
+ * The status information returned for a successful capture request submission.
+ *
+ * Includes the request ID for the newly submitted capture request, and the
+ * last frame number of either the previous repeating request (for repeating
+ * requests), or of the request(s) just submitted (for single-shot capture).
+ *
+ * @hide
+ */
+public class SubmitInfo implements Parcelable {
+
+ private int mRequestId;
+ private long mLastFrameNumber;
+
+ public SubmitInfo() {
+ mRequestId = -1;
+ mLastFrameNumber = ICameraDeviceUser.NO_IN_FLIGHT_REPEATING_FRAMES;
+ }
+
+ public SubmitInfo(int requestId, long lastFrameNumber) {
+ mRequestId = requestId;
+ mLastFrameNumber = lastFrameNumber;
+ }
+
+ public static final Parcelable.Creator<SubmitInfo> CREATOR =
+ new Parcelable.Creator<SubmitInfo>() {
+ @Override
+ public SubmitInfo createFromParcel(Parcel in) {
+ return new SubmitInfo(in);
+ }
+
+ @Override
+ public SubmitInfo[] newArray(int size) {
+ return new SubmitInfo[size];
+ }
+ };
+
+ private SubmitInfo(Parcel in) {
+ readFromParcel(in);
+ }
+
+ @Override
+ public int describeContents() {
+ return 0;
+ }
+
+ @Override
+ public void writeToParcel(Parcel dest, int flags) {
+ dest.writeInt(mRequestId);
+ dest.writeLong(mLastFrameNumber);
+ }
+
+ public void readFromParcel(Parcel in) {
+ mRequestId = in.readInt();
+ mLastFrameNumber = in.readLong();
+ }
+
+ /**
+ * Return the request ID for the submitted capture request/burst.
+ *
+ * This is used to track the completion status of the requested captures,
+ * and to cancel repeating requests.
+ */
+ public int getRequestId() {
+ return mRequestId;
+ }
+
+ /**
+ * Return the last frame number for the submitted capture request/burst.
+ *
+ * For a repeating request, this is the last frame number of the _prior_
+ * repeating request, to indicate when to fire the sequence completion callback
+ * for the prior repeating request.
+ *
+ * For a single-shot capture, this is the last frame number of _this_
+ * burst, to indicate when to fire the sequence completion callback for the request itself.
+ *
+ * For a repeating request, this may be NO_IN_FLIGHT_REPEATING_FRAMES if no
+ * instances of a prior repeating request were actually issued to the camera device.
+ */
+ public long getLastFrameNumber() {
+ return mLastFrameNumber;
+ }
+
+}
diff --git a/android/hardware/camera2/utils/SurfaceUtils.java b/android/hardware/camera2/utils/SurfaceUtils.java
new file mode 100644
index 00000000..e1e1c4fd
--- /dev/null
+++ b/android/hardware/camera2/utils/SurfaceUtils.java
@@ -0,0 +1,196 @@
+/*
+ * Copyright 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.utils;
+
+import android.graphics.ImageFormat;
+import android.hardware.camera2.legacy.LegacyCameraDevice;
+import android.hardware.camera2.legacy.LegacyExceptionUtils.BufferQueueAbandonedException;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.util.Range;
+import android.util.Size;
+import android.view.Surface;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+
+/**
+ * Various Surface utilities.
+ */
+public class SurfaceUtils {
+
+ /**
+ * Check if a surface is for a preview consumer, based on the consumer endpoint's Gralloc usage flags.
+ *
+ * @param surface The surface to be checked.
+ * @return true if the surface is for preview consumer, false otherwise.
+ */
+ public static boolean isSurfaceForPreview(Surface surface) {
+ return LegacyCameraDevice.isPreviewConsumer(surface);
+ }
+
+ /**
+ * Check if the surface is for a hardware video encoder consumer, based on the consumer
+ * endpoint's Gralloc usage flags.
+ *
+ * @param surface The surface to be checked.
+ * @return true if the surface is for hardware video encoder consumer, false otherwise.
+ */
+ public static boolean isSurfaceForHwVideoEncoder(Surface surface) {
+ return LegacyCameraDevice.isVideoEncoderConsumer(surface);
+ }
+
+ /**
+ * Get the Surface size.
+ *
+ * @param surface The surface to be queried for size.
+ * @return Size of the surface.
+ *
+ * @throws IllegalArgumentException if the surface is already abandoned.
+ */
+ public static Size getSurfaceSize(Surface surface) {
+ try {
+ return LegacyCameraDevice.getSurfaceSize(surface);
+ } catch (BufferQueueAbandonedException e) {
+ throw new IllegalArgumentException("Surface was abandoned", e);
+ }
+ }
+
+ /**
+ * Get the Surface format.
+ *
+ * @param surface The surface to be queried for format.
+ * @return format of the surface.
+ *
+ * @throws IllegalArgumentException if the surface is already abandoned.
+ */
+ public static int getSurfaceFormat(Surface surface) {
+ try {
+ return LegacyCameraDevice.detectSurfaceType(surface);
+ } catch (BufferQueueAbandonedException e) {
+ throw new IllegalArgumentException("Surface was abandoned", e);
+ }
+ }
+
+ /**
+ * Get the Surface dataspace.
+ *
+ * @param surface The surface to be queried for dataspace.
+ * @return dataspace of the surface.
+ *
+ * @throws IllegalArgumentException if the surface is already abandoned.
+ */
+ public static int getSurfaceDataspace(Surface surface) {
+ try {
+ return LegacyCameraDevice.detectSurfaceDataspace(surface);
+ } catch (BufferQueueAbandonedException e) {
+ throw new IllegalArgumentException("Surface was abandoned", e);
+ }
+ }
+
+ /**
+ * Return {@code true} if the consumer is one of the consumers that can accept
+ * producer overrides of the default dimensions and format.
+ */
+ public static boolean isFlexibleConsumer(Surface output) {
+ return LegacyCameraDevice.isFlexibleConsumer(output);
+ }
+
+ /**
+ * A high speed output surface can only be a preview or a hardware encoder surface.
+ *
+ * @param surface The high speed output surface to be checked.
+ */
+ private static void checkHighSpeedSurfaceFormat(Surface surface) {
+ int surfaceFormat = SurfaceUtils.getSurfaceFormat(surface);
+
+ if (surfaceFormat != ImageFormat.PRIVATE) {
+ throw new IllegalArgumentException("Surface format(" + surfaceFormat + ") is not"
+ + " for preview or hardware video encoding!");
+ }
+ }
+
+ /**
+ * Verify that that the surfaces are valid for high-speed recording mode,
+ * and that the FPS range is supported
+ *
+ * @param surfaces the surfaces to verify as valid in terms of size and format
+ * @param fpsRange the target high-speed FPS range to validate
+ * @param config The stream configuration map for the device in question
+ */
+ public static void checkConstrainedHighSpeedSurfaces(Collection<Surface> surfaces,
+ Range<Integer> fpsRange, StreamConfigurationMap config) {
+ if (surfaces == null || surfaces.size() == 0 || surfaces.size() > 2) {
+ throw new IllegalArgumentException("Output target surface list must not be null and"
+ + " the size must be 1 or 2");
+ }
+
+ List<Size> highSpeedSizes = null;
+ if (fpsRange == null) {
+ highSpeedSizes = Arrays.asList(config.getHighSpeedVideoSizes());
+ } else {
+ // Check the FPS range first if provided
+ Range<Integer>[] highSpeedFpsRanges = config.getHighSpeedVideoFpsRanges();
+            if (!Arrays.asList(highSpeedFpsRanges).contains(fpsRange)) {
+ throw new IllegalArgumentException("Fps range " + fpsRange.toString() + " in the"
+ + " request is not a supported high speed fps range " +
+ Arrays.toString(highSpeedFpsRanges));
+ }
+ highSpeedSizes = Arrays.asList(config.getHighSpeedVideoSizesFor(fpsRange));
+ }
+
+ for (Surface surface : surfaces) {
+ checkHighSpeedSurfaceFormat(surface);
+
+            // Surface size must be one of the supported high speed sizes.
+ Size surfaceSize = SurfaceUtils.getSurfaceSize(surface);
+ if (!highSpeedSizes.contains(surfaceSize)) {
+ throw new IllegalArgumentException("Surface size " + surfaceSize.toString() + " is"
+ + " not part of the high speed supported size list " +
+ Arrays.toString(highSpeedSizes.toArray()));
+ }
+ // Each output surface must be either preview surface or recording surface.
+ if (!SurfaceUtils.isSurfaceForPreview(surface) &&
+ !SurfaceUtils.isSurfaceForHwVideoEncoder(surface)) {
+ throw new IllegalArgumentException("This output surface is neither preview nor "
+ + "hardware video encoding surface");
+ }
+ if (SurfaceUtils.isSurfaceForPreview(surface) &&
+ SurfaceUtils.isSurfaceForHwVideoEncoder(surface)) {
+ throw new IllegalArgumentException("This output surface can not be both preview"
+ + " and hardware video encoding surface");
+ }
+ }
+
+ // For 2 output surface case, they shouldn't be same type.
+ if (surfaces.size() == 2) {
+ // Up to here, each surface can only be either preview or recording.
+ Iterator<Surface> iterator = surfaces.iterator();
+ boolean isFirstSurfacePreview =
+ SurfaceUtils.isSurfaceForPreview(iterator.next());
+ boolean isSecondSurfacePreview =
+ SurfaceUtils.isSurfaceForPreview(iterator.next());
+ if (isFirstSurfacePreview == isSecondSurfacePreview) {
+ throw new IllegalArgumentException("The 2 output surfaces must have different"
+ + " type");
+ }
+ }
+ }
+
+}
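A sketch of where this check typically runs: validating output surfaces before
creating a constrained high-speed session. Here previewSurface and
recorderSurface are hypothetical, and characteristics is a CameraCharacteristics
obtained elsewhere from CameraManager.

    StreamConfigurationMap map = characteristics.get(
            CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    Range<Integer> fpsRange = new Range<>(120, 120);

    // Throws IllegalArgumentException unless there are 1 or 2 surfaces, each is
    // a PRIVATE-format preview or encoder surface of a supported high-speed
    // size, and two surfaces are of different consumer types.
    SurfaceUtils.checkConstrainedHighSpeedSurfaces(
            Arrays.asList(previewSurface, recorderSurface), fpsRange, map);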
diff --git a/android/hardware/camera2/utils/TaskDrainer.java b/android/hardware/camera2/utils/TaskDrainer.java
new file mode 100644
index 00000000..ed30ff34
--- /dev/null
+++ b/android/hardware/camera2/utils/TaskDrainer.java
@@ -0,0 +1,214 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.utils;
+
+import android.os.Handler;
+import android.util.Log;
+
+import java.util.HashSet;
+import java.util.Set;
+
+import static com.android.internal.util.Preconditions.*;
+
+/**
+ * Keep track of multiple concurrent tasks starting and finishing by their key;
+ * allow draining existing tasks and figuring out when all tasks have finished
+ * (and new ones won't begin).
+ *
+ * <p>The initial state is to allow all tasks to be started and finished. A task may only be started
+ * once, after which it must be finished before starting again. Likewise, a task may only be
+ * finished once, after which it must be started before finishing again. It is okay to finish a
+ * task before starting it due to different threads handling starting and finishing.</p>
+ *
+ * <p>When draining begins, no more new tasks can be started. This guarantees that at some
+ * point when all the tasks are finished there will be no more collective new tasks,
+ * at which point the {@link DrainListener#onDrained} callback will be invoked.</p>
+ *
+ *
+ * @param <T>
+ * a type for the key that will represent tracked tasks;
+ * must implement {@code Object#equals}
+ */
+public class TaskDrainer<T> {
+ /**
+ * Fired asynchronously after draining has begun with {@link TaskDrainer#beginDrain}
+ * <em>and</em> all tasks that were started have finished.
+ */
+ public interface DrainListener {
+ /** All tasks have fully finished draining; there will be no more pending tasks. */
+ public void onDrained();
+ }
+
+ private static final String TAG = "TaskDrainer";
+ private final boolean DEBUG = false;
+
+ private final Handler mHandler;
+ private final DrainListener mListener;
+ private final String mName;
+
+ /** Set of tasks which have been started but not yet finished with #taskFinished */
+ private final Set<T> mTaskSet = new HashSet<T>();
+ /**
+ * Set of tasks which have been finished but not yet started with #taskStarted. This may happen
+ * if taskStarted and taskFinished are called from two different threads.
+ */
+ private final Set<T> mEarlyFinishedTaskSet = new HashSet<T>();
+ private final Object mLock = new Object();
+
+ private boolean mDraining = false;
+ private boolean mDrainFinished = false;
+
+ /**
+ * Create a new task drainer; {@code onDrained} callbacks will be posted to the listener
+ * via the {@code handler}.
+ *
+ * @param handler a non-{@code null} handler to use to post runnables to
+ * @param listener a non-{@code null} listener where {@code onDrained} will be called
+ */
+ public TaskDrainer(Handler handler, DrainListener listener) {
+ mHandler = checkNotNull(handler, "handler must not be null");
+ mListener = checkNotNull(listener, "listener must not be null");
+ mName = null;
+ }
+
+ /**
+ * Create a new task drainer; {@code onDrained} callbacks will be posted to the listener
+ * via the {@code handler}.
+ *
+ * @param handler a non-{@code null} handler to use to post runnables to
+ * @param listener a non-{@code null} listener where {@code onDrained} will be called
+ * @param name an optional name used for debug logging
+ */
+ public TaskDrainer(Handler handler, DrainListener listener, String name) {
+ // XX: Probably don't need a handler at all here
+ mHandler = checkNotNull(handler, "handler must not be null");
+ mListener = checkNotNull(listener, "listener must not be null");
+ mName = name;
+ }
+
+ /**
+ * Mark an asynchronous task as having started.
+ *
+ * <p>A task cannot be started more than once without first having finished. Once
+ * draining begins with {@link #beginDrain}, no new tasks can be started.</p>
+ *
+ * @param task a key to identify a task
+ *
+ * @see #taskFinished
+ * @see #beginDrain
+ *
+ * @throws IllegalStateException
+ * If attempting to start a task which is already started (and not finished),
+ * or if attempting to start a task after draining has begun.
+ */
+ public void taskStarted(T task) {
+ synchronized (mLock) {
+ if (DEBUG) {
+ Log.v(TAG + "[" + mName + "]", "taskStarted " + task);
+ }
+
+ if (mDraining) {
+ throw new IllegalStateException("Can't start more tasks after draining has begun");
+ }
+
+ // Try to remove the task from the early finished set.
+ if (!mEarlyFinishedTaskSet.remove(task)) {
+ // The task is not finished early. Add it to the started set.
+ if (!mTaskSet.add(task)) {
+ throw new IllegalStateException("Task " + task + " was already started");
+ }
+ }
+ }
+ }
+
+
+ /**
+ * Mark an asynchronous task as having finished.
+ *
+ * <p>A task cannot be finished more than once without first having started.</p>
+ *
+ * @param task a key to identify a task
+ *
+ * @see #taskStarted
+ * @see #beginDrain
+ *
+ * @throws IllegalStateException
+     *          If attempting to finish a task which is already finished (and not started).
+ */
+ public void taskFinished(T task) {
+ synchronized (mLock) {
+ if (DEBUG) {
+ Log.v(TAG + "[" + mName + "]", "taskFinished " + task);
+ }
+
+ // Try to remove the task from started set.
+ if (!mTaskSet.remove(task)) {
+ // Task is not started yet. Add it to the early finished set.
+ if (!mEarlyFinishedTaskSet.add(task)) {
+ throw new IllegalStateException("Task " + task + " was already finished");
+ }
+ }
+
+ // If this is the last finished task and draining has already begun, fire #onDrained
+ checkIfDrainFinished();
+ }
+ }
+
+ /**
+ * Do not allow any more tasks to be started; once all existing started tasks are finished,
+ * fire the {@link DrainListener#onDrained} callback asynchronously.
+ *
+ * <p>This operation is idempotent; calling it more than once has no effect.</p>
+ */
+ public void beginDrain() {
+ synchronized (mLock) {
+ if (!mDraining) {
+ if (DEBUG) {
+ Log.v(TAG + "[" + mName + "]", "beginDrain started");
+ }
+
+ mDraining = true;
+
+ // If all tasks that had started had already finished by now, fire #onDrained
+ checkIfDrainFinished();
+ } else {
+ if (DEBUG) {
+ Log.v(TAG + "[" + mName + "]", "beginDrain ignored");
+ }
+ }
+ }
+ }
+
+ private void checkIfDrainFinished() {
+ if (mTaskSet.isEmpty() && mDraining && !mDrainFinished) {
+ mDrainFinished = true;
+ postDrained();
+ }
+ }
+
+ private void postDrained() {
+ mHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ if (DEBUG) {
+ Log.v(TAG + "[" + mName + "]", "onDrained");
+ }
+
+ mListener.onDrained();
+ }
+ });
+ }
+}
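A minimal usage sketch, tracking in-flight capture requests by ID; the request
IDs and the releaseSessionResources cleanup hook are made up:

    Handler handler = new Handler(Looper.getMainLooper());
    TaskDrainer<Integer> drainer = new TaskDrainer<>(
            handler,
            () -> releaseSessionResources(), // hypothetical cleanup hook
            /*name*/ "captures");

    drainer.taskStarted(101);  // request 101 issued to the device
    drainer.taskStarted(102);  // request 102 issued to the device
    drainer.taskFinished(101); // request 101 completed

    drainer.beginDrain();      // from now on, taskStarted throws
    drainer.taskFinished(102); // last pending task -> onDrained is posted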
diff --git a/android/hardware/camera2/utils/TaskSingleDrainer.java b/android/hardware/camera2/utils/TaskSingleDrainer.java
new file mode 100644
index 00000000..f6272c9e
--- /dev/null
+++ b/android/hardware/camera2/utils/TaskSingleDrainer.java
@@ -0,0 +1,104 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package android.hardware.camera2.utils;
+
+import android.hardware.camera2.utils.TaskDrainer.DrainListener;
+import android.os.Handler;
+
+/**
+ * Keep track of a single concurrent task starting and finishing;
+ * allow draining the existing task and figuring out when the task has finished
+ * (and won't restart).
+ *
+ * <p>The initial state allows the task to be started and finished. The task may only be started
+ * once, after which it must be finished before starting again. Likewise, finishing a task
+ * that hasn't been started is also not allowed.</p>
+ *
+ * <p>When draining begins, the task cannot be started again. This guarantees that at some
+ * point the task will be finished forever, at which point the {@link DrainListener#onDrained}
+ * callback will be invoked.</p>
+ */
+public class TaskSingleDrainer {
+
+ private final TaskDrainer<Object> mTaskDrainer;
+ private final Object mSingleTask = new Object();
+
+ /**
+ * Create a new task drainer; {@code onDrained} callbacks will be posted to the listener
+ * via the {@code handler}.
+ *
+ * @param handler a non-{@code null} handler to use to post runnables to
+ * @param listener a non-{@code null} listener where {@code onDrained} will be called
+ */
+ public TaskSingleDrainer(Handler handler, DrainListener listener) {
+ mTaskDrainer = new TaskDrainer<>(handler, listener);
+ }
+
+ /**
+ * Create a new task drainer; {@code onDrained} callbacks will be posted to the listener
+ * via the {@code handler}.
+ *
+ * @param handler a non-{@code null} handler to use to post runnables to
+ * @param listener a non-{@code null} listener where {@code onDrained} will be called
+ * @param name an optional name used for debug logging
+ */
+ public TaskSingleDrainer(Handler handler, DrainListener listener, String name) {
+ mTaskDrainer = new TaskDrainer<>(handler, listener, name);
+ }
+
+ /**
+ * Mark this asynchronous task as having started.
+ *
+ * <p>The task cannot be started more than once without first having finished. Once
+ * draining begins with {@link #beginDrain}, no new tasks can be started.</p>
+ *
+ * @see #taskFinished
+ * @see #beginDrain
+ *
+ * @throws IllegalStateException
+ * If attempting to start a task which is already started (and not finished),
+ * or if attempting to start a task after draining has begun.
+ */
+ public void taskStarted() {
+ mTaskDrainer.taskStarted(mSingleTask);
+ }
+
+ /**
+ * Do not allow any more task re-starts; once the existing task is finished,
+ * fire the {@link DrainListener#onDrained} callback asynchronously.
+ *
+ * <p>This operation is idempotent; calling it more than once has no effect.</p>
+ */
+ public void beginDrain() {
+ mTaskDrainer.beginDrain();
+ }
+
+ /**
+ * Mark this asynchronous task as having finished.
+ *
+ * <p>The task cannot be finished if it hasn't started. Once finished, a task
+ * cannot be finished again (unless it's started again).</p>
+ *
+ * @see #taskStarted
+ * @see #beginDrain
+ *
+ * @throws IllegalStateException
+     *          If attempting to finish a task which is already finished (and not re-started).
+ */
+ public void taskFinished() {
+ mTaskDrainer.taskFinished(mSingleTask);
+ }
+}
diff --git a/android/hardware/camera2/utils/TypeReference.java b/android/hardware/camera2/utils/TypeReference.java
new file mode 100644
index 00000000..24ce1241
--- /dev/null
+++ b/android/hardware/camera2/utils/TypeReference.java
@@ -0,0 +1,437 @@
+/*
+ * Copyright (C) 2014 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.utils;
+
+import java.lang.reflect.Array;
+import java.lang.reflect.GenericArrayType;
+import java.lang.reflect.ParameterizedType;
+import java.lang.reflect.Type;
+import java.lang.reflect.TypeVariable;
+import java.lang.reflect.WildcardType;
+
+import static com.android.internal.util.Preconditions.*;
+
+/**
+ * Super type token; allows capturing generic types at runtime by forcing them to be reified.
+ *
+ * <p>Usage example: <pre>{@code
+ * // using anonymous classes (preferred)
+ * TypeReference<Integer> intToken = new TypeReference<Integer>() {{ }};
+ *
+ * // using named classes
+ * class IntTypeReference extends TypeReference<Integer> {...}
+ * TypeReference<Integer> intToken = new IntTypeReference();
+ * }</pre></p>
+ *
+ * <p>Unlike the reference implementation, this bans nested TypeVariables; that is, all
+ * dynamic types must be equal to the static types.</p>
+ *
+ * <p>See <a href="http://gafter.blogspot.com/2007/05/limitation-of-super-type-tokens.html">
+ * http://gafter.blogspot.com/2007/05/limitation-of-super-type-tokens.html</a>
+ * for more details.</p>
+ */
+public abstract class TypeReference<T> {
+ private final Type mType;
+ private final int mHash;
+
+ /**
+ * Create a new type reference for {@code T}.
+ *
+ * @throws IllegalArgumentException if {@code T}'s actual type contains a type variable
+ *
+ * @see TypeReference
+ */
+ protected TypeReference() {
+ ParameterizedType thisType = (ParameterizedType)getClass().getGenericSuperclass();
+
+ // extract the "T" from TypeReference<T>
+ mType = thisType.getActualTypeArguments()[0];
+
+ /*
+ * Prohibit type references with type variables such as
+ *
+ * class GenericListToken<T> extends TypeReference<List<T>>
+ *
+ * Since the "T" there is not known without an instance of T, type equality would
+ * consider *all* Lists equal regardless of T. Allowing this would defeat
+ * some of the type safety of a type reference.
+ */
+ if (containsTypeVariable(mType)) {
+ throw new IllegalArgumentException(
+ "Including a type variable in a type reference is not allowed");
+ }
+ mHash = mType.hashCode();
+ }
+
+ /**
+ * Return the dynamic {@link Type} corresponding to the captured type {@code T}.
+ */
+ public Type getType() {
+ return mType;
+ }
+
+ private TypeReference(Type type) {
+ mType = type;
+ if (containsTypeVariable(mType)) {
+ throw new IllegalArgumentException(
+ "Including a type variable in a type reference is not allowed");
+ }
+ mHash = mType.hashCode();
+ }
+
+ private static class SpecializedTypeReference<T> extends TypeReference<T> {
+ public SpecializedTypeReference(Class<T> klass) {
+ super(klass);
+ }
+ }
+
+ @SuppressWarnings("rawtypes")
+ private static class SpecializedBaseTypeReference extends TypeReference {
+ public SpecializedBaseTypeReference(Type type) {
+ super(type);
+ }
+ }
+
+ /**
+ * Create a specialized type reference from a dynamic class instance,
+ * bypassing the standard compile-time checks.
+ *
+ * <p>As with a regular type reference, the {@code klass} must not contain
+ * any type variables.</p>
+ *
+ * @param klass a non-{@code null} {@link Class} instance
+ *
+ * @return a type reference which captures {@code T} at runtime
+ *
+ * @throws IllegalArgumentException if {@code T} had any type variables
+ */
+ public static <T> TypeReference<T> createSpecializedTypeReference(Class<T> klass) {
+ return new SpecializedTypeReference<T>(klass);
+ }
+
+ /**
+ * Create a specialized type reference from a dynamic {@link Type} instance,
+ * bypassing the standard compile-time checks.
+ *
+ * <p>As with a regular type reference, the {@code type} must not contain
+ * any type variables.</p>
+ *
+ * @param type a non-{@code null} {@link Type} instance
+ *
+ * @return a type reference which captures {@code T} at runtime
+ *
+ * @throws IllegalArgumentException if {@code type} had any type variables
+ */
+ public static TypeReference<?> createSpecializedTypeReference(Type type) {
+ return new SpecializedBaseTypeReference(type);
+ }
+
+ /**
+ * Returns the raw type of T.
+ *
+ * <p><ul>
+ * <li>If T is a Class itself, T itself is returned.
+ * <li>If T is a ParameterizedType, the raw type of the parameterized type is returned.
+ * <li>If T is a GenericArrayType, the returned type is the corresponding array class.
+ * For example: {@code List<Integer>[]} => {@code List[]}.
+ * <li>If T is a type variable or a wildcard type, the raw type of the first upper bound is
+ * returned. For example: {@code <X extends Foo>} => {@code Foo}.
+ * </ul>
+ *
+ * @return the raw type of {@code T}
+ */
+ @SuppressWarnings("unchecked")
+ public final Class<? super T> getRawType() {
+ return (Class<? super T>)getRawType(mType);
+ }
+
+ private static final Class<?> getRawType(Type type) {
+ if (type == null) {
+ throw new NullPointerException("type must not be null");
+ }
+
+ if (type instanceof Class<?>) {
+ return (Class<?>)type;
+ } else if (type instanceof ParameterizedType) {
+ return (Class<?>)(((ParameterizedType)type).getRawType());
+ } else if (type instanceof GenericArrayType) {
+ return getArrayClass(getRawType(((GenericArrayType)type).getGenericComponentType()));
+ } else if (type instanceof WildcardType) {
+ // Should be at most 1 upper bound, but treat it like an array for simplicity
+ return getRawType(((WildcardType) type).getUpperBounds());
+ } else if (type instanceof TypeVariable) {
+ throw new AssertionError("Type variables are not allowed in type references");
+ } else {
+ // Impossible
+ throw new AssertionError("Unhandled branch to get raw type for type " + type);
+ }
+ }
+
+ private static final Class<?> getRawType(Type[] types) {
+ if (types == null) {
+ return null;
+ }
+
+ for (Type type : types) {
+ Class<?> klass = getRawType(type);
+ if (klass != null) {
+ return klass;
+ }
+ }
+
+ return null;
+ }
+
+ private static final Class<?> getArrayClass(Class<?> componentType) {
+ return Array.newInstance(componentType, 0).getClass();
+ }
+
+ /**
+ * Get the component type, e.g. {@code T} from {@code T[]}.
+ *
+ * @return component type, or {@code null} if {@code T} is not an array
+ */
+ public TypeReference<?> getComponentType() {
+ Type componentType = getComponentType(mType);
+
+ return (componentType != null) ?
+ createSpecializedTypeReference(componentType) :
+ null;
+ }
+
+ private static Type getComponentType(Type type) {
+ checkNotNull(type, "type must not be null");
+
+ if (type instanceof Class<?>) {
+ return ((Class<?>) type).getComponentType();
+ } else if (type instanceof ParameterizedType) {
+ return null;
+ } else if (type instanceof GenericArrayType) {
+ return ((GenericArrayType)type).getGenericComponentType();
+ } else if (type instanceof WildcardType) {
+ // Should be at most 1 upper bound, but treat it like an array for simplicity
+ throw new UnsupportedOperationException("TODO: support wild card components");
+ } else if (type instanceof TypeVariable) {
+ throw new AssertionError("Type variables are not allowed in type references");
+ } else {
+ // Impossible
+ throw new AssertionError("Unhandled branch to get component type for type " + type);
+ }
+ }
+
+ /**
+ * Compare two objects for equality.
+ *
+ * <p>A TypeReference is only equal to another TypeReference if their captured type {@code T}
+ * is also equal.</p>
+ */
+ @Override
+ public boolean equals(Object o) {
+ // Note that this comparison could inaccurately return true when comparing types
+ // with nested type variables; therefore we ban type variables in the constructor.
+ return o instanceof TypeReference<?> && mType.equals(((TypeReference<?>)o).mType);
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public int hashCode() {
+ return mHash;
+ }
+
+ /**
+ * Check if the {@code type} contains a {@link TypeVariable} recursively.
+ *
+ * <p>Intuitively, a type variable is a type in a type expression that refers to a generic
+ * type which is not known at the definition of the expression (commonly seen when
+ * type parameters are used, e.g. {@code class Foo<T>}).</p>
+ *
+ * <p>See <a href="http://docs.oracle.com/javase/specs/jls/se7/html/jls-4.html#jls-4.4">
+ * http://docs.oracle.com/javase/specs/jls/se7/html/jls-4.html#jls-4.4</a>
+ * for a more formal definition of a type variable.</p>
+ *
+ * @param type a type object ({@code null} is allowed)
+ * @return {@code true} if there were nested type variables; {@code false} otherwise
+ */
+ public static boolean containsTypeVariable(Type type) {
+ if (type == null) {
+ // Trivially false
+ return false;
+ } else if (type instanceof TypeVariable<?>) {
+ /*
+ * T -> trivially true
+ */
+ return true;
+ } else if (type instanceof Class<?>) {
+ /*
+ * class Foo -> no type variable
+ * class Foo<T> - has a type variable
+ *
+ * This also covers the case of class Foo<T> extends ... / implements ...
+ * since everything on the right hand side would either include a type variable T
+ * or have no type variables.
+ */
+ Class<?> klass = (Class<?>)type;
+
+ // Empty array => class is not generic
+ if (klass.getTypeParameters().length != 0) {
+ return true;
+ } else {
+ // Does the outer class(es) contain any type variables?
+
+ /*
+ * class Outer<T> {
+ * class Inner {
+ * T field;
+ * }
+ * }
+ *
+ * In this case 'Inner' has no type parameters itself, but it still has a type
+ * variable as part of the type definition.
+ */
+ return containsTypeVariable(klass.getDeclaringClass());
+ }
+ } else if (type instanceof ParameterizedType) {
+ /*
+ * This is the "Foo<T1, T2, T3, ... Tn>" in the scope of a
+ *
+ * // no type variables here, T1-Tn are known at this definition
+ * class X extends Foo<T1, T2, T3, ... Tn>
+ *
+ * // T1 is a type variable, T2-Tn are known at this definition
+ * class X<T1> extends Foo<T1, T2, T3, ... Tn>
+ */
+ ParameterizedType p = (ParameterizedType) type;
+
+ // This needs to be recursively checked
+ for (Type arg : p.getActualTypeArguments()) {
+ if (containsTypeVariable(arg)) {
+ return true;
+ }
+ }
+
+ return false;
+ } else if (type instanceof WildcardType) {
+ WildcardType wild = (WildcardType) type;
+
+ /*
+         * This is the "?" inside of a
+ *
+ * Foo<?> --> unbounded; trivially no type variables
+ * Foo<? super T> --> lower bound; does T have a type variable?
+ * Foo<? extends T> --> upper bound; does T have a type variable?
+ */
+
+ /*
+ * According to JLS 4.5.1
+ * (http://java.sun.com/docs/books/jls/third_edition/html/typesValues.html#4.5.1):
+ *
+ * - More than 1 lower/upper bound is illegal
+ * - Both a lower and upper bound is illegal
+ *
+ * However, we use this 'array OR array' approach for readability
+ */
+ return containsTypeVariable(wild.getLowerBounds()) ||
+ containsTypeVariable(wild.getUpperBounds());
+ }
+
+ return false;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public String toString() {
+ StringBuilder builder = new StringBuilder();
+ builder.append("TypeReference<");
+ toString(getType(), builder);
+ builder.append(">");
+
+ return builder.toString();
+ }
+
+ private static void toString(Type type, StringBuilder out) {
+ if (type == null) {
+ return;
+ } else if (type instanceof TypeVariable<?>) {
+ // T
+ out.append(((TypeVariable<?>)type).getName());
+ } else if (type instanceof Class<?>) {
+ Class<?> klass = (Class<?>)type;
+
+ out.append(klass.getName());
+ toString(klass.getTypeParameters(), out);
+ } else if (type instanceof ParameterizedType) {
+ // "Foo<T1, T2, T3, ... Tn>"
+ ParameterizedType p = (ParameterizedType) type;
+
+ out.append(((Class<?>)p.getRawType()).getName());
+ toString(p.getActualTypeArguments(), out);
+ } else if (type instanceof GenericArrayType) {
+ GenericArrayType gat = (GenericArrayType)type;
+
+ toString(gat.getGenericComponentType(), out);
+ out.append("[]");
+ } else { // WildcardType, BoundedType
+ // TODO:
+ out.append(type.toString());
+ }
+ }
+
+ private static void toString(Type[] types, StringBuilder out) {
+ if (types == null) {
+ return;
+ } else if (types.length == 0) {
+ return;
+ }
+
+ out.append("<");
+
+ for (int i = 0; i < types.length; ++i) {
+ toString(types[i], out);
+ if (i != types.length - 1) {
+ out.append(", ");
+ }
+ }
+
+ out.append(">");
+ }
+
+ /**
+ * Check if any of the elements in this array contained a type variable.
+ *
+ * <p>Empty and null arrays trivially have no type variables.</p>
+ *
+ * @param typeArray an array ({@code null} is ok) of types
+ * @return true if any elements contained a type variable; false otherwise
+ */
+ private static boolean containsTypeVariable(Type[] typeArray) {
+ if (typeArray == null) {
+ return false;
+ }
+
+ for (Type type : typeArray) {
+ if (containsTypeVariable(type)) {
+ return true;
+ }
+ }
+
+ return false;
+ }
+}
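A usage sketch; the double-brace anonymous subclass forces the type argument to
be reified, which is what the constructor inspects:

    // Captures List<Integer> at runtime.
    TypeReference<List<Integer>> listToken = new TypeReference<List<Integer>>() {{ }};
    listToken.getType();          // java.util.List<java.lang.Integer>
    listToken.getRawType();       // interface java.util.List
    listToken.getComponentType(); // null: a List is not an array

    TypeReference<int[]> arrayToken = new TypeReference<int[]>() {{ }};
    arrayToken.getComponentType().getRawType(); // int

    // Rejected at construction time: the captured type contains a type variable.
    // <T> TypeReference<List<T>> bad() {
    //     return new TypeReference<List<T>>() {{ }}; // throws IllegalArgumentException
    // }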
diff --git a/android/hardware/camera2/utils/UncheckedThrow.java b/android/hardware/camera2/utils/UncheckedThrow.java
new file mode 100644
index 00000000..ffcb78ba
--- /dev/null
+++ b/android/hardware/camera2/utils/UncheckedThrow.java
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera2.utils;
+
+/**
+ * @hide
+ */
+public class UncheckedThrow {
+
+ /**
+     * Throw any kind of exception without it needing to be declared as checked.
+     * @param e any instance of an Exception
+ */
+ public static void throwAnyException(Exception e) {
+ /**
+ * Abuse type erasure by making the compiler think we are throwing RuntimeException,
+ * which is unchecked, but then inserting any exception in there.
+ */
+ UncheckedThrow.<RuntimeException>throwAnyImpl(e);
+ }
+
+ /**
+     * Throw any kind of throwable without it needing to be declared as checked.
+     * @param e any instance of a Throwable
+ */
+ public static void throwAnyException(Throwable e) {
+ /**
+ * Abuse type erasure by making the compiler think we are throwing RuntimeException,
+ * which is unchecked, but then inserting any exception in there.
+ */
+ UncheckedThrow.<RuntimeException>throwAnyImpl(e);
+ }
+
+ @SuppressWarnings("unchecked")
+ private static<T extends Throwable> void throwAnyImpl(Throwable e) throws T {
+ throw (T) e;
+ }
+}
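A sketch of the intended use: rethrowing a checked exception through code whose
signature declares nothing; mayThrowIOException is a hypothetical call:

    public void run() { // an interface method that declares no checked exceptions
        try {
            mayThrowIOException(); // hypothetical call that throws IOException
        } catch (IOException e) {
            // Compiles without a 'throws IOException' clause; the caller still
            // receives the original IOException instance at runtime.
            UncheckedThrow.throwAnyException(e);
        }
    }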