/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.hardware.camera2;

import android.annotation.NonNull;
import android.annotation.TestApi;
import android.compat.annotation.UnsupportedAppUsage;
import android.hardware.camera2.impl.CameraMetadataNative;
import android.hardware.camera2.impl.PublicKey;
import android.hardware.camera2.impl.SyntheticKey;
import android.util.Log;

import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

/**
 * The base class for camera controls and information.
 *
 * <p>This class defines the basic key/value map used for querying for camera
 * characteristics or capture results, and for setting camera request
 * parameters.</p>
 *
 * <p>All instances of CameraMetadata are immutable. The list of keys with {@link #getKeys()}
 * never changes, nor do the values returned by any key with {@code #get} throughout
 * the lifetime of the object.</p>
 *
 * @see CameraDevice
 * @see CameraManager
 * @see CameraCharacteristics
 **/
public abstract class CameraMetadata<TKey> {

    private static final String TAG = "CameraMetadataAb";
    private static final boolean DEBUG = false;
    private CameraMetadataNative mNativeInstance = null;

    /**
     * Set a camera metadata field to a value. The field definitions can be
     * found in {@link CameraCharacteristics}, {@link CaptureResult}, and
     * {@link CaptureRequest}.
     *
     * @param key The metadata field to write.
     * @param value The value to set the field to, which must be of a matching
     * type to the key.
     *
     * @hide
     */
    protected CameraMetadata() {
    }

    /**
     * Get a camera metadata field value.
     *
     * <p>The field definitions can be
     * found in {@link CameraCharacteristics}, {@link CaptureResult}, and
     * {@link CaptureRequest}.</p>
     *
     * <p>Querying the value for the same key more than once will return a value
     * which is equal to the previous queried value.</p>
     *
     * @throws IllegalArgumentException if the key was not valid
     *
     * @param key The metadata field to read.
     * @return The value of that key, or {@code null} if the field is not set.
     *
     * @hide
     */
    protected abstract <T> T getProtected(TKey key);

    /**
     * @hide
     */
    protected void setNativeInstance(CameraMetadataNative nativeInstance) {
        mNativeInstance = nativeInstance;
    }

    /**
     * Retrieves the native std::shared_ptr* as a Java long.
     * Returns 0 if mNativeInstance is null.
     *
     * @hide
     */
    @UnsupportedAppUsage(publicAlternatives = "This method is exposed for native "
            + "{@code ACameraMetadata_fromCameraMetadata} in {@code libcamera2ndk}.")
    public long getNativeMetadataPtr() {
        if (mNativeInstance == null) {
            return 0;
        } else {
            return mNativeInstance.getMetadataPtr();
        }
    }

    /**
     * Retrieves the CameraMetadataNative instance.
     *
     * @hide
     */
    public CameraMetadataNative getNativeMetadata() {
        return mNativeInstance;
    }

    /**
     * @hide
     */
    protected abstract Class<TKey> getKeyClass();

    /**
     * Returns a list of the keys contained in this map.
     *
     * <p>The list returned is not modifiable, so any attempts to modify it will throw
     * an {@code UnsupportedOperationException}.</p>
     *
     * <p>All values retrieved by a key from this list with {@code #get} are guaranteed to be
     * non-{@code null}. Each key is only listed once in the list. The order of the keys
     * is undefined.</p>
     *
     * @return List of the keys contained in this map.
     */
    @SuppressWarnings("unchecked")
    @NonNull
    public List<TKey> getKeys() {
        Class<CameraMetadata<TKey>> thisClass = (Class<CameraMetadata<TKey>>) getClass();
        return Collections.unmodifiableList(
                getKeys(thisClass, getKeyClass(), this, /*filterTags*/null,
                        /*includeSynthetic*/ true));
    }
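
    // Usage sketch (illustration only; assumes an android.content.Context named "context",
    // and uses camera ID "0" purely as an example):
    //
    //   CameraManager manager = context.getSystemService(CameraManager.class);
    //   try {
    //       CameraCharacteristics chars = manager.getCameraCharacteristics("0");
    //       for (CameraCharacteristics.Key<?> key : chars.getKeys()) {
    //           Log.v(TAG, key.getName() + " -> " + chars.get(key));
    //       }
    //   } catch (CameraAccessException e) {
    //       Log.e(TAG, "Unable to query camera characteristics", e);
    //   }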

    /**
     * Return a list of all the Key that are declared as a field inside of the class
     * {@code type}.
     *
     * <p>Optionally, if {@code instance} is not null, then filter out any keys with null
     * values.</p>
     *
     * <p>Optionally, if {@code filterTags} is not {@code null}, then filter out any keys
     * whose native {@code tag} is not in {@code filterTags}. The {@code filterTags} array will be
     * sorted as a side effect.
     * {@code includeSynthetic} Includes public synthetic fields by default.</p>
     */
    /*package*/ @SuppressWarnings("unchecked")
    <TKey> ArrayList<TKey> getKeys(
            Class<?> type, Class<TKey> keyClass, CameraMetadata<TKey> instance,
            int[] filterTags, boolean includeSynthetic) {

        if (DEBUG) Log.v(TAG, "getKeysStatic for " + type);

        // TotalCaptureResult does not have any of the keys on it, use CaptureResult instead
        if (type.equals(TotalCaptureResult.class)) {
            type = CaptureResult.class;
        }

        if (filterTags != null) {
            Arrays.sort(filterTags);
        }

        ArrayList<TKey> keyList = new ArrayList<TKey>();

        Field[] fields = type.getDeclaredFields();
        for (Field field : fields) {
            // Filter for Keys that are public
            if (field.getType().isAssignableFrom(keyClass)
                    && (field.getModifiers() & Modifier.PUBLIC) != 0) {
                TKey key;
                try {
                    key = (TKey) field.get(instance);
                } catch (IllegalAccessException e) {
                    throw new AssertionError("Can't get IllegalAccessException", e);
                } catch (IllegalArgumentException e) {
                    throw new AssertionError("Can't get IllegalArgumentException", e);
                }

                if (instance == null || instance.getProtected(key) != null) {
                    if (shouldKeyBeAdded(key, field, filterTags, includeSynthetic)) {
                        keyList.add(key);

                        if (DEBUG) {
                            Log.v(TAG, "getKeysStatic - key was added - " + key);
                        }
                    } else if (DEBUG) {
                        Log.v(TAG, "getKeysStatic - key was filtered - " + key);
                    }
                }
            }
        }

        if (null == mNativeInstance) {
            return keyList;
        }

        ArrayList<TKey> vendorKeys = mNativeInstance.getAllVendorKeys(keyClass);

        if (vendorKeys != null) {
            for (TKey k : vendorKeys) {
                String keyName;
                long vendorId;
                if (k instanceof CaptureRequest.Key<?>) {
                    keyName = ((CaptureRequest.Key<?>) k).getName();
                    vendorId = ((CaptureRequest.Key<?>) k).getVendorId();
                } else if (k instanceof CaptureResult.Key<?>) {
                    keyName = ((CaptureResult.Key<?>) k).getName();
                    vendorId = ((CaptureResult.Key<?>) k).getVendorId();
                } else if (k instanceof CameraCharacteristics.Key<?>) {
                    keyName = ((CameraCharacteristics.Key<?>) k).getName();
                    vendorId = ((CameraCharacteristics.Key<?>) k).getVendorId();
                } else {
                    continue;
                }

                if (filterTags != null && Arrays.binarySearch(filterTags,
                        CameraMetadataNative.getTag(keyName, vendorId)) < 0) {
                    // ignore vendor keys not in filterTags
                    continue;
                }
                if (instance == null || instance.getProtected(k) != null) {
                    keyList.add(k);
                }
            }
        }

        return keyList;
    }

    @SuppressWarnings("rawtypes")
    private static <TKey> boolean shouldKeyBeAdded(TKey key, Field field, int[] filterTags,
            boolean includeSynthetic) {
        if (key == null) {
            throw new NullPointerException("key must not be null");
        }

        CameraMetadataNative.Key nativeKey;

        /*
         * Get the native key from the public api key
         */
        if (key instanceof CameraCharacteristics.Key) {
            nativeKey = ((CameraCharacteristics.Key) key).getNativeKey();
        } else if (key instanceof CaptureResult.Key) {
            nativeKey = ((CaptureResult.Key) key).getNativeKey();
        } else if (key instanceof CaptureRequest.Key) {
            nativeKey = ((CaptureRequest.Key) key).getNativeKey();
        } else {
            // Reject fields that aren't a key
            throw new IllegalArgumentException("key type must be that of a metadata key");
        }

        if (field.getAnnotation(PublicKey.class) == null) {
            // Never expose @hide keys up to the API user
            return false;
        }

        // No filtering necessary
        if (filterTags == null) {
            return true;
        }

        if (field.getAnnotation(SyntheticKey.class) != null) {
            // This key is synthetic, so calling #getTag will throw IAE
            return includeSynthetic;
        }

        /*
         * Regular key: look up its native tag and see if it's in filterTags
         */
        int keyTag = nativeKey.getTag();

        // non-negative result is returned iff the value is in the array
        return Arrays.binarySearch(filterTags, keyTag) >= 0;
    }

    /*~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~
     * The enum values below this point are generated from metadata
     * definitions in /system/media/camera/docs. Do not modify by hand or
     * modify the comment blocks at the start or end.
     *~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~O~*/

    //
    // Enumeration values for CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION
    //

    /**

The lens focus distance is not accurate, and the units used for * {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance} do not correspond to any physical units.

*

Setting the lens to the same focus distance on separate occasions may * result in a different real focus distance, depending on factors such * as the orientation of the device, the age of the focusing mechanism, * and the device temperature. The focus distance value will still be * in the range of [0, {@link CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE android.lens.info.minimumFocusDistance}], where 0 * represents the farthest focus.

* * @see CaptureRequest#LENS_FOCUS_DISTANCE * @see CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE * @see CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION */ public static final int LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED = 0; /** *

The lens focus distance is measured in diopters.

*

However, setting the lens to the same focus distance * on separate occasions may result in a different real * focus distance, depending on factors such as the * orientation of the device, the age of the focusing * mechanism, and the device temperature.

* @see CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION */ public static final int LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE = 1; /** *

The lens focus distance is measured in diopters, and * is calibrated.

*

The lens mechanism is calibrated so that setting the * same focus distance is repeatable on multiple * occasions with good accuracy, and the focus distance * corresponds to the real physical distance to the plane * of best focus.
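For example, a minimal sketch of checking whether reported focus distances are in diopters; {@code chars} is assumed to be the {@link CameraCharacteristics} of the camera in question:
<pre>{@code
Integer cal = chars.get(CameraCharacteristics.LENS_INFO_FOCUS_DISTANCE_CALIBRATION);
boolean diopters = (cal != null)
        && (cal != CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED);
// Only when diopters is true does 1.0f / focusDistance correspond to a distance in meters.
}</pre>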

* @see CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION */ public static final int LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED = 2; // // Enumeration values for CameraCharacteristics#LENS_FACING // /** *

The camera device faces the same direction as the device's screen.

* @see CameraCharacteristics#LENS_FACING */ public static final int LENS_FACING_FRONT = 0; /** *

The camera device faces the opposite direction as the device's screen.

* @see CameraCharacteristics#LENS_FACING */ public static final int LENS_FACING_BACK = 1; /** *

The camera device is an external camera, and has no fixed facing relative to the * device's screen.

* @see CameraCharacteristics#LENS_FACING */ public static final int LENS_FACING_EXTERNAL = 2; // // Enumeration values for CameraCharacteristics#LENS_POSE_REFERENCE // /** *

The value of {@link CameraCharacteristics#LENS_POSE_TRANSLATION android.lens.poseTranslation} is relative to the optical center of * the largest camera device facing the same direction as this camera.

*

This is the default value for API levels before Android P.

* * @see CameraCharacteristics#LENS_POSE_TRANSLATION * @see CameraCharacteristics#LENS_POSE_REFERENCE */ public static final int LENS_POSE_REFERENCE_PRIMARY_CAMERA = 0; /** *

The value of {@link CameraCharacteristics#LENS_POSE_TRANSLATION android.lens.poseTranslation} is relative to the position of the * primary gyroscope of this Android device.

* * @see CameraCharacteristics#LENS_POSE_TRANSLATION * @see CameraCharacteristics#LENS_POSE_REFERENCE */ public static final int LENS_POSE_REFERENCE_GYROSCOPE = 1; /** *

The camera device cannot represent the values of {@link CameraCharacteristics#LENS_POSE_TRANSLATION android.lens.poseTranslation} * and {@link CameraCharacteristics#LENS_POSE_ROTATION android.lens.poseRotation} accurately enough. One such example is a camera device * on the cover of a foldable phone: in order to measure the pose translation and rotation, * some kind of hinge position sensor would be needed.

*

The value of {@link CameraCharacteristics#LENS_POSE_TRANSLATION android.lens.poseTranslation} must be all zeros, and * {@link CameraCharacteristics#LENS_POSE_ROTATION android.lens.poseRotation} must be values matching its default facing.

* * @see CameraCharacteristics#LENS_POSE_ROTATION * @see CameraCharacteristics#LENS_POSE_TRANSLATION * @see CameraCharacteristics#LENS_POSE_REFERENCE */ public static final int LENS_POSE_REFERENCE_UNDEFINED = 2; /** *

The value of {@link CameraCharacteristics#LENS_POSE_TRANSLATION android.lens.poseTranslation} is relative to the origin of the * automotive sensor coordinate system, which is at the center of the rear axle.

* * @see CameraCharacteristics#LENS_POSE_TRANSLATION * @see CameraCharacteristics#LENS_POSE_REFERENCE */ public static final int LENS_POSE_REFERENCE_AUTOMOTIVE = 3; // // Enumeration values for CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES // /** *

The minimal set of capabilities that every camera * device (regardless of {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL android.info.supportedHardwareLevel}) * supports.

*

This capability is listed by all normal devices, and * indicates that the camera device has a feature set * that's comparable to the baseline requirements for the * older android.hardware.Camera API.

*

Devices with the DEPTH_OUTPUT capability might not list this * capability, indicating that they support only depth measurement, * not standard color output.
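For example, a minimal sketch of checking whether this capability is advertised, assuming {@code chars} holds the camera's {@link CameraCharacteristics}:
<pre>{@code
int[] caps = chars.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
boolean backwardCompatible = false;
if (caps != null) {
    for (int c : caps) {
        if (c == CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE) {
            backwardCompatible = true;
            break;
        }
    }
}
}</pre>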

* * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES */ public static final int REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE = 0; /** *

The camera device can be manually controlled (3A algorithms such as auto-exposure and auto-focus can be bypassed). The camera device supports basic manual control of the sensor image acquisition related stages. This means the following controls are guaranteed to be supported:

• Manual frame duration control
    • {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration}
    • {@link CameraCharacteristics#SENSOR_INFO_MAX_FRAME_DURATION android.sensor.info.maxFrameDuration}
• Manual exposure control
    • {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime}
    • {@link CameraCharacteristics#SENSOR_INFO_EXPOSURE_TIME_RANGE android.sensor.info.exposureTimeRange}
• Manual sensitivity control
    • {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}
    • {@link CameraCharacteristics#SENSOR_INFO_SENSITIVITY_RANGE android.sensor.info.sensitivityRange}
• Manual lens control (if the lens is adjustable)
    • android.lens.*
• Manual flash control (if a flash unit is present)
    • android.flash.*
• Manual black level locking
    • {@link CaptureRequest#BLACK_LEVEL_LOCK android.blackLevel.lock}
• Auto exposure lock
    • {@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock}

If any of the above 3A algorithms are enabled, then the camera * device will accurately report the values applied by 3A in the * result.

*

A given camera device may also support additional manual sensor controls, * but this capability only covers the above list of controls.

*

If this is supported, {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} will * additionally return a min frame duration that is greater than * zero for each supported size-format combination.

*

For camera devices with LOGICAL_MULTI_CAMERA capability, when the underlying active * physical camera switches, exposureTime, sensitivity, and lens properties may change * even if AE/AF is locked. However, the overall auto exposure and auto focus experience * for users will be consistent. Refer to LOGICAL_MULTI_CAMERA capability for details.
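For illustration, a minimal sketch of a fully manual exposure request; it assumes MANUAL_SENSOR is advertised, that {@code device} and {@code target} are a valid {@link CameraDevice} and output {@link android.view.Surface}, and that the literal values lie within the ranges advertised by the camera:
<pre>{@code
CaptureRequest.Builder b =
        device.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE); // may throw CameraAccessException
b.addTarget(target);
b.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_OFF); // bypass 3A entirely
b.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 10_000_000L);  // 10 ms, in nanoseconds
b.set(CaptureRequest.SENSOR_SENSITIVITY, 400);            // ISO 400
b.set(CaptureRequest.SENSOR_FRAME_DURATION, 33_333_333L); // ~30 fps, in nanoseconds
CaptureRequest request = b.build();
}</pre>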

* * @see CaptureRequest#BLACK_LEVEL_LOCK * @see CaptureRequest#CONTROL_AE_LOCK * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP * @see CaptureRequest#SENSOR_EXPOSURE_TIME * @see CaptureRequest#SENSOR_FRAME_DURATION * @see CameraCharacteristics#SENSOR_INFO_EXPOSURE_TIME_RANGE * @see CameraCharacteristics#SENSOR_INFO_MAX_FRAME_DURATION * @see CameraCharacteristics#SENSOR_INFO_SENSITIVITY_RANGE * @see CaptureRequest#SENSOR_SENSITIVITY * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES */ public static final int REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR = 1; /** *

The camera device post-processing stages can be manually controlled. * The camera device supports basic manual control of the image post-processing * stages. This means the following controls are guaranteed to be supported:

• Manual tonemap control
    • {@link CaptureRequest#TONEMAP_CURVE android.tonemap.curve}
    • {@link CaptureRequest#TONEMAP_MODE android.tonemap.mode}
    • {@link CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS android.tonemap.maxCurvePoints}
    • {@link CaptureRequest#TONEMAP_GAMMA android.tonemap.gamma}
    • {@link CaptureRequest#TONEMAP_PRESET_CURVE android.tonemap.presetCurve}
• Manual white balance control
    • {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}
    • {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains}
• Manual lens shading map control
    • {@link CaptureRequest#SHADING_MODE android.shading.mode}
    • {@link CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE android.statistics.lensShadingMapMode}
    • android.statistics.lensShadingMap
    • android.lens.info.shadingMapSize
• Manual aberration correction control (if aberration correction is supported)
    • {@link CaptureRequest#COLOR_CORRECTION_ABERRATION_MODE android.colorCorrection.aberrationMode}
    • {@link CameraCharacteristics#COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES android.colorCorrection.availableAberrationModes}
• Auto white balance lock
    • {@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock}

If auto white balance is enabled, then the camera device * will accurately report the values applied by AWB in the result.

*

A given camera device may also support additional post-processing * controls, but this capability only covers the above list of controls.

*

For camera devices with LOGICAL_MULTI_CAMERA capability, when the underlying active physical camera switches, tonemap, white balance, and shading map may change even if AWB is locked. However, the overall post-processing experience for users will be consistent. Refer to the LOGICAL_MULTI_CAMERA capability for details.

* * @see CaptureRequest#COLOR_CORRECTION_ABERRATION_MODE * @see CameraCharacteristics#COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES * @see CaptureRequest#COLOR_CORRECTION_GAINS * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM * @see CaptureRequest#CONTROL_AWB_LOCK * @see CaptureRequest#SHADING_MODE * @see CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE * @see CaptureRequest#TONEMAP_CURVE * @see CaptureRequest#TONEMAP_GAMMA * @see CameraCharacteristics#TONEMAP_MAX_CURVE_POINTS * @see CaptureRequest#TONEMAP_MODE * @see CaptureRequest#TONEMAP_PRESET_CURVE * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES */ public static final int REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING = 2; /** *

The camera device supports outputting RAW buffers and * metadata for interpreting them.

*

Devices supporting the RAW capability allow both for * saving DNG files, and for direct application processing of * raw sensor images.

• RAW_SENSOR is supported as an output format.
• The maximum available resolution for RAW_SENSOR streams will match either the value in {@link CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE android.sensor.info.pixelArraySize} or {@link CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE android.sensor.info.preCorrectionActiveArraySize}.
• All DNG-related optional metadata entries are provided by the camera device.
* * @see CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE * @see CameraCharacteristics#SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES */ public static final int REQUEST_AVAILABLE_CAPABILITIES_RAW = 3; /** *

The camera device supports the Zero Shutter Lag reprocessing use case.

• One input stream is supported, that is, {@link CameraCharacteristics#REQUEST_MAX_NUM_INPUT_STREAMS android.request.maxNumInputStreams} == 1.
• {@link android.graphics.ImageFormat#PRIVATE } is supported as an output/input format, that is, {@link android.graphics.ImageFormat#PRIVATE } is included in the lists of formats returned by {@link android.hardware.camera2.params.StreamConfigurationMap#getInputFormats } and {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats }.
• {@link android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput } returns non-empty int[] for each supported input format returned by {@link android.hardware.camera2.params.StreamConfigurationMap#getInputFormats }.
• Each size returned by {@link android.hardware.camera2.params.StreamConfigurationMap#getInputSizes getInputSizes(ImageFormat.PRIVATE)} is also included in {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes getOutputSizes(ImageFormat.PRIVATE)}.
• Using {@link android.graphics.ImageFormat#PRIVATE } does not cause a frame rate drop relative to the sensor's maximum capture rate (at that resolution).
• {@link android.graphics.ImageFormat#PRIVATE } will be reprocessable into both {@link android.graphics.ImageFormat#YUV_420_888 } and {@link android.graphics.ImageFormat#JPEG } formats.
• For a MONOCHROME camera supporting Y8 format, {@link android.graphics.ImageFormat#PRIVATE } will be reprocessable into {@link android.graphics.ImageFormat#Y8 }.
• The maximum available resolution for PRIVATE streams (both input/output) will match the maximum available resolution of JPEG streams.
• Static metadata {@link CameraCharacteristics#REPROCESS_MAX_CAPTURE_STALL android.reprocess.maxCaptureStall}.
• Only the below controls are effective for reprocessing requests and will be present in capture results; other controls in reprocess requests will be ignored by the camera device.
    • android.jpeg.*
    • {@link CaptureRequest#NOISE_REDUCTION_MODE android.noiseReduction.mode}
    • {@link CaptureRequest#EDGE_MODE android.edge.mode}
• {@link CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES android.noiseReduction.availableNoiseReductionModes} and {@link CameraCharacteristics#EDGE_AVAILABLE_EDGE_MODES android.edge.availableEdgeModes} will both list ZERO_SHUTTER_LAG as a supported mode.
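A condensed sketch of the reprocessing flow (illustration only; {@code session}, {@code device}, {@code zslImage}, {@code zslResult}, {@code jpegSurface} and {@code handler} are assumed to come from a session already configured with a PRIVATE input stream and matching outputs; exception handling omitted):
<pre>{@code
ImageWriter writer = ImageWriter.newInstance(session.getInputSurface(), 2 /* maxImages */);
writer.queueInputImage(zslImage);  // hand a previously captured PRIVATE buffer back as input
// createReprocessCaptureRequest and capture may throw CameraAccessException
CaptureRequest.Builder b = device.createReprocessCaptureRequest(zslResult);
b.addTarget(jpegSurface);
session.capture(b.build(), null /* callback */, handler);
}</pre>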
* * @see CameraCharacteristics#EDGE_AVAILABLE_EDGE_MODES * @see CaptureRequest#EDGE_MODE * @see CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES * @see CaptureRequest#NOISE_REDUCTION_MODE * @see CameraCharacteristics#REPROCESS_MAX_CAPTURE_STALL * @see CameraCharacteristics#REQUEST_MAX_NUM_INPUT_STREAMS * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES */ public static final int REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING = 4; /** *

The camera device supports accurately reporting the sensor settings for many of * the sensor controls while the built-in 3A algorithm is running. This allows * reporting of sensor settings even when these settings cannot be manually changed.

*

The values reported for the following controls are guaranteed to be available * in the CaptureResult, including when 3A is enabled:

• Exposure control
    • {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime}
• Sensitivity control
    • {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}
• Lens controls (if the lens is adjustable)
    • {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance}
    • {@link CaptureRequest#LENS_APERTURE android.lens.aperture}

This capability is a subset of the MANUAL_SENSOR control capability, and will * always be included if the MANUAL_SENSOR capability is available.

* * @see CaptureRequest#LENS_APERTURE * @see CaptureRequest#LENS_FOCUS_DISTANCE * @see CaptureRequest#SENSOR_EXPOSURE_TIME * @see CaptureRequest#SENSOR_SENSITIVITY * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES */ public static final int REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS = 5; /** *

The camera device supports capturing high-resolution images at >= 20 frames per * second, in at least the uncompressed YUV format, when post-processing settings are * set to FAST. Additionally, all image resolutions less than 24 megapixels can be * captured at >= 10 frames per second. Here, 'high resolution' means at least 8 * megapixels, or the maximum resolution of the device, whichever is smaller.

*

More specifically, this means that a size matching the camera device's active array * size is listed as a supported size for the {@link android.graphics.ImageFormat#YUV_420_888 } format in either {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes } or {@link android.hardware.camera2.params.StreamConfigurationMap#getHighResolutionOutputSizes }, * with a minimum frame duration for that format and size of either <= 1/20 s, or * <= 1/10 s if the image size is less than 24 megapixels, respectively; and * the {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES android.control.aeAvailableTargetFpsRanges} entry lists at least one FPS range * where the minimum FPS is >= 1 / minimumFrameDuration for the maximum-size * YUV_420_888 format. If that maximum size is listed in {@link android.hardware.camera2.params.StreamConfigurationMap#getHighResolutionOutputSizes }, * then the list of resolutions for YUV_420_888 from {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes } contains at * least one resolution >= 8 megapixels, with a minimum frame duration of <= 1/20 * s.

*

If the device supports the {@link android.graphics.ImageFormat#RAW10 }, {@link android.graphics.ImageFormat#RAW12 }, or {@link android.graphics.ImageFormat#Y8 } formats, then those can also be captured at the same rate as the maximum-size YUV_420_888 resolution.

*

If the device supports the PRIVATE_REPROCESSING capability, then the same guarantees * as for the YUV_420_888 format also apply to the {@link android.graphics.ImageFormat#PRIVATE } format.

*

In addition, the {@link CameraCharacteristics#SYNC_MAX_LATENCY android.sync.maxLatency} field is guaranteed to have a value between 0 * and 4, inclusive. {@link CameraCharacteristics#CONTROL_AE_LOCK_AVAILABLE android.control.aeLockAvailable} and {@link CameraCharacteristics#CONTROL_AWB_LOCK_AVAILABLE android.control.awbLockAvailable} * are also guaranteed to be true so burst capture with these two locks ON yields * consistent image output.

* * @see CameraCharacteristics#CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES * @see CameraCharacteristics#CONTROL_AE_LOCK_AVAILABLE * @see CameraCharacteristics#CONTROL_AWB_LOCK_AVAILABLE * @see CameraCharacteristics#SYNC_MAX_LATENCY * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES */ public static final int REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE = 6; /** *

The camera device supports the YUV_420_888 reprocessing use case, similar to PRIVATE_REPROCESSING. This capability requires the camera device to support the following:

• One input stream is supported, that is, {@link CameraCharacteristics#REQUEST_MAX_NUM_INPUT_STREAMS android.request.maxNumInputStreams} == 1.
• {@link android.graphics.ImageFormat#YUV_420_888 } is supported as an output/input format, that is, YUV_420_888 is included in the lists of formats returned by {@link android.hardware.camera2.params.StreamConfigurationMap#getInputFormats } and {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats }.
• {@link android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput } returns non-empty int[] for each supported input format returned by {@link android.hardware.camera2.params.StreamConfigurationMap#getInputFormats }.
• Each size returned by {@link android.hardware.camera2.params.StreamConfigurationMap#getInputSizes getInputSizes(YUV_420_888)} is also included in {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes getOutputSizes(YUV_420_888)}.
• Using {@link android.graphics.ImageFormat#YUV_420_888 } does not cause a frame rate drop relative to the sensor's maximum capture rate (at that resolution).
• {@link android.graphics.ImageFormat#YUV_420_888 } will be reprocessable into both {@link android.graphics.ImageFormat#YUV_420_888 } and {@link android.graphics.ImageFormat#JPEG } formats.
• The maximum available resolution for {@link android.graphics.ImageFormat#YUV_420_888 } streams (both input/output) will match the maximum available resolution of {@link android.graphics.ImageFormat#JPEG } streams.
• For a MONOCHROME camera with Y8 format support, all the requirements mentioned above for YUV_420_888 apply for Y8 format as well.
• Static metadata {@link CameraCharacteristics#REPROCESS_MAX_CAPTURE_STALL android.reprocess.maxCaptureStall}.
• Only the below controls are effective for reprocessing requests and will be present in capture results. The reprocess requests are from the original capture results that are associated with the intermediate {@link android.graphics.ImageFormat#YUV_420_888 } output buffers. All other controls in the reprocess requests will be ignored by the camera device.
    • android.jpeg.*
    • {@link CaptureRequest#NOISE_REDUCTION_MODE android.noiseReduction.mode}
    • {@link CaptureRequest#EDGE_MODE android.edge.mode}
    • {@link CaptureRequest#REPROCESS_EFFECTIVE_EXPOSURE_FACTOR android.reprocess.effectiveExposureFactor}
• {@link CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES android.noiseReduction.availableNoiseReductionModes} and {@link CameraCharacteristics#EDGE_AVAILABLE_EDGE_MODES android.edge.availableEdgeModes} will both list ZERO_SHUTTER_LAG as a supported mode.
* * @see CameraCharacteristics#EDGE_AVAILABLE_EDGE_MODES * @see CaptureRequest#EDGE_MODE * @see CameraCharacteristics#NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES * @see CaptureRequest#NOISE_REDUCTION_MODE * @see CaptureRequest#REPROCESS_EFFECTIVE_EXPOSURE_FACTOR * @see CameraCharacteristics#REPROCESS_MAX_CAPTURE_STALL * @see CameraCharacteristics#REQUEST_MAX_NUM_INPUT_STREAMS * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES */ public static final int REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING = 7; /** *

The camera device can produce depth measurements from its field of view.

*

This capability requires the camera device to support the following:

• {@link android.graphics.ImageFormat#DEPTH16 } is supported as an output format.
• {@link android.graphics.ImageFormat#DEPTH_POINT_CLOUD } is optionally supported as an output format.
• This camera device, and all camera devices with the same {@link CameraCharacteristics#LENS_FACING android.lens.facing}, will list the following calibration metadata entries in both {@link android.hardware.camera2.CameraCharacteristics } and {@link android.hardware.camera2.CaptureResult }:
    • {@link CameraCharacteristics#LENS_POSE_TRANSLATION android.lens.poseTranslation}
    • {@link CameraCharacteristics#LENS_POSE_ROTATION android.lens.poseRotation}
    • {@link CameraCharacteristics#LENS_INTRINSIC_CALIBRATION android.lens.intrinsicCalibration}
    • {@link CameraCharacteristics#LENS_DISTORTION android.lens.distortion}
• The {@link CameraCharacteristics#DEPTH_DEPTH_IS_EXCLUSIVE android.depth.depthIsExclusive} entry is listed by this device.
• As of Android P, the {@link CameraCharacteristics#LENS_POSE_REFERENCE android.lens.poseReference} entry is listed by this device.
• A LIMITED camera with only the DEPTH_OUTPUT capability does not have to support normal YUV_420_888, Y8, JPEG, and PRIV-format outputs. It only has to support the DEPTH16 format.

Generally, depth output operates at a slower frame rate than standard color capture, * so the DEPTH16 and DEPTH_POINT_CLOUD formats will commonly have a stall duration that * should be accounted for (see {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration }). * On a device that supports both depth and color-based output, to enable smooth preview, * using a repeating burst is recommended, where a depth-output target is only included * once every N frames, where N is the ratio between preview output rate and depth output * rate, including depth stall time.
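A sketch of setting up a DEPTH16 output (illustration only; it assumes DEPTH_OUTPUT is advertised, that {@code chars} is the camera's {@link CameraCharacteristics}, and that the size array is non-empty):
<pre>{@code
StreamConfigurationMap map = chars.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
Size[] depthSizes = map.getOutputSizes(ImageFormat.DEPTH16);
ImageReader depthReader = ImageReader.newInstance(
        depthSizes[0].getWidth(), depthSizes[0].getHeight(), ImageFormat.DEPTH16, 2);
// depthReader.getSurface() can then be added as a (typically lower-rate) target in a repeating burst.
}</pre>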

* * @see CameraCharacteristics#DEPTH_DEPTH_IS_EXCLUSIVE * @see CameraCharacteristics#LENS_DISTORTION * @see CameraCharacteristics#LENS_FACING * @see CameraCharacteristics#LENS_INTRINSIC_CALIBRATION * @see CameraCharacteristics#LENS_POSE_REFERENCE * @see CameraCharacteristics#LENS_POSE_ROTATION * @see CameraCharacteristics#LENS_POSE_TRANSLATION * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES */ public static final int REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT = 8; /** *

The device supports constrained high speed video recording (frame rate >=120fps) use * case. The camera device will support high speed capture session created by {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession }, which * only accepts high speed request lists created by {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList }.

*

A camera device can still support high speed video streaming by advertising the high speed FPS ranges in {@link CameraCharacteristics#CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES android.control.aeAvailableTargetFpsRanges}. In that case, all the normal per-frame capture request control and synchronization requirements apply to the high speed FPS ranges, the same as to all other FPS ranges. This capability instead describes a specialized operating mode with many limitations (see below), which is targeted only at high speed video recording.

*

The supported high speed video sizes and fps ranges are specified in {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges }. * To get desired output frame rates, the application is only allowed to select video * size and FPS range combinations provided by {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoSizes }. The * fps range can be controlled via {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE android.control.aeTargetFpsRange}.

*

In this capability, the camera device will override aeMode, awbMode, and afMode to * ON, AUTO, and CONTINUOUS_VIDEO, respectively. All post-processing block mode * controls will be overridden to be FAST. Therefore, no manual control of capture * and post-processing parameters is possible. All other controls operate the * same as when {@link CaptureRequest#CONTROL_MODE android.control.mode} == AUTO. This means that all other * android.control.* fields continue to work, such as

• {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE android.control.aeTargetFpsRange}
• {@link CaptureRequest#CONTROL_AE_EXPOSURE_COMPENSATION android.control.aeExposureCompensation}
• {@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock}
• {@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock}
• {@link CaptureRequest#CONTROL_EFFECT_MODE android.control.effectMode}
• {@link CaptureRequest#CONTROL_AE_REGIONS android.control.aeRegions}
• {@link CaptureRequest#CONTROL_AF_REGIONS android.control.afRegions}
• {@link CaptureRequest#CONTROL_AWB_REGIONS android.control.awbRegions}
• {@link CaptureRequest#CONTROL_AF_TRIGGER android.control.afTrigger}
• {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger}
• {@link CaptureRequest#CONTROL_ZOOM_RATIO android.control.zoomRatio}

Outside of android.control.*, the following controls will work:

• {@link CaptureRequest#FLASH_MODE android.flash.mode} (TORCH mode only, automatic flash for still capture will not work since aeMode is ON)
• {@link CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE android.lens.opticalStabilizationMode} (if it is supported)
• {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}
• {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode} (if it is supported)

For the high speed recording use case, the actual maximum supported frame rate may be lower than what the camera can output, depending on the destination Surfaces for the image data. For example, if the destination surface is from a video encoder, the application needs to check whether the video encoder is capable of supporting the high frame rate for a given video size, or it will end up with a lower recording frame rate. If the destination surface is from a preview window, the actual preview frame rate will be bounded by the screen refresh rate.

*

The camera device will only support up to 2 simultaneous high speed output surfaces (preview and recording surfaces) in this mode. The above controls will be effective only if all of the below conditions are true:

• The application creates a camera capture session with no more than 2 surfaces via {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession }. The targeted surfaces must be preview surfaces (either from {@link android.view.SurfaceView } or {@link android.graphics.SurfaceTexture }) or recording surfaces (either from {@link android.media.MediaRecorder#getSurface } or {@link android.media.MediaCodec#createInputSurface }).
• The stream sizes are selected from the sizes reported by {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoSizes }.
• The FPS ranges are selected from {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges }.

When the above conditions are NOT satisfied, {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession } will fail.

*

Switching to an FPS range that has a different maximum FPS may trigger some camera device reconfigurations, which may introduce extra latency. It is recommended that the application avoid unnecessary maximum target FPS changes as much as possible during high speed streaming.
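A condensed sketch of the expected call sequence (illustration only; {@code previewSurface}, {@code recordSurface}, {@code fpsRange} and {@code handler} are assumed to be chosen from the high speed sizes and FPS ranges advertised by {@link android.hardware.camera2.params.StreamConfigurationMap}):
<pre>{@code
try {
    device.createConstrainedHighSpeedCaptureSession(
            Arrays.asList(previewSurface, recordSurface),
            new CameraCaptureSession.StateCallback() {
                @Override
                public void onConfigured(CameraCaptureSession session) {
                    CameraConstrainedHighSpeedCaptureSession hs =
                            (CameraConstrainedHighSpeedCaptureSession) session;
                    try {
                        CaptureRequest.Builder b =
                                device.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
                        b.addTarget(previewSurface);
                        b.addTarget(recordSurface);
                        b.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
                        hs.setRepeatingBurst(hs.createHighSpeedRequestList(b.build()),
                                null /* callback */, handler);
                    } catch (CameraAccessException e) {
                        Log.e(TAG, "High speed capture failed", e);
                    }
                }
                @Override
                public void onConfigureFailed(CameraCaptureSession session) { }
            }, handler);
} catch (CameraAccessException e) {
    Log.e(TAG, "Failed to create constrained high speed session", e);
}
}</pre>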

* * @see CameraCharacteristics#CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES * @see CaptureRequest#CONTROL_AE_EXPOSURE_COMPENSATION * @see CaptureRequest#CONTROL_AE_LOCK * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER * @see CaptureRequest#CONTROL_AE_REGIONS * @see CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE * @see CaptureRequest#CONTROL_AF_REGIONS * @see CaptureRequest#CONTROL_AF_TRIGGER * @see CaptureRequest#CONTROL_AWB_LOCK * @see CaptureRequest#CONTROL_AWB_REGIONS * @see CaptureRequest#CONTROL_EFFECT_MODE * @see CaptureRequest#CONTROL_MODE * @see CaptureRequest#CONTROL_ZOOM_RATIO * @see CaptureRequest#FLASH_MODE * @see CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE * @see CaptureRequest#SCALER_CROP_REGION * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES */ public static final int REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO = 9; /** *

The camera device supports the MOTION_TRACKING value for * {@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent}, which limits maximum exposure time to 20 ms.

*

This limits the motion blur of capture images, resulting in better image tracking * results for use cases such as image stabilization or augmented reality.
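For example (a sketch only; {@code device} and {@code trackingSurface} are assumed), the intent is simply set on the request:
<pre>{@code
// createCaptureRequest may throw CameraAccessException
CaptureRequest.Builder b = device.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
b.addTarget(trackingSurface);
b.set(CaptureRequest.CONTROL_CAPTURE_INTENT,
        CameraMetadata.CONTROL_CAPTURE_INTENT_MOTION_TRACKING);
}</pre>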

* * @see CaptureRequest#CONTROL_CAPTURE_INTENT * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES */ public static final int REQUEST_AVAILABLE_CAPABILITIES_MOTION_TRACKING = 10; /** *

The camera device is a logical camera backed by two or more physical cameras.

*

In API level 28, the physical cameras must also be exposed to the application via * {@link android.hardware.camera2.CameraManager#getCameraIdList }.

*

Starting from API level 29:

• Some or all physical cameras may not be independently exposed to the application, in which case the physical camera IDs will not be available in {@link android.hardware.camera2.CameraManager#getCameraIdList }. But the application can still query the physical cameras' characteristics by calling {@link android.hardware.camera2.CameraManager#getCameraCharacteristics }.
• If a physical camera is hidden from the camera ID list, the mandatory stream combinations for that physical camera must be supported through the logical camera using physical streams. One exception is that in API level 30, a physical camera may become unavailable via the {@link CameraManager.AvailabilityCallback#onPhysicalCameraUnavailable } callback.

Combinations of logical and physical streams, or physical streams from different * physical cameras are not guaranteed. However, if the camera device supports * {@link CameraDevice#isSessionConfigurationSupported }, * application must be able to query whether a stream combination involving physical * streams is supported by calling * {@link CameraDevice#isSessionConfigurationSupported }.

*

A camera application shouldn't assume that there is at most one rear camera and one front camera in the system. For an application that switches between front and back cameras, the recommendation is to switch between the first rear camera and the first front camera in the list of supported camera devices.

*

This capability requires the camera device to support the following:

• The IDs of underlying physical cameras are returned via {@link android.hardware.camera2.CameraCharacteristics#getPhysicalCameraIds }.
• This camera device must list static metadata {@link CameraCharacteristics#LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE android.logicalMultiCamera.sensorSyncType} in {@link android.hardware.camera2.CameraCharacteristics }.
• The underlying physical cameras' static metadata must list the following entries, so that the application can correlate pixels from the physical streams:
    • {@link CameraCharacteristics#LENS_POSE_REFERENCE android.lens.poseReference}
    • {@link CameraCharacteristics#LENS_POSE_ROTATION android.lens.poseRotation}
    • {@link CameraCharacteristics#LENS_POSE_TRANSLATION android.lens.poseTranslation}
    • {@link CameraCharacteristics#LENS_INTRINSIC_CALIBRATION android.lens.intrinsicCalibration}
    • {@link CameraCharacteristics#LENS_DISTORTION android.lens.distortion}
• The SENSOR_INFO_TIMESTAMP_SOURCE of the logical device and physical devices must be the same.
• The logical camera must be a LIMITED or higher device.

A logical camera device's dynamic metadata may contain {@link CaptureResult#LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID android.logicalMultiCamera.activePhysicalId} to notify the application of the current active physical camera ID. An active physical camera is the physical camera from which the logical camera's main image data outputs (YUV or RAW) and metadata originate. In addition, this serves as an indication of which physical camera is used to output to a RAW stream, or, in case only physical cameras support RAW, which physical RAW stream the application should request.

*

The logical camera's static metadata tags below describe the default active physical camera. An active physical camera is the default if it is used when the application directly uses requests built from a template. All templates will default to the same active physical camera.

• {@link CameraCharacteristics#SENSOR_INFO_SENSITIVITY_RANGE android.sensor.info.sensitivityRange}
• {@link CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT android.sensor.info.colorFilterArrangement}
• {@link CameraCharacteristics#SENSOR_INFO_EXPOSURE_TIME_RANGE android.sensor.info.exposureTimeRange}
• {@link CameraCharacteristics#SENSOR_INFO_MAX_FRAME_DURATION android.sensor.info.maxFrameDuration}
• {@link CameraCharacteristics#SENSOR_INFO_PHYSICAL_SIZE android.sensor.info.physicalSize}
• {@link CameraCharacteristics#SENSOR_INFO_WHITE_LEVEL android.sensor.info.whiteLevel}
• {@link CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED android.sensor.info.lensShadingApplied}
• {@link CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 android.sensor.referenceIlluminant1}
• {@link CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT2 android.sensor.referenceIlluminant2}
• {@link CameraCharacteristics#SENSOR_CALIBRATION_TRANSFORM1 android.sensor.calibrationTransform1}
• {@link CameraCharacteristics#SENSOR_CALIBRATION_TRANSFORM2 android.sensor.calibrationTransform2}
• {@link CameraCharacteristics#SENSOR_COLOR_TRANSFORM1 android.sensor.colorTransform1}
• {@link CameraCharacteristics#SENSOR_COLOR_TRANSFORM2 android.sensor.colorTransform2}
• {@link CameraCharacteristics#SENSOR_FORWARD_MATRIX1 android.sensor.forwardMatrix1}
• {@link CameraCharacteristics#SENSOR_FORWARD_MATRIX2 android.sensor.forwardMatrix2}
• {@link CameraCharacteristics#SENSOR_BLACK_LEVEL_PATTERN android.sensor.blackLevelPattern}
• {@link CameraCharacteristics#SENSOR_MAX_ANALOG_SENSITIVITY android.sensor.maxAnalogSensitivity}
• {@link CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS android.sensor.opticalBlackRegions}
• {@link CameraCharacteristics#SENSOR_AVAILABLE_TEST_PATTERN_MODES android.sensor.availableTestPatternModes}
• {@link CameraCharacteristics#LENS_INFO_HYPERFOCAL_DISTANCE android.lens.info.hyperfocalDistance}
• {@link CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE android.lens.info.minimumFocusDistance}
• {@link CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION android.lens.info.focusDistanceCalibration}
• {@link CameraCharacteristics#LENS_POSE_ROTATION android.lens.poseRotation}
• {@link CameraCharacteristics#LENS_POSE_TRANSLATION android.lens.poseTranslation}
• {@link CameraCharacteristics#LENS_INTRINSIC_CALIBRATION android.lens.intrinsicCalibration}
• {@link CameraCharacteristics#LENS_POSE_REFERENCE android.lens.poseReference}
• {@link CameraCharacteristics#LENS_DISTORTION android.lens.distortion}

The field of view of non-RAW physical streams must not be smaller than that of the * non-RAW logical streams, or the maximum field-of-view of the physical camera, * whichever is smaller. The application should check the physical capture result * metadata for how the physical streams are cropped or zoomed. More specifically, given * the physical camera result metadata, the effective horizontal field-of-view of the * physical camera is:

fov = 2 * atan2(cropW * sensorW / (2 * zoomRatio * activeArrayW), focalLength)

where the equation parameters are the physical camera's crop region width, physical * sensor width, zoom ratio, active array width, and focal length respectively. Typically * the physical stream of active physical camera has the same field-of-view as the * logical streams. However, the same may not be true for physical streams from * non-active physical cameras. For example, if the logical camera has a wide-ultrawide * configuration where the wide lens is the default, when the crop region is set to the * logical camera's active array size, (and the zoom ratio set to 1.0 starting from * Android 11), a physical stream for the ultrawide camera may prefer outputting images * with larger field-of-view than that of the wide camera for better stereo matching * margin or more robust motion tracking. At the same time, the physical non-RAW streams' * field of view must not be smaller than the requested crop region and zoom ratio, as * long as it's within the physical lens' capability. For example, for a logical camera * with wide-tele lens configuration where the wide lens is the default, if the logical * camera's crop region is set to maximum size, and zoom ratio set to 1.0, the physical * stream for the tele lens will be configured to its maximum size crop region (no zoom).

*

Deprecated: Prior to Android 11, the field of view of all non-RAW physical streams could not be larger than that of non-RAW logical streams. If the logical camera has a wide-ultrawide lens configuration where the wide lens is the default, when the logical camera's crop region is set to maximum size, the FOV of the physical streams for the ultrawide lens will be the same as the logical stream, by making the crop region smaller than its active array size to compensate for the smaller focal length.

*

For a logical camera, typically the underlying physical cameras have different RAW * capabilities (such as resolution or CFA pattern). There are two ways for the * application to capture RAW images from the logical camera:

• If the logical camera has RAW capability, the application can create and use RAW streams in the same way as before. In case a RAW stream is configured, to maintain backward compatibility, the camera device makes sure the default active physical camera remains active and does not switch to other physical cameras. (One exception is that, if the logical camera consists of identical image sensors and advertises multiple focalLength due to different lenses, the camera device may generate RAW images from different physical cameras based on the focalLength being set by the application.) This backward-compatible approach usually results in loss of optical zoom to the telephoto or ultrawide lens.
• Alternatively, if supported by the device, {@link android.hardware.camera2.MultiResolutionImageReader } can be used to capture RAW images from one of the underlying physical cameras (depending on the current zoom level). Because different physical cameras may have different RAW characteristics, the application needs to use the characteristics and result metadata of the active physical camera for the relevant RAW metadata.

The capture request and result metadata tags required for backward compatible camera * functionalities will be solely based on the logical camera capability. On the other * hand, the use of manual capture controls (sensor or post-processing) with a * logical camera may result in unexpected behavior when the HAL decides to switch * between physical cameras with different characteristics under the hood. For example, * when the application manually sets exposure time and sensitivity while zooming in, * the brightness of the camera images may suddenly change because HAL switches from one * physical camera to the other.
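For illustration only, a sketch of walking the underlying physical cameras; {@code manager} and {@code chars} are assumed to be the {@link CameraManager} and the logical camera's {@link CameraCharacteristics}:
<pre>{@code
for (String physicalId : chars.getPhysicalCameraIds()) {
    // getCameraCharacteristics may throw CameraAccessException (handling omitted)
    CameraCharacteristics physChars = manager.getCameraCharacteristics(physicalId);
    // e.g. read per-camera calibration such as android.lens.poseTranslation
    float[] translation = physChars.get(CameraCharacteristics.LENS_POSE_TRANSLATION);
}
}</pre>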

* * @see CameraCharacteristics#LENS_DISTORTION * @see CameraCharacteristics#LENS_INFO_FOCUS_DISTANCE_CALIBRATION * @see CameraCharacteristics#LENS_INFO_HYPERFOCAL_DISTANCE * @see CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE * @see CameraCharacteristics#LENS_INTRINSIC_CALIBRATION * @see CameraCharacteristics#LENS_POSE_REFERENCE * @see CameraCharacteristics#LENS_POSE_ROTATION * @see CameraCharacteristics#LENS_POSE_TRANSLATION * @see CaptureResult#LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID * @see CameraCharacteristics#LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE * @see CameraCharacteristics#SENSOR_AVAILABLE_TEST_PATTERN_MODES * @see CameraCharacteristics#SENSOR_BLACK_LEVEL_PATTERN * @see CameraCharacteristics#SENSOR_CALIBRATION_TRANSFORM1 * @see CameraCharacteristics#SENSOR_CALIBRATION_TRANSFORM2 * @see CameraCharacteristics#SENSOR_COLOR_TRANSFORM1 * @see CameraCharacteristics#SENSOR_COLOR_TRANSFORM2 * @see CameraCharacteristics#SENSOR_FORWARD_MATRIX1 * @see CameraCharacteristics#SENSOR_FORWARD_MATRIX2 * @see CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT * @see CameraCharacteristics#SENSOR_INFO_EXPOSURE_TIME_RANGE * @see CameraCharacteristics#SENSOR_INFO_LENS_SHADING_APPLIED * @see CameraCharacteristics#SENSOR_INFO_MAX_FRAME_DURATION * @see CameraCharacteristics#SENSOR_INFO_PHYSICAL_SIZE * @see CameraCharacteristics#SENSOR_INFO_SENSITIVITY_RANGE * @see CameraCharacteristics#SENSOR_INFO_WHITE_LEVEL * @see CameraCharacteristics#SENSOR_MAX_ANALOG_SENSITIVITY * @see CameraCharacteristics#SENSOR_OPTICAL_BLACK_REGIONS * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT2 * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES */ public static final int REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA = 11; /** *

The camera device is a monochrome camera that doesn't contain a color filter array, and for a YUV_420_888 stream, the pixel values on the U and V planes are all 128.

*

A MONOCHROME camera must support the guaranteed stream combinations required for * its device level and capabilities. Additionally, if the monochrome camera device * supports Y8 format, all mandatory stream combination requirements related to {@link android.graphics.ImageFormat#YUV_420_888 YUV_420_888} apply * to {@link android.graphics.ImageFormat#Y8 Y8} as well. There are no * mandatory stream combination requirements with regard to * {@link android.graphics.ImageFormat#Y8 Y8} for Bayer camera devices.

*

Starting from Android Q, the SENSOR_INFO_COLOR_FILTER_ARRANGEMENT of a MONOCHROME * camera will be either MONO or NIR.

* @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES */ public static final int REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME = 12; /** *

The camera device is capable of writing image data into a region of memory * inaccessible to Android userspace or the Android kernel, and only accessible to * trusted execution environments (TEE).

* @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES */ public static final int REQUEST_AVAILABLE_CAPABILITIES_SECURE_IMAGE_DATA = 13; /** *

The camera device is only accessible by Android's system components and privileged applications. Processes need to have the android.permission.SYSTEM_CAMERA permission in addition to android.permission.CAMERA in order to connect to this camera device.

* @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES */ public static final int REQUEST_AVAILABLE_CAPABILITIES_SYSTEM_CAMERA = 14; /** *

* <p>The camera device supports the OFFLINE_PROCESSING use case.</p>
* <p>With OFFLINE_PROCESSING capability, the application can switch an ongoing
* capture session to offline mode by calling the
* CameraCaptureSession#switchToOffline method and specify streams to be kept in offline
* mode. The camera will then stop currently active repeating requests, prepare for
* some requests to go into offline mode, and return an offline session object. After
* the switchToOffline call returns, the original capture session is in the closed state as
* if the CameraCaptureSession#close method has been called.
* In the offline mode, all inflight requests will continue to be processed in the
* background, and the application can immediately close the camera or create a new
* capture session without losing those requests' output images and capture results.</p>
* <p>While the camera device is processing offline requests, it
* might not be able to support all stream configurations it can support
* without offline requests. When that happens, the createCaptureSession
* method call will fail. The following stream configurations are guaranteed to work
* without hitting the resource busy exception:</p>
* <ul>
* <li>One ongoing offline session: target one output surface of YUV or
* JPEG format, any resolution.</li>
* <li>The active camera capture session:
* <ul>
* <li>One preview surface (SurfaceView or SurfaceTexture) up to 1920 width</li>
* <li>One YUV ImageReader surface up to 1920 width</li>
* <li>One JPEG ImageReader, any resolution: the camera device is
* allowed to slow down JPEG output speed by 50% if there is any ongoing offline
* session.</li>
* <li>If the device supports PRIVATE_REPROCESSING, one pair of ImageWriter/ImageReader
* surfaces of private format, with the same resolution that is larger or equal to
* the JPEG ImageReader resolution above.</li>
* </ul>
* </li>
* <li>Alternatively, the active camera session above can be replaced by a legacy
* {@link android.hardware.Camera Camera} with the following parameter settings:
* <ul>
* <li>Preview size up to 1920 width</li>
* <li>Preview callback size up to 1920 width</li>
* <li>Video size up to 1920 width</li>
* <li>Picture size, any resolution: the camera device is
* allowed to slow down JPEG output speed by 50% if there is any ongoing offline
* session.</li>
* </ul>
* </li>
* </ul>
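* <p>A minimal sketch (not part of the original documentation) of switching an ongoing
* session to offline mode; {@code session} is an active {@link CameraCaptureSession},
* {@code jpegSurface} is one of its configured output surfaces, {@code executor} and
* {@code offlineCallback} are assumed to exist, and exception handling is omitted:</p>
* <pre>{@code
* // Keep only the JPEG output in offline mode; its in-flight requests keep processing.
* if (session.supportsOfflineProcessing(jpegSurface)) {
*     CameraOfflineSession offlineSession = session.switchToOffline(
*             Collections.singletonList(jpegSurface), executor, offlineCallback);
*     // The original session is now closed; the device (or a new session) can be reused
*     // while offlineSession finishes the remaining requests in the background.
* }
* }</pre>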
* @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES */ public static final int REQUEST_AVAILABLE_CAPABILITIES_OFFLINE_PROCESSING = 15; /** *

* <p>This camera device is capable of producing ultra high resolution images in
* addition to the image sizes described in the
* {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap}.
* It can operate in 'default' mode and 'max resolution' mode. It generally does this
* by binning pixels in 'default' mode and not binning them in 'max resolution' mode.
* {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP android.scaler.streamConfigurationMap} describes the streams supported in 'default'
* mode.
* The stream configurations supported in 'max resolution' mode are described by
* {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION android.scaler.streamConfigurationMapMaximumResolution}.
* The maximum resolution mode pixel array size of a camera device
* ({@link CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE android.sensor.info.pixelArraySize}) with this capability
* will be at least 24 megapixels.</p>
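* <p>A minimal sketch (not part of the original documentation) of requesting a capture in
* 'max resolution' mode; {@code device} is an open {@link CameraDevice}, and {@code session}
* and {@code rawSurface} are assumed to have been configured from a size reported by
* {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION android.scaler.streamConfigurationMapMaximumResolution}
* (exception handling omitted):</p>
* <pre>{@code
* CaptureRequest.Builder builder =
*         device.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
* builder.addTarget(rawSurface);
* // Switch the sensor out of the default (binned) pixel mode for this request.
* builder.set(CaptureRequest.SENSOR_PIXEL_MODE,
*         CameraMetadata.SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
* session.capture(builder.build(), null, null); // no result callback needed here
* }</pre>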

* * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION * @see CameraCharacteristics#SENSOR_INFO_PIXEL_ARRAY_SIZE * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES */ public static final int REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR = 16; /** *

* <p>The device supports reprocessing from the RAW_SENSOR format with a bayer pattern
* given by {@link CameraCharacteristics#SENSOR_INFO_BINNING_FACTOR android.sensor.info.binningFactor} (an m x n group of pixels with the same
* color filter) to a remosaiced regular bayer pattern.</p>
* <p>This capability will only be present for devices with
* {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR }
* capability. When
* {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR }
* devices do not advertise this capability,
* {@link android.graphics.ImageFormat#RAW_SENSOR } images will already have a
* regular bayer pattern.</p>
* <p>If a RAW_SENSOR stream is requested along with another non-RAW stream in a
* {@link android.hardware.camera2.CaptureRequest } (if multiple streams are supported
* when {@link CaptureRequest#SENSOR_PIXEL_MODE android.sensor.pixelMode} is set to
* {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION }),
* the RAW_SENSOR stream will have a regular bayer pattern.</p>
* <p>This capability requires the camera device to support the following:</p>
* <ul>
* <li>The {@link android.hardware.camera2.params.StreamConfigurationMap } mentioned below
* refers to the one described by
* {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION android.scaler.streamConfigurationMapMaximumResolution}.</li>
* <li>One input stream is supported, that is, {@link CameraCharacteristics#REQUEST_MAX_NUM_INPUT_STREAMS android.request.maxNumInputStreams} == 1.</li>
* <li>{@link android.graphics.ImageFormat#RAW_SENSOR } is supported as an output/input
* format, that is, {@link android.graphics.ImageFormat#RAW_SENSOR } is included in the
* lists of formats returned by {@link android.hardware.camera2.params.StreamConfigurationMap#getInputFormats } and {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats }.</li>
* <li>{@link android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput }
* returns a non-empty int[] for each supported input format returned by {@link android.hardware.camera2.params.StreamConfigurationMap#getInputFormats }.</li>
* <li>Each size returned by {@link android.hardware.camera2.params.StreamConfigurationMap#getInputSizes getInputSizes(ImageFormat.RAW_SENSOR)} is also included in {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes getOutputSizes(ImageFormat.RAW_SENSOR)}.</li>
* <li>Using {@link android.graphics.ImageFormat#RAW_SENSOR } does not cause a frame rate
* drop relative to the sensor's maximum capture rate (at that resolution).</li>
* <li>No CaptureRequest controls will be applicable when a request has an input target
* with {@link android.graphics.ImageFormat#RAW_SENSOR } format.</li>
* </ul>
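* <p>A minimal sketch (not part of the original documentation) of checking whether
* RAW_SENSOR can be used as a reprocessing input in 'max resolution' mode;
* {@code characteristics} is assumed to come from {@link CameraManager#getCameraCharacteristics}:</p>
* <pre>{@code
* StreamConfigurationMap maxResMap = characteristics.get(
*         CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION);
* boolean rawInputSupported = false;
* if (maxResMap != null) {
*     for (int inputFormat : maxResMap.getInputFormats()) {
*         if (inputFormat == ImageFormat.RAW_SENSOR) {
*             // RAW_SENSOR is accepted as a reprocessing input in max-resolution mode.
*             rawInputSupported = true;
*         }
*     }
* }
* }</pre>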
* * @see CameraCharacteristics#REQUEST_MAX_NUM_INPUT_STREAMS * @see CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION * @see CameraCharacteristics#SENSOR_INFO_BINNING_FACTOR * @see CaptureRequest#SENSOR_PIXEL_MODE * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES */ public static final int REQUEST_AVAILABLE_CAPABILITIES_REMOSAIC_REPROCESSING = 17; /** *

* <p>The device supports one or more 10-bit camera outputs according to the dynamic range
* profiles specified in
* {@link android.hardware.camera2.params.DynamicRangeProfiles#getSupportedProfiles }.
* They can be configured as part of the capture session initialization via
* {@link android.hardware.camera2.params.OutputConfiguration#setDynamicRangeProfile }.
* Cameras that enable this capability must also support the following:</p>
* <ul>
* <li>Profile {@link android.hardware.camera2.params.DynamicRangeProfiles#HLG10 }</li>
* <li>All mandatory stream combinations for this specific capability as per
* the documentation of {@link android.hardware.camera2.CameraDevice#createCaptureSession }</li>
* <li>In case the device is not able to capture some combination of supported
* standard 8-bit and/or 10-bit dynamic range profiles within the same capture request,
* then those constraints must be listed in
* {@link android.hardware.camera2.params.DynamicRangeProfiles#getProfileCaptureRequestConstraints }</li>
* <li>Recommended dynamic range profile listed in
* {@link android.hardware.camera2.CameraCharacteristics#REQUEST_RECOMMENDED_TEN_BIT_DYNAMIC_RANGE_PROFILE }.</li>
* </ul>
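* <p>A minimal sketch (not part of the original documentation) of requesting an HLG10
* output; {@code characteristics}, {@code device}, {@code previewSurface}, {@code executor}
* and {@code stateCallback} are assumed to exist, and exception handling is omitted:</p>
* <pre>{@code
* DynamicRangeProfiles profiles =
*         characteristics.get(CameraCharacteristics.REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES);
* OutputConfiguration config = new OutputConfiguration(previewSurface);
* if (profiles != null
*         && profiles.getSupportedProfiles().contains(DynamicRangeProfiles.HLG10)) {
*     // HLG10 must be supported whenever this capability is advertised.
*     config.setDynamicRangeProfile(DynamicRangeProfiles.HLG10);
* }
* SessionConfiguration sessionConfig = new SessionConfiguration(
*         SessionConfiguration.SESSION_REGULAR,
*         Arrays.asList(config), executor, stateCallback);
* device.createCaptureSession(sessionConfig);
* }</pre>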
* @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES */ public static final int REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT = 18; /** *

* <p>The camera device supports selecting a per-stream use case via
* {@link android.hardware.camera2.params.OutputConfiguration#setStreamUseCase }
* so that the device can optimize camera pipeline parameters such as tuning, sensor
* mode, or ISP settings for a specific user scenario.
* Some sample usages of this capability are:</p>
* <ul>
* <li>Distinguish high quality YUV captures from a regular YUV stream where
* the image quality may not be as good as the JPEG stream, or</li>
* <li>Use one stream to serve multiple purposes: viewfinder, video recording and
* still capture. This is common with applications that wish to apply edits equally
* to preview, saved images, and saved videos.</li>
* </ul>
* <p>This capability requires the camera device to support the following
* stream use cases:</p>
* <ul>
* <li>DEFAULT for backward compatibility where the application doesn't set
* a stream use case</li>
* <li>PREVIEW for live viewfinder and in-app image analysis</li>
* <li>STILL_CAPTURE for still photo capture</li>
* <li>VIDEO_RECORD for recording video clips</li>
* <li>PREVIEW_VIDEO_STILL for one single stream used for viewfinder, video
* recording, and still capture.</li>
* <li>VIDEO_CALL for long running video calls</li>
* </ul>
* <p>{@link android.hardware.camera2.CameraCharacteristics#SCALER_AVAILABLE_STREAM_USE_CASES }
* lists all of the supported stream use cases.</p>
* <p>Refer to {@link android.hardware.camera2.CameraDevice#createCaptureSession } for the
* mandatory stream combinations involving stream use cases, which can also be queried
* via {@link android.hardware.camera2.params.MandatoryStreamCombination }.</p>
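* <p>A minimal sketch (not part of the original documentation) of tagging outputs with
* stream use cases; the surfaces, {@code executor}, {@code stateCallback} and {@code device}
* are assumed to exist, and exception handling is omitted:</p>
* <pre>{@code
* OutputConfiguration preview = new OutputConfiguration(previewSurface);
* preview.setStreamUseCase(CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW);
* OutputConfiguration still = new OutputConfiguration(jpegSurface);
* still.setStreamUseCase(CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE);
* SessionConfiguration sessionConfig = new SessionConfiguration(
*         SessionConfiguration.SESSION_REGULAR,
*         Arrays.asList(preview, still), executor, stateCallback);
* device.createCaptureSession(sessionConfig);
* }</pre>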

* @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES */ public static final int REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE = 19; // // Enumeration values for CameraCharacteristics#REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP // /** *

8-bit SDR profile which is the default for all non 10-bit output capable devices.

* @see CameraCharacteristics#REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP * @hide */ public static final int REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD = 0x1; /** *

10-bit pixel samples encoded using the Hybrid log-gamma transfer function.

* @see CameraCharacteristics#REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP * @hide */ public static final int REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HLG10 = 0x2; /** *

10-bit pixel samples encoded using the SMPTE ST 2084 transfer function. * This profile utilizes internal static metadata to increase the quality * of the capture.

* @see CameraCharacteristics#REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP * @hide */ public static final int REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10 = 0x4; /** *

10-bit pixel samples encoded using the SMPTE ST 2084 transfer function. * In contrast to HDR10, this profile uses internal per-frame metadata * to further enhance the quality of the capture.

* @see CameraCharacteristics#REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP * @hide */ public static final int REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_HDR10_PLUS = 0x8; /** *

This is a camera mode for Dolby Vision capture optimized for a more scene * accurate capture. This would typically differ from what a specific device * might want to tune for a consumer optimized Dolby Vision general capture.

* @see CameraCharacteristics#REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP * @hide */ public static final int REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF = 0x10; /** *

This is the power optimized mode for 10-bit Dolby Vision HDR Reference Mode.

* @see CameraCharacteristics#REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP * @hide */ public static final int REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_REF_PO = 0x20; /** *

This is the camera mode for the default Dolby Vision capture mode for the * specific device. This would be tuned by each specific device for consumer * pleasing results that resonate with their particular audience. We expect * that each specific device would have a different look for their default * Dolby Vision capture.

* @see CameraCharacteristics#REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP * @hide */ public static final int REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM = 0x40; /** *

This is the power optimized mode for 10-bit Dolby Vision HDR device specific * capture Mode.

* @see CameraCharacteristics#REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP * @hide */ public static final int REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_10B_HDR_OEM_PO = 0x80; /** *

This is the 8-bit version of the Dolby Vision reference capture mode optimized * for scene accuracy.

* @see CameraCharacteristics#REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP * @hide */ public static final int REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF = 0x100; /** *

This is the power optimized mode for 8-bit Dolby Vision HDR Reference Mode.

* @see CameraCharacteristics#REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP * @hide */ public static final int REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_REF_PO = 0x200; /** *

This is the 8-bit version of device specific tuned and optimized Dolby Vision * capture mode.

* @see CameraCharacteristics#REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP * @hide */ public static final int REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM = 0x400; /** *

This is the power optimized mode for 8-bit Dolby Vision HDR device specific * capture Mode.

* @see CameraCharacteristics#REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP * @hide */ public static final int REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_DOLBY_VISION_8B_HDR_OEM_PO = 0x800; /** * * @see CameraCharacteristics#REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP * @hide */ public static final int REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_MAX = 0x1000; // // Enumeration values for CameraCharacteristics#SCALER_CROPPING_TYPE // /** *

The camera device only supports centered crop regions.

* @see CameraCharacteristics#SCALER_CROPPING_TYPE */ public static final int SCALER_CROPPING_TYPE_CENTER_ONLY = 0; /** *

The camera device supports arbitrarily chosen crop regions.

* @see CameraCharacteristics#SCALER_CROPPING_TYPE */ public static final int SCALER_CROPPING_TYPE_FREEFORM = 1; // // Enumeration values for CameraCharacteristics#SCALER_AVAILABLE_STREAM_USE_CASES // /** *

Default stream use case.

*

This use case is the same as when the application doesn't set any use case for * the stream. The camera device uses the properties of the output target, such as * format, dataSpace, or surface class type, to optimize the image processing pipeline.

* @see CameraCharacteristics#SCALER_AVAILABLE_STREAM_USE_CASES */ public static final int SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT = 0x0; /** *

Live stream shown to the user.

*

Optimized for performance and usability as a viewfinder, but not necessarily for * image quality. The output is not meant to be persisted as saved images or video.

*

No stall if android.control.* are set to FAST. There may be stall if * they are set to HIGH_QUALITY. This use case has the same behavior as the * default SurfaceView and SurfaceTexture targets. Additionally, this use case can be * used for in-app image analysis.

* @see CameraCharacteristics#SCALER_AVAILABLE_STREAM_USE_CASES */ public static final int SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW = 0x1; /** *

Still photo capture.

*

Optimized for high-quality high-resolution capture, and not expected to maintain * preview-like frame rates.

*

The stream may have stalls regardless of whether android.control.* is HIGH_QUALITY. * This use case has the same behavior as the default JPEG and RAW related formats.

* @see CameraCharacteristics#SCALER_AVAILABLE_STREAM_USE_CASES */ public static final int SCALER_AVAILABLE_STREAM_USE_CASES_STILL_CAPTURE = 0x2; /** *

Recording video clips.

*

Optimized for high-quality video capture, including high-quality image stabilization * if supported by the device and enabled by the application. As a result, may produce * output frames with a substantial lag from real time, to allow for highest-quality * stabilization or other processing. As such, such an output is not suitable for drawing * to screen directly, and is expected to be persisted to disk or similar for later * playback or processing. Only streams that set the VIDEO_RECORD use case are guaranteed * to have video stabilization applied when the video stabilization control is set * to ON, as opposed to PREVIEW_STABILIZATION.

*

This use case has the same behavior as the default MediaRecorder and MediaCodec * targets.

* @see CameraCharacteristics#SCALER_AVAILABLE_STREAM_USE_CASES */ public static final int SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_RECORD = 0x3; /** *

One single stream used for combined purposes of preview, video, and still capture.

*

For such multi-purpose streams, the camera device aims to make the best tradeoff * between the individual use cases. For example, the STILL_CAPTURE use case by itself * may have stalls for achieving best image quality. But if combined with PREVIEW and * VIDEO_RECORD, the camera device needs to trade off the additional image processing * for speed so that preview and video recording aren't slowed down.

*

Similarly, VIDEO_RECORD may produce frames with a substantial lag, but * PREVIEW_VIDEO_STILL must have minimal output delay. This means that to enable video * stabilization with this use case, the device must support and the app must select the * PREVIEW_STABILIZATION mode for video stabilization.

* @see CameraCharacteristics#SCALER_AVAILABLE_STREAM_USE_CASES */ public static final int SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW_VIDEO_STILL = 0x4; /** *

Long-running video call optimized for both power efficiency and video quality.

*

The camera sensor may run in a lower-resolution mode to reduce power consumption * at the cost of some image and digital zoom quality. Unlike VIDEO_RECORD, VIDEO_CALL * outputs are expected to work in dark conditions, so are usually accompanied with * variable frame rate settings to allow sufficient exposure time in low light.

* @see CameraCharacteristics#SCALER_AVAILABLE_STREAM_USE_CASES */ public static final int SCALER_AVAILABLE_STREAM_USE_CASES_VIDEO_CALL = 0x5; /** *

Vendor defined use cases. These depend on the vendor implementation.

* @see CameraCharacteristics#SCALER_AVAILABLE_STREAM_USE_CASES * @hide */ public static final int SCALER_AVAILABLE_STREAM_USE_CASES_VENDOR_START = 0x10000; // // Enumeration values for CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT // /** * @see CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT */ public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGGB = 0; /** * @see CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT */ public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GRBG = 1; /** * @see CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT */ public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_GBRG = 2; /** * @see CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT */ public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_BGGR = 3; /** *

Sensor is not Bayer; output has 3 16-bit * values for each pixel, instead of just 1 16-bit value * per pixel.

* @see CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT */ public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_RGB = 4; /** *

* <p>Sensor doesn't have any Bayer color filter.
* Such a sensor captures visible light in monochrome. The exact weighting and
* wavelengths captured are not specified, but generally only include the visible
* frequencies. This value implies a MONOCHROME camera.</p>

* @see CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT */ public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_MONO = 5; /** *

Sensor has a near infrared filter capturing light with wavelength between * roughly 750nm and 1400nm, and the same filter covers the whole sensor array. This * value implies a MONOCHROME camera.

* @see CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT */ public static final int SENSOR_INFO_COLOR_FILTER_ARRANGEMENT_NIR = 6; // // Enumeration values for CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE // /** *

Timestamps from {@link CaptureResult#SENSOR_TIMESTAMP android.sensor.timestamp} are in nanoseconds and monotonic, but can * not be compared to timestamps from other subsystems (e.g. accelerometer, gyro etc.), * or other instances of the same or different camera devices in the same system with * accuracy. However, the timestamps are roughly in the same timebase as * {@link android.os.SystemClock#uptimeMillis }. The accuracy is sufficient for tasks * like A/V synchronization for video recording, at least, and the timestamps can be * directly used together with timestamps from the audio subsystem for that task.

*

Timestamps between streams and results for a single camera instance are comparable, * and the timestamps for all buffers and the result metadata generated by a single * capture are identical.

* * @see CaptureResult#SENSOR_TIMESTAMP * @see CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE */ public static final int SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN = 0; /** *

* <p>Timestamps from {@link CaptureResult#SENSOR_TIMESTAMP android.sensor.timestamp} are in the same timebase as
* {@link android.os.SystemClock#elapsedRealtimeNanos },
* and they can be compared to other timestamps using that base.</p>
* <p>When buffers from a REALTIME device are passed directly to a video encoder from the
* camera, automatic compensation is done to account for differing timebases of the
* audio and camera subsystems. If the application is receiving buffers and then later
* sending them to a video encoder or other application where they are compared with
* audio subsystem timestamps or similar, this compensation is not present. In those
* cases, applications need to adjust the timestamps themselves. Since
* {@link android.os.SystemClock#elapsedRealtimeNanos } and
* {@link android.os.SystemClock#uptimeMillis } only diverge while the device is asleep, an
* offset between the two sources can be measured once per active session and applied
* to timestamps for sufficient accuracy for A/V sync.</p>
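* <p>A minimal sketch (not part of the original documentation) of measuring that offset once
* per session so sensor timestamps can be shifted into the uptime timebase used by the audio
* pipeline; {@code result} is a {@link CaptureResult} from this camera:</p>
* <pre>{@code
* // Both clocks advance together while the device is awake, so a one-time offset suffices.
* long offsetNs = SystemClock.elapsedRealtimeNanos()
*         - SystemClock.uptimeMillis() * 1_000_000L;
* // Later, for each frame: shift the sensor timestamp into the uptime timebase.
* long sensorTimestampNs = result.get(CaptureResult.SENSOR_TIMESTAMP);
* long uptimeTimestampNs = sensorTimestampNs - offsetNs;
* }</pre>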

* * @see CaptureResult#SENSOR_TIMESTAMP * @see CameraCharacteristics#SENSOR_INFO_TIMESTAMP_SOURCE */ public static final int SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME = 1; // // Enumeration values for CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 // /** * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 */ public static final int SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT = 1; /** * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 */ public static final int SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT = 2; /** *

Incandescent light

* @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 */ public static final int SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN = 3; /** * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 */ public static final int SENSOR_REFERENCE_ILLUMINANT1_FLASH = 4; /** * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 */ public static final int SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER = 9; /** * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 */ public static final int SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER = 10; /** * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 */ public static final int SENSOR_REFERENCE_ILLUMINANT1_SHADE = 11; /** *

D 5700 - 7100K

* @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 */ public static final int SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT = 12; /** *

N 4600 - 5400K

* @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 */ public static final int SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT = 13; /** *

W 3900 - 4500K

* @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 */ public static final int SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT = 14; /** *

WW 3200 - 3700K

* @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 */ public static final int SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT = 15; /** * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 */ public static final int SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A = 17; /** * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 */ public static final int SENSOR_REFERENCE_ILLUMINANT1_STANDARD_B = 18; /** * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 */ public static final int SENSOR_REFERENCE_ILLUMINANT1_STANDARD_C = 19; /** * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 */ public static final int SENSOR_REFERENCE_ILLUMINANT1_D55 = 20; /** * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 */ public static final int SENSOR_REFERENCE_ILLUMINANT1_D65 = 21; /** * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 */ public static final int SENSOR_REFERENCE_ILLUMINANT1_D75 = 22; /** * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 */ public static final int SENSOR_REFERENCE_ILLUMINANT1_D50 = 23; /** * @see CameraCharacteristics#SENSOR_REFERENCE_ILLUMINANT1 */ public static final int SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN = 24; // // Enumeration values for CameraCharacteristics#LED_AVAILABLE_LEDS // /** *

android.led.transmit control is used.

* @see CameraCharacteristics#LED_AVAILABLE_LEDS * @hide */ public static final int LED_AVAILABLE_LEDS_TRANSMIT = 0; // // Enumeration values for CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL // /** *

* <p>This camera device does not have enough capabilities to qualify as a FULL device or
* better.</p>
* <p>Only the stream configurations listed in the LEGACY and LIMITED tables in the
* {@link android.hardware.camera2.CameraDevice#createCaptureSession createCaptureSession} documentation are guaranteed to be supported.</p>
* <p>All LIMITED devices support the BACKWARDS_COMPATIBLE capability, indicating basic
* support for color image capture. The only exception is that the device may
* alternatively support only the DEPTH_OUTPUT capability, if it can only output depth
* measurements and not color images.</p>
* <p>LIMITED devices and above require the use of {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger}
* to lock exposure metering (and calculate flash power, for cameras with flash) before
* capturing a high-quality still image.</p>
* <p>A LIMITED device that only lists the BACKWARDS_COMPATIBLE capability is only
* required to support full-automatic operation and post-processing (OFF is not
* supported for {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode}, {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}, or
* {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}).</p>
* <p>Additional capabilities may optionally be supported by a LIMITED-level device, and
* can be checked for in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities}.</p>
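* <p>A minimal sketch (not part of the original documentation) of checking whether a device
* reaches at least a required hardware level, under one reasonable ordering that treats
* LEGACY as the lowest level and EXTERNAL as weaker than LIMITED, consistent with the
* level descriptions in this section:</p>
* <pre>{@code
* // Weakest to strongest under that assumption; the numeric constants are not ordered.
* static final int[] LEVEL_ORDER = {
*         CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY,
*         CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL,
*         CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED,
*         CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_FULL,
*         CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_3 };
*
* static boolean isAtLeast(CameraCharacteristics characteristics, int requiredLevel) {
*     int deviceLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
*     int deviceRank = -1;
*     int requiredRank = -1;
*     for (int i = 0; i < LEVEL_ORDER.length; i++) {
*         if (LEVEL_ORDER[i] == deviceLevel) deviceRank = i;
*         if (LEVEL_ORDER[i] == requiredLevel) requiredRank = i;
*     }
*     return deviceRank >= 0 && requiredRank >= 0 && deviceRank >= requiredRank;
* }
* }</pre>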

* * @see CaptureRequest#CONTROL_AE_MODE * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER * @see CaptureRequest#CONTROL_AF_MODE * @see CaptureRequest#CONTROL_AWB_MODE * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL */ public static final int INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED = 0; /** *

* <p>This camera device is capable of supporting advanced imaging applications.</p>
* <p>The stream configurations listed in the FULL, LEGACY and LIMITED tables in the
* {@link android.hardware.camera2.CameraDevice#createCaptureSession createCaptureSession} documentation are guaranteed to be supported.</p>
* <p>A FULL device will support the following capabilities:</p>
* <ul>
* <li>BURST_CAPTURE capability ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains
* BURST_CAPTURE)</li>
* <li>Per frame control ({@link CameraCharacteristics#SYNC_MAX_LATENCY android.sync.maxLatency} == PER_FRAME_CONTROL)</li>
* <li>Manual sensor control ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains MANUAL_SENSOR)</li>
* <li>Manual post-processing control ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains
* MANUAL_POST_PROCESSING)</li>
* <li>The required exposure time range defined in {@link CameraCharacteristics#SENSOR_INFO_EXPOSURE_TIME_RANGE android.sensor.info.exposureTimeRange}</li>
* <li>The required maxFrameDuration defined in {@link CameraCharacteristics#SENSOR_INFO_MAX_FRAME_DURATION android.sensor.info.maxFrameDuration}</li>
* </ul>
* <p>Note:
* Pre-API level 23, FULL devices also supported arbitrary cropping region
* ({@link CameraCharacteristics#SCALER_CROPPING_TYPE android.scaler.croppingType} == FREEFORM); this requirement was relaxed in API level
* 23, and FULL devices may only support CENTERED cropping.</p>

* * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES * @see CameraCharacteristics#SCALER_CROPPING_TYPE * @see CameraCharacteristics#SENSOR_INFO_EXPOSURE_TIME_RANGE * @see CameraCharacteristics#SENSOR_INFO_MAX_FRAME_DURATION * @see CameraCharacteristics#SYNC_MAX_LATENCY * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL */ public static final int INFO_SUPPORTED_HARDWARE_LEVEL_FULL = 1; /** *

This camera device is running in backward compatibility mode.

*

Only the stream configurations listed in the LEGACY table in the {@link android.hardware.camera2.CameraDevice#createCaptureSession createCaptureSession} documentation are supported.

*

A LEGACY device does not support per-frame control, manual sensor control, manual * post-processing, arbitrary cropping regions, and has relaxed performance constraints. * No additional capabilities beyond BACKWARD_COMPATIBLE will ever be listed by a * LEGACY device in {@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities}.

*

In addition, the {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} is not functional on LEGACY * devices. Instead, every request that includes a JPEG-format output target is treated * as triggering a still capture, internally executing a precapture trigger. This may * fire the flash for flash power metering during precapture, and then fire the flash * for the final capture, if a flash is available on the device and the AE mode is set to * enable the flash.

*

Devices that initially shipped with Android version {@link android.os.Build.VERSION_CODES#Q Q} or newer will not include any LEGACY-level devices.

* * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL */ public static final int INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY = 2; /** *

* <p>This camera device is capable of YUV reprocessing and RAW data capture, in addition to
* FULL-level capabilities.</p>
* <p>The stream configurations listed in the LEVEL_3, RAW, FULL, LEGACY and
* LIMITED tables in the {@link android.hardware.camera2.CameraDevice#createCaptureSession createCaptureSession} documentation are guaranteed to be supported.</p>
* <p>The following additional capabilities are guaranteed to be supported:</p>
* <ul>
* <li>YUV_REPROCESSING capability ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains
* YUV_REPROCESSING)</li>
* <li>RAW capability ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} contains
* RAW)</li>
* </ul>
* * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL */ public static final int INFO_SUPPORTED_HARDWARE_LEVEL_3 = 3; /** *

* <p>This camera device is backed by an external camera connected to this Android device.</p>
* <p>The device has capability identical to a LIMITED level device, with the following
* exceptions:</p>
* <ul>
* <li>The device may not report lens/sensor related information such as
* <ul>
* <li>{@link CaptureRequest#LENS_FOCAL_LENGTH android.lens.focalLength}</li>
* <li>{@link CameraCharacteristics#LENS_INFO_HYPERFOCAL_DISTANCE android.lens.info.hyperfocalDistance}</li>
* <li>{@link CameraCharacteristics#SENSOR_INFO_PHYSICAL_SIZE android.sensor.info.physicalSize}</li>
* <li>{@link CameraCharacteristics#SENSOR_INFO_WHITE_LEVEL android.sensor.info.whiteLevel}</li>
* <li>{@link CameraCharacteristics#SENSOR_BLACK_LEVEL_PATTERN android.sensor.blackLevelPattern}</li>
* <li>{@link CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT android.sensor.info.colorFilterArrangement}</li>
* <li>{@link CaptureResult#SENSOR_ROLLING_SHUTTER_SKEW android.sensor.rollingShutterSkew}</li>
* </ul>
* </li>
* <li>The device will report 0 for {@link CameraCharacteristics#SENSOR_ORIENTATION android.sensor.orientation}</li>
* <li>The device has less guarantee on stable framerate, as the framerate partly depends
* on the external camera being used.</li>
* </ul>
* * @see CaptureRequest#LENS_FOCAL_LENGTH * @see CameraCharacteristics#LENS_INFO_HYPERFOCAL_DISTANCE * @see CameraCharacteristics#SENSOR_BLACK_LEVEL_PATTERN * @see CameraCharacteristics#SENSOR_INFO_COLOR_FILTER_ARRANGEMENT * @see CameraCharacteristics#SENSOR_INFO_PHYSICAL_SIZE * @see CameraCharacteristics#SENSOR_INFO_WHITE_LEVEL * @see CameraCharacteristics#SENSOR_ORIENTATION * @see CaptureResult#SENSOR_ROLLING_SHUTTER_SKEW * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL */ public static final int INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL = 4; // // Enumeration values for CameraCharacteristics#SYNC_MAX_LATENCY // /** *

Every frame has the requests immediately applied.

*

Changing controls over multiple requests one after another will * produce results that have those controls applied atomically * each frame.

*

All FULL capability devices will have this as their maxLatency.

* @see CameraCharacteristics#SYNC_MAX_LATENCY */ public static final int SYNC_MAX_LATENCY_PER_FRAME_CONTROL = 0; /** *

Each new frame has some subset (potentially the entire set) * of the past requests applied to the camera settings.

*

By submitting a series of identical requests, the camera device * will eventually have the camera settings applied, but it is * unknown when that exact point will be.

*

All LEGACY capability devices will have this as their maxLatency.

* @see CameraCharacteristics#SYNC_MAX_LATENCY */ public static final int SYNC_MAX_LATENCY_UNKNOWN = -1; // // Enumeration values for CameraCharacteristics#LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE // /** *

A software mechanism is used to synchronize between the physical cameras. As a result, * the timestamp of an image from a physical stream is only an approximation of the * image sensor start-of-exposure time.

* @see CameraCharacteristics#LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE */ public static final int LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_APPROXIMATE = 0; /** *

The camera device supports frame timestamp synchronization at the hardware level, * and the timestamp of a physical stream image accurately reflects its * start-of-exposure time.

* @see CameraCharacteristics#LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE */ public static final int LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_CALIBRATED = 1; // // Enumeration values for CameraCharacteristics#AUTOMOTIVE_LENS_FACING // /** *

The camera device faces the outside of the vehicle body frame but not exactly * one of the exterior sides defined by this enum. Applications should determine * the exact facing direction from {@link CameraCharacteristics#LENS_POSE_ROTATION android.lens.poseRotation} and * {@link CameraCharacteristics#LENS_POSE_TRANSLATION android.lens.poseTranslation}.

* * @see CameraCharacteristics#LENS_POSE_ROTATION * @see CameraCharacteristics#LENS_POSE_TRANSLATION * @see CameraCharacteristics#AUTOMOTIVE_LENS_FACING */ public static final int AUTOMOTIVE_LENS_FACING_EXTERIOR_OTHER = 0; /** *

The camera device faces the front of the vehicle body frame.

* @see CameraCharacteristics#AUTOMOTIVE_LENS_FACING */ public static final int AUTOMOTIVE_LENS_FACING_EXTERIOR_FRONT = 1; /** *

The camera device faces the rear of the vehicle body frame.

* @see CameraCharacteristics#AUTOMOTIVE_LENS_FACING */ public static final int AUTOMOTIVE_LENS_FACING_EXTERIOR_REAR = 2; /** *

The camera device faces the left side of the vehicle body frame.

* @see CameraCharacteristics#AUTOMOTIVE_LENS_FACING */ public static final int AUTOMOTIVE_LENS_FACING_EXTERIOR_LEFT = 3; /** *

The camera device faces the right side of the vehicle body frame.

* @see CameraCharacteristics#AUTOMOTIVE_LENS_FACING */ public static final int AUTOMOTIVE_LENS_FACING_EXTERIOR_RIGHT = 4; /** *

The camera device faces the inside of the vehicle body frame but not exactly * one of seats described by this enum. Applications should determine the exact * facing direction from {@link CameraCharacteristics#LENS_POSE_ROTATION android.lens.poseRotation} and {@link CameraCharacteristics#LENS_POSE_TRANSLATION android.lens.poseTranslation}.

* * @see CameraCharacteristics#LENS_POSE_ROTATION * @see CameraCharacteristics#LENS_POSE_TRANSLATION * @see CameraCharacteristics#AUTOMOTIVE_LENS_FACING */ public static final int AUTOMOTIVE_LENS_FACING_INTERIOR_OTHER = 5; /** *

The camera device faces the left side seat of the first row.

* @see CameraCharacteristics#AUTOMOTIVE_LENS_FACING */ public static final int AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_1_LEFT = 6; /** *

The camera device faces the center seat of the first row.

* @see CameraCharacteristics#AUTOMOTIVE_LENS_FACING */ public static final int AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_1_CENTER = 7; /** *

The camera device faces the right seat of the first row.

* @see CameraCharacteristics#AUTOMOTIVE_LENS_FACING */ public static final int AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_1_RIGHT = 8; /** *

The camera device faces the left side seat of the second row.

* @see CameraCharacteristics#AUTOMOTIVE_LENS_FACING */ public static final int AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_2_LEFT = 9; /** *

The camera device faces the center seat of the second row.

* @see CameraCharacteristics#AUTOMOTIVE_LENS_FACING */ public static final int AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_2_CENTER = 10; /** *

The camera device faces the right side seat of the second row.

* @see CameraCharacteristics#AUTOMOTIVE_LENS_FACING */ public static final int AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_2_RIGHT = 11; /** *

The camera device faces the left side seat of the third row.

* @see CameraCharacteristics#AUTOMOTIVE_LENS_FACING */ public static final int AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_3_LEFT = 12; /** *

The camera device faces the center seat of the third row.

* @see CameraCharacteristics#AUTOMOTIVE_LENS_FACING */ public static final int AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_3_CENTER = 13; /** *

The camera device faces the right seat of the third row.

* @see CameraCharacteristics#AUTOMOTIVE_LENS_FACING */ public static final int AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_3_RIGHT = 14; // // Enumeration values for CameraCharacteristics#AUTOMOTIVE_LOCATION // /** *

The camera device exists inside of the vehicle cabin.

* @see CameraCharacteristics#AUTOMOTIVE_LOCATION */ public static final int AUTOMOTIVE_LOCATION_INTERIOR = 0; /** *

The camera exists outside of the vehicle body frame but not exactly on one of the * exterior locations this enum defines. The applications should determine the exact * location from {@link CameraCharacteristics#LENS_POSE_TRANSLATION android.lens.poseTranslation}.

* * @see CameraCharacteristics#LENS_POSE_TRANSLATION * @see CameraCharacteristics#AUTOMOTIVE_LOCATION */ public static final int AUTOMOTIVE_LOCATION_EXTERIOR_OTHER = 1; /** *

The camera device exists outside of the vehicle body frame and on its front side.

* @see CameraCharacteristics#AUTOMOTIVE_LOCATION */ public static final int AUTOMOTIVE_LOCATION_EXTERIOR_FRONT = 2; /** *

The camera device exists outside of the vehicle body frame and on its rear side.

* @see CameraCharacteristics#AUTOMOTIVE_LOCATION */ public static final int AUTOMOTIVE_LOCATION_EXTERIOR_REAR = 3; /** *

The camera device exists outside and on left side of the vehicle body frame.

* @see CameraCharacteristics#AUTOMOTIVE_LOCATION */ public static final int AUTOMOTIVE_LOCATION_EXTERIOR_LEFT = 4; /** *

The camera device exists outside and on right side of the vehicle body frame.

* @see CameraCharacteristics#AUTOMOTIVE_LOCATION */ public static final int AUTOMOTIVE_LOCATION_EXTERIOR_RIGHT = 5; /** *

The camera device exists on an extra vehicle, such as the trailer, but not exactly * on one of front, rear, left, or right side. Applications should determine the exact * location from {@link CameraCharacteristics#LENS_POSE_TRANSLATION android.lens.poseTranslation}.

* * @see CameraCharacteristics#LENS_POSE_TRANSLATION * @see CameraCharacteristics#AUTOMOTIVE_LOCATION */ public static final int AUTOMOTIVE_LOCATION_EXTRA_OTHER = 6; /** *

The camera device exists outside of the extra vehicle's body frame and on its front * side.

* @see CameraCharacteristics#AUTOMOTIVE_LOCATION */ public static final int AUTOMOTIVE_LOCATION_EXTRA_FRONT = 7; /** *

The camera device exists outside of the extra vehicle's body frame and on its rear * side.

* @see CameraCharacteristics#AUTOMOTIVE_LOCATION */ public static final int AUTOMOTIVE_LOCATION_EXTRA_REAR = 8; /** *

The camera device exists outside and on left side of the extra vehicle body.

* @see CameraCharacteristics#AUTOMOTIVE_LOCATION */ public static final int AUTOMOTIVE_LOCATION_EXTRA_LEFT = 9; /** *

The camera device exists outside and on right side of the extra vehicle body.

* @see CameraCharacteristics#AUTOMOTIVE_LOCATION */ public static final int AUTOMOTIVE_LOCATION_EXTRA_RIGHT = 10; // // Enumeration values for CaptureRequest#COLOR_CORRECTION_MODE // /** *

* <p>Use the {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform} matrix
* and {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} to do color conversion.</p>
* <p>All advanced white balance adjustments (not specified
* by our white balance pipeline) must be disabled.</p>
* <p>If AWB is enabled with {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} != OFF, then
* TRANSFORM_MATRIX is ignored. The camera device will override
* this value to either FAST or HIGH_QUALITY.</p>
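* <p>A minimal sketch (not part of the original documentation) of applying a manual white
* balance on a device with the MANUAL_POST_PROCESSING capability; {@code builder} is an
* existing {@link CaptureRequest.Builder}, and the identity transform and gains shown here
* are placeholder values:</p>
* <pre>{@code
* builder.set(CaptureRequest.CONTROL_AWB_MODE, CameraMetadata.CONTROL_AWB_MODE_OFF);
* builder.set(CaptureRequest.COLOR_CORRECTION_MODE,
*         CameraMetadata.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX);
* // Identity 3x3 transform expressed as 9 rationals (numerator, denominator pairs).
* builder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM, new ColorSpaceTransform(new int[] {
*         1, 1,  0, 1,  0, 1,
*         0, 1,  1, 1,  0, 1,
*         0, 1,  0, 1,  1, 1 }));
* // Per-channel gains in R, G_even, G_odd, B order.
* builder.set(CaptureRequest.COLOR_CORRECTION_GAINS,
*         new RggbChannelVector(2.0f, 1.0f, 1.0f, 1.8f));
* }</pre>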

* * @see CaptureRequest#COLOR_CORRECTION_GAINS * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM * @see CaptureRequest#CONTROL_AWB_MODE * @see CaptureRequest#COLOR_CORRECTION_MODE */ public static final int COLOR_CORRECTION_MODE_TRANSFORM_MATRIX = 0; /** *

Color correction processing must not slow down * capture rate relative to sensor raw output.

*

Advanced white balance adjustments above and beyond * the specified white balance pipeline may be applied.

*

If AWB is enabled with {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} != OFF, then * the camera device uses the last frame's AWB values * (or defaults if AWB has never been run).

* * @see CaptureRequest#CONTROL_AWB_MODE * @see CaptureRequest#COLOR_CORRECTION_MODE */ public static final int COLOR_CORRECTION_MODE_FAST = 1; /** *

Color correction processing operates at improved * quality but the capture rate might be reduced (relative to sensor * raw output rate)

*

Advanced white balance adjustments above and beyond * the specified white balance pipeline may be applied.

*

If AWB is enabled with {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode} != OFF, then * the camera device uses the last frame's AWB values * (or defaults if AWB has never been run).

* * @see CaptureRequest#CONTROL_AWB_MODE * @see CaptureRequest#COLOR_CORRECTION_MODE */ public static final int COLOR_CORRECTION_MODE_HIGH_QUALITY = 2; // // Enumeration values for CaptureRequest#COLOR_CORRECTION_ABERRATION_MODE // /** *

No aberration correction is applied.

* @see CaptureRequest#COLOR_CORRECTION_ABERRATION_MODE */ public static final int COLOR_CORRECTION_ABERRATION_MODE_OFF = 0; /** *

Aberration correction will not slow down capture rate * relative to sensor raw output.

* @see CaptureRequest#COLOR_CORRECTION_ABERRATION_MODE */ public static final int COLOR_CORRECTION_ABERRATION_MODE_FAST = 1; /** *

Aberration correction operates at improved quality but the capture rate might be * reduced (relative to sensor raw output rate)

* @see CaptureRequest#COLOR_CORRECTION_ABERRATION_MODE */ public static final int COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY = 2; // // Enumeration values for CaptureRequest#CONTROL_AE_ANTIBANDING_MODE // /** *

The camera device will not adjust exposure duration to * avoid banding problems.

* @see CaptureRequest#CONTROL_AE_ANTIBANDING_MODE */ public static final int CONTROL_AE_ANTIBANDING_MODE_OFF = 0; /** *

The camera device will adjust exposure duration to * avoid banding problems with 50Hz illumination sources.

* @see CaptureRequest#CONTROL_AE_ANTIBANDING_MODE */ public static final int CONTROL_AE_ANTIBANDING_MODE_50HZ = 1; /** *

The camera device will adjust exposure duration to * avoid banding problems with 60Hz illumination * sources.

* @see CaptureRequest#CONTROL_AE_ANTIBANDING_MODE */ public static final int CONTROL_AE_ANTIBANDING_MODE_60HZ = 2; /** *

The camera device will automatically adapt its * antibanding routine to the current illumination * condition. This is the default mode if AUTO is * available on given camera device.

* @see CaptureRequest#CONTROL_AE_ANTIBANDING_MODE */ public static final int CONTROL_AE_ANTIBANDING_MODE_AUTO = 3; // // Enumeration values for CaptureRequest#CONTROL_AE_MODE // /** *

* <p>The camera device's autoexposure routine is disabled.</p>
* <p>The application-selected {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime},
* {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity} and
* {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} are used by the camera
* device, along with android.flash.* fields, if there's
* a flash unit for this camera device.</p>
* <p>Note that auto-white balance (AWB) and auto-focus (AF)
* behavior is device dependent when AE is in OFF mode.
* To have consistent behavior across different devices,
* it is recommended to either set AWB and AF to OFF mode
* or lock AWB and AF before setting AE to OFF.
* See {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}, {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode},
* {@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock}, and {@link CaptureRequest#CONTROL_AF_TRIGGER android.control.afTrigger}
* for more details.</p>
* <p>LEGACY devices do not support the OFF mode and will
* override attempts to use this value to ON.</p>
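* <p>A minimal sketch (not part of the original documentation) of a fully manual exposure,
* assuming a device with the MANUAL_SENSOR capability and an existing
* {@link CaptureRequest.Builder} named {@code builder}; the values below are placeholders
* and must fall within the ranges advertised by the device:</p>
* <pre>{@code
* builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_OFF);
* builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 10_000_000L);  // 10 ms, in nanoseconds
* builder.set(CaptureRequest.SENSOR_SENSITIVITY, 400);            // ISO 400
* builder.set(CaptureRequest.SENSOR_FRAME_DURATION, 33_333_333L); // ~30 fps
* }</pre>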

* * @see CaptureRequest#CONTROL_AF_MODE * @see CaptureRequest#CONTROL_AF_TRIGGER * @see CaptureRequest#CONTROL_AWB_LOCK * @see CaptureRequest#CONTROL_AWB_MODE * @see CaptureRequest#SENSOR_EXPOSURE_TIME * @see CaptureRequest#SENSOR_FRAME_DURATION * @see CaptureRequest#SENSOR_SENSITIVITY * @see CaptureRequest#CONTROL_AE_MODE */ public static final int CONTROL_AE_MODE_OFF = 0; /** *

The camera device's autoexposure routine is active, * with no flash control.

*

The application's values for * {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime}, * {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}, and * {@link CaptureRequest#SENSOR_FRAME_DURATION android.sensor.frameDuration} are ignored. The * application has control over the various * android.flash.* fields.

* * @see CaptureRequest#SENSOR_EXPOSURE_TIME * @see CaptureRequest#SENSOR_FRAME_DURATION * @see CaptureRequest#SENSOR_SENSITIVITY * @see CaptureRequest#CONTROL_AE_MODE */ public static final int CONTROL_AE_MODE_ON = 1; /** *

Like ON, except that the camera device also controls * the camera's flash unit, firing it in low-light * conditions.

*

The flash may be fired during a precapture sequence * (triggered by {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger}) and * may be fired for captures for which the * {@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent} field is set to * STILL_CAPTURE

* * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER * @see CaptureRequest#CONTROL_CAPTURE_INTENT * @see CaptureRequest#CONTROL_AE_MODE */ public static final int CONTROL_AE_MODE_ON_AUTO_FLASH = 2; /** *

Like ON, except that the camera device also controls * the camera's flash unit, always firing it for still * captures.

*

The flash may be fired during a precapture sequence * (triggered by {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger}) and * will always be fired for captures for which the * {@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent} field is set to * STILL_CAPTURE

* * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER * @see CaptureRequest#CONTROL_CAPTURE_INTENT * @see CaptureRequest#CONTROL_AE_MODE */ public static final int CONTROL_AE_MODE_ON_ALWAYS_FLASH = 3; /** *

Like ON_AUTO_FLASH, but with automatic red eye * reduction.

*

If deemed necessary by the camera device, a red eye * reduction flash will fire during the precapture * sequence.

* @see CaptureRequest#CONTROL_AE_MODE */ public static final int CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE = 4; /** *

* <p>An external flash has been turned on.</p>
* <p>It informs the camera device that an external flash has been turned on, and that
* metering (and continuous focus if active) should be quickly recalculated to account
* for the external flash. Otherwise, this mode acts like ON.</p>
* <p>When the external flash is turned off, AE mode should be changed to one of the
* other available AE modes.</p>
* <p>If the camera device supports AE external flash mode, {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} must
* be FLASH_REQUIRED after the camera device finishes AE scan and it's too dark without
* flash.</p>

* * @see CaptureResult#CONTROL_AE_STATE * @see CaptureRequest#CONTROL_AE_MODE */ public static final int CONTROL_AE_MODE_ON_EXTERNAL_FLASH = 5; // // Enumeration values for CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER // /** *

The trigger is idle.

* @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER */ public static final int CONTROL_AE_PRECAPTURE_TRIGGER_IDLE = 0; /** *

* <p>The precapture metering sequence will be started
* by the camera device.</p>
* <p>The exact effect of the precapture trigger depends on
* the current AE mode and state.</p>
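* <p>A minimal sketch (not part of the original documentation) of running precapture before
* a still capture; {@code builder} mirrors the repeating preview request, {@code session},
* {@code precaptureCallback} and {@code handler} are assumed to exist, and the callback is
* expected to wait for {@link CaptureResult#CONTROL_AE_STATE android.control.aeState} to leave PRECAPTURE before issuing the
* still request (exception handling omitted):</p>
* <pre>{@code
* builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
*         CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START);
* // Submit the trigger exactly once; later requests should go back to IDLE.
* session.capture(builder.build(), precaptureCallback, handler);
* builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
*         CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
* }</pre>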

* @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER */ public static final int CONTROL_AE_PRECAPTURE_TRIGGER_START = 1; /** *

* <p>The camera device will cancel any currently active or completed
* precapture metering sequence, and the auto-exposure routine will return to its
* initial state.</p>

* @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER */ public static final int CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL = 2; // // Enumeration values for CaptureRequest#CONTROL_AF_MODE // /** *

The auto-focus routine does not control the lens; * {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance} is controlled by the * application.

* * @see CaptureRequest#LENS_FOCUS_DISTANCE * @see CaptureRequest#CONTROL_AF_MODE */ public static final int CONTROL_AF_MODE_OFF = 0; /** *

* <p>Basic automatic focus mode.</p>
* <p>In this mode, the lens does not move unless
* the autofocus trigger action is called. When that trigger
* is activated, AF will transition to ACTIVE_SCAN, then to
* the outcome of the scan (FOCUSED or NOT_FOCUSED).</p>
* <p>Always supported if lens is not fixed focus.</p>
* <p>Use {@link CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE android.lens.info.minimumFocusDistance} to determine if lens
* is fixed-focus.</p>
* <p>Triggering AF_CANCEL resets the lens position to default,
* and sets the AF state to INACTIVE.</p>
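* <p>A minimal sketch (not part of the original documentation) of starting a single autofocus
* scan in this mode; {@code builder}, {@code session}, {@code afCallback} and {@code handler}
* are assumed to exist, and exception handling is omitted:</p>
* <pre>{@code
* builder.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_AUTO);
* builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
* // One-shot trigger; watch CaptureResult.CONTROL_AF_STATE for FOCUSED_LOCKED or
* // NOT_FOCUSED_LOCKED in afCallback, then reset the trigger to IDLE.
* session.capture(builder.build(), afCallback, handler);
* builder.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
* }</pre>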

* * @see CameraCharacteristics#LENS_INFO_MINIMUM_FOCUS_DISTANCE * @see CaptureRequest#CONTROL_AF_MODE */ public static final int CONTROL_AF_MODE_AUTO = 1; /** *

Close-up focusing mode.

*

In this mode, the lens does not move unless the * autofocus trigger action is called. When that trigger is * activated, AF will transition to ACTIVE_SCAN, then to * the outcome of the scan (FOCUSED or NOT_FOCUSED). This * mode is optimized for focusing on objects very close to * the camera.

*

When that trigger is activated, AF will transition to * ACTIVE_SCAN, then to the outcome of the scan (FOCUSED or * NOT_FOCUSED). Triggering cancel AF resets the lens * position to default, and sets the AF state to * INACTIVE.

* @see CaptureRequest#CONTROL_AF_MODE */ public static final int CONTROL_AF_MODE_MACRO = 2; /** *

In this mode, the AF algorithm modifies the lens * position continually to attempt to provide a * constantly-in-focus image stream.

*

The focusing behavior should be suitable for good quality * video recording; typically this means slower focus * movement and no overshoots. When the AF trigger is not * involved, the AF algorithm should start in INACTIVE state, * and then transition into PASSIVE_SCAN and PASSIVE_FOCUSED * states as appropriate. When the AF trigger is activated, * the algorithm should immediately transition into * AF_FOCUSED or AF_NOT_FOCUSED as appropriate, and lock the * lens position until a cancel AF trigger is received.

*

Once cancel is received, the algorithm should transition * back to INACTIVE and resume passive scan. Note that this * behavior is not identical to CONTINUOUS_PICTURE, since an * ongoing PASSIVE_SCAN must immediately be * canceled.

* @see CaptureRequest#CONTROL_AF_MODE */ public static final int CONTROL_AF_MODE_CONTINUOUS_VIDEO = 3; /** *

In this mode, the AF algorithm modifies the lens * position continually to attempt to provide a * constantly-in-focus image stream.

*

The focusing behavior should be suitable for still image * capture; typically this means focusing as fast as * possible. When the AF trigger is not involved, the AF * algorithm should start in INACTIVE state, and then * transition into PASSIVE_SCAN and PASSIVE_FOCUSED states as * appropriate as it attempts to maintain focus. When the AF * trigger is activated, the algorithm should finish its * PASSIVE_SCAN if active, and then transition into * AF_FOCUSED or AF_NOT_FOCUSED as appropriate, and lock the * lens position until a cancel AF trigger is received.

*

When the AF cancel trigger is activated, the algorithm * should transition back to INACTIVE and then act as if it * has just been started.

* @see CaptureRequest#CONTROL_AF_MODE */ public static final int CONTROL_AF_MODE_CONTINUOUS_PICTURE = 4; /** *

Extended depth of field (digital focus) mode.

*

The camera device will produce images with an extended * depth of field automatically; no special focusing * operations need to be done before taking a picture.

*

AF triggers are ignored, and the AF state will always be * INACTIVE.

* @see CaptureRequest#CONTROL_AF_MODE */ public static final int CONTROL_AF_MODE_EDOF = 5; // // Enumeration values for CaptureRequest#CONTROL_AF_TRIGGER // /** *

The trigger is idle.

* @see CaptureRequest#CONTROL_AF_TRIGGER */ public static final int CONTROL_AF_TRIGGER_IDLE = 0; /** *

Autofocus will trigger now.

* @see CaptureRequest#CONTROL_AF_TRIGGER */ public static final int CONTROL_AF_TRIGGER_START = 1; /** *

Autofocus will return to its initial * state, and cancel any currently active trigger.

* @see CaptureRequest#CONTROL_AF_TRIGGER */ public static final int CONTROL_AF_TRIGGER_CANCEL = 2; // // Enumeration values for CaptureRequest#CONTROL_AWB_MODE // /** *

The camera device's auto-white balance routine is disabled.

*

The application-selected color transform matrix * ({@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform}) and gains * ({@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains}) are used by the camera * device for manual white balance control.

* * @see CaptureRequest#COLOR_CORRECTION_GAINS * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM * @see CaptureRequest#CONTROL_AWB_MODE */ public static final int CONTROL_AWB_MODE_OFF = 0; /** *

The camera device's auto-white balance routine is active.

*

The application's values for {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform} * and {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} are ignored. * For devices that support the MANUAL_POST_PROCESSING capability, the * values used by the camera device for the transform and gains * will be available in the capture result for this request.

* * @see CaptureRequest#COLOR_CORRECTION_GAINS * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM * @see CaptureRequest#CONTROL_AWB_MODE */ public static final int CONTROL_AWB_MODE_AUTO = 1; /** *

The camera device's auto-white balance routine is disabled; * the camera device uses incandescent light as the assumed scene * illumination for white balance.

*

While the exact white balance transforms are up to the * camera device, they will approximately match the CIE * standard illuminant A.

*

The application's values for {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform} * and {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} are ignored. * For devices that support the MANUAL_POST_PROCESSING capability, the * values used by the camera device for the transform and gains * will be available in the capture result for this request.

* * @see CaptureRequest#COLOR_CORRECTION_GAINS * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM * @see CaptureRequest#CONTROL_AWB_MODE */ public static final int CONTROL_AWB_MODE_INCANDESCENT = 2; /** *

The camera device's auto-white balance routine is disabled; * the camera device uses fluorescent light as the assumed scene * illumination for white balance.

*

While the exact white balance transforms are up to the * camera device, they will approximately match the CIE * standard illuminant F2.

*

The application's values for {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform} * and {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} are ignored. * For devices that support the MANUAL_POST_PROCESSING capability, the * values used by the camera device for the transform and gains * will be available in the capture result for this request.

* * @see CaptureRequest#COLOR_CORRECTION_GAINS * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM * @see CaptureRequest#CONTROL_AWB_MODE */ public static final int CONTROL_AWB_MODE_FLUORESCENT = 3; /** *

The camera device's auto-white balance routine is disabled; * the camera device uses warm fluorescent light as the assumed scene * illumination for white balance.

*

While the exact white balance transforms are up to the * camera device, they will approximately match the CIE * standard illuminant F4.

*

The application's values for {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform} * and {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} are ignored. * For devices that support the MANUAL_POST_PROCESSING capability, the * values used by the camera device for the transform and gains * will be available in the capture result for this request.

* * @see CaptureRequest#COLOR_CORRECTION_GAINS * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM * @see CaptureRequest#CONTROL_AWB_MODE */ public static final int CONTROL_AWB_MODE_WARM_FLUORESCENT = 4; /** *

The camera device's auto-white balance routine is disabled; * the camera device uses daylight light as the assumed scene * illumination for white balance.

*

While the exact white balance transforms are up to the * camera device, they will approximately match the CIE * standard illuminant D65.

*

The application's values for {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform} * and {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} are ignored. * For devices that support the MANUAL_POST_PROCESSING capability, the * values used by the camera device for the transform and gains * will be available in the capture result for this request.

* * @see CaptureRequest#COLOR_CORRECTION_GAINS * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM * @see CaptureRequest#CONTROL_AWB_MODE */ public static final int CONTROL_AWB_MODE_DAYLIGHT = 5; /** *

The camera device's auto-white balance routine is disabled; * the camera device uses cloudy daylight light as the assumed scene * illumination for white balance.

*

The application's values for {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform} * and {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} are ignored. * For devices that support the MANUAL_POST_PROCESSING capability, the * values used by the camera device for the transform and gains * will be available in the capture result for this request.

* * @see CaptureRequest#COLOR_CORRECTION_GAINS * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM * @see CaptureRequest#CONTROL_AWB_MODE */ public static final int CONTROL_AWB_MODE_CLOUDY_DAYLIGHT = 6; /** *

The camera device's auto-white balance routine is disabled; * the camera device uses twilight light as the assumed scene * illumination for white balance.

*

The application's values for {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform} * and {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} are ignored. * For devices that support the MANUAL_POST_PROCESSING capability, the * values used by the camera device for the transform and gains * will be available in the capture result for this request.

* * @see CaptureRequest#COLOR_CORRECTION_GAINS * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM * @see CaptureRequest#CONTROL_AWB_MODE */ public static final int CONTROL_AWB_MODE_TWILIGHT = 7; /** *

The camera device's auto-white balance routine is disabled; * the camera device uses shade light as the assumed scene * illumination for white balance.

*

The application's values for {@link CaptureRequest#COLOR_CORRECTION_TRANSFORM android.colorCorrection.transform} * and {@link CaptureRequest#COLOR_CORRECTION_GAINS android.colorCorrection.gains} are ignored. * For devices that support the MANUAL_POST_PROCESSING capability, the * values used by the camera device for the transform and gains * will be available in the capture result for this request.

* * @see CaptureRequest#COLOR_CORRECTION_GAINS * @see CaptureRequest#COLOR_CORRECTION_TRANSFORM * @see CaptureRequest#CONTROL_AWB_MODE */ public static final int CONTROL_AWB_MODE_SHADE = 8; // // Enumeration values for CaptureRequest#CONTROL_CAPTURE_INTENT // /** *

The goal of this request doesn't fall into the other * categories. The camera device will default to preview-like * behavior.

* @see CaptureRequest#CONTROL_CAPTURE_INTENT */ public static final int CONTROL_CAPTURE_INTENT_CUSTOM = 0; /** *

This request is for a preview-like use case.

*

The precapture trigger may be used to start off a metering * w/flash sequence.

* @see CaptureRequest#CONTROL_CAPTURE_INTENT */ public static final int CONTROL_CAPTURE_INTENT_PREVIEW = 1; /** *

This request is for a still capture-type * use case.

*

If the flash unit is under automatic control, it may fire as needed.

* @see CaptureRequest#CONTROL_CAPTURE_INTENT */ public static final int CONTROL_CAPTURE_INTENT_STILL_CAPTURE = 2; /** *

This request is for a video recording * use case.

* @see CaptureRequest#CONTROL_CAPTURE_INTENT */ public static final int CONTROL_CAPTURE_INTENT_VIDEO_RECORD = 3; /** *

This request is for a video snapshot (still * image while recording video) use case.

*

The camera device should take the highest-quality image * possible (given the other settings) without disrupting the * frame rate of video recording.

* @see CaptureRequest#CONTROL_CAPTURE_INTENT */ public static final int CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT = 4; /** *

This request is for a zero-shutter-lag (ZSL) use case; the application will stream full-resolution images and reprocess one or several of them later for a final capture.

* @see CaptureRequest#CONTROL_CAPTURE_INTENT */ public static final int CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG = 5; /** *

This request is for a manual capture use case where the application wants to directly control the capture parameters.

*

For example, the application may wish to manually control * {@link CaptureRequest#SENSOR_EXPOSURE_TIME android.sensor.exposureTime}, {@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}, etc.

* * @see CaptureRequest#SENSOR_EXPOSURE_TIME * @see CaptureRequest#SENSOR_SENSITIVITY * @see CaptureRequest#CONTROL_CAPTURE_INTENT */ public static final int CONTROL_CAPTURE_INTENT_MANUAL = 6; /** *
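A hedged sketch of a fully manual request as described above (requires the MANUAL_SENSOR capability; {@code cameraDevice} and the target {@code surface} are assumed to exist, and exception handling is omitted):

    CaptureRequest.Builder manualBuilder =
            cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_MANUAL);
    manualBuilder.addTarget(surface);
    manualBuilder.set(CaptureRequest.CONTROL_CAPTURE_INTENT,
            CameraMetadata.CONTROL_CAPTURE_INTENT_MANUAL);
    manualBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_OFF);
    manualBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 10_000_000L); // 10 ms in ns
    manualBuilder.set(CaptureRequest.SENSOR_SENSITIVITY, 400);           // ISO 400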

This request is for a motion tracking use case, where * the application will use camera and inertial sensor data to * locate and track objects in the world.

*

The camera device auto-exposure routine will limit the exposure time * of the camera to no more than 20 milliseconds, to minimize motion blur.

* @see CaptureRequest#CONTROL_CAPTURE_INTENT */ public static final int CONTROL_CAPTURE_INTENT_MOTION_TRACKING = 7; // // Enumeration values for CaptureRequest#CONTROL_EFFECT_MODE // /** *

No color effect will be applied.

* @see CaptureRequest#CONTROL_EFFECT_MODE */ public static final int CONTROL_EFFECT_MODE_OFF = 0; /** *

A "monocolor" effect where the image is mapped into * a single color.

*

This will typically be grayscale.

* @see CaptureRequest#CONTROL_EFFECT_MODE */ public static final int CONTROL_EFFECT_MODE_MONO = 1; /** *

A "photo-negative" effect where the image's colors * are inverted.

* @see CaptureRequest#CONTROL_EFFECT_MODE */ public static final int CONTROL_EFFECT_MODE_NEGATIVE = 2; /** *

A "solarisation" effect (Sabattier effect) where the * image is wholly or partially reversed in * tone.

* @see CaptureRequest#CONTROL_EFFECT_MODE */ public static final int CONTROL_EFFECT_MODE_SOLARIZE = 3; /** *

A "sepia" effect where the image is mapped into warm * gray, red, and brown tones.

* @see CaptureRequest#CONTROL_EFFECT_MODE */ public static final int CONTROL_EFFECT_MODE_SEPIA = 4; /** *

A "posterization" effect where the image uses * discrete regions of tone rather than a continuous * gradient of tones.

* @see CaptureRequest#CONTROL_EFFECT_MODE */ public static final int CONTROL_EFFECT_MODE_POSTERIZE = 5; /** *

A "whiteboard" effect where the image is typically displayed * as regions of white, with black or grey details.

* @see CaptureRequest#CONTROL_EFFECT_MODE */ public static final int CONTROL_EFFECT_MODE_WHITEBOARD = 6; /** *

A "blackboard" effect where the image is typically displayed * as regions of black, with white or grey details.

* @see CaptureRequest#CONTROL_EFFECT_MODE */ public static final int CONTROL_EFFECT_MODE_BLACKBOARD = 7; /** *

An "aqua" effect where a blue hue is added to the image.

* @see CaptureRequest#CONTROL_EFFECT_MODE */ public static final int CONTROL_EFFECT_MODE_AQUA = 8; // // Enumeration values for CaptureRequest#CONTROL_MODE // /** *

Full application control of pipeline.

*

All control by the device's metering and focusing (3A) * routines is disabled, and no other settings in * android.control.* have any effect, except that * {@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent} may be used by the camera * device to select post-processing values for processing * blocks that do not allow for manual control, or are not * exposed by the camera API.

*

However, the camera device's 3A routines may continue to * collect statistics and update their internal state so that * when control is switched to AUTO mode, good control values * can be immediately applied.

* * @see CaptureRequest#CONTROL_CAPTURE_INTENT * @see CaptureRequest#CONTROL_MODE */ public static final int CONTROL_MODE_OFF = 0; /** *

Use settings for each individual 3A routine.

*

Manual control of capture parameters is disabled. All * controls in android.control.* besides sceneMode take * effect.

* @see CaptureRequest#CONTROL_MODE */ public static final int CONTROL_MODE_AUTO = 1; /** *

Use a specific scene mode.

*

Enabling this disables control.aeMode, control.awbMode and control.afMode controls; the camera device will ignore those settings while USE_SCENE_MODE is active (except for FACE_PRIORITY scene mode). Other control entries are still active. This setting can only be used if scene mode is supported (i.e. {@link CameraCharacteristics#CONTROL_AVAILABLE_SCENE_MODES android.control.availableSceneModes} contains some modes other than DISABLED).

*

For extended scene modes such as BOKEH, please use USE_EXTENDED_SCENE_MODE instead.

* * @see CameraCharacteristics#CONTROL_AVAILABLE_SCENE_MODES * @see CaptureRequest#CONTROL_MODE */ public static final int CONTROL_MODE_USE_SCENE_MODE = 2; /** *
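As an illustrative sketch (assuming {@code characteristics} and {@code requestBuilder} exist in application code), USE_SCENE_MODE is typically enabled only after checking the advertised scene modes:

    int[] sceneModes = characteristics.get(
            CameraCharacteristics.CONTROL_AVAILABLE_SCENE_MODES);
    if (sceneModes != null && Arrays.stream(sceneModes).anyMatch(
            m -> m == CameraMetadata.CONTROL_SCENE_MODE_NIGHT)) {
        requestBuilder.set(CaptureRequest.CONTROL_MODE,
                CameraMetadata.CONTROL_MODE_USE_SCENE_MODE);
        requestBuilder.set(CaptureRequest.CONTROL_SCENE_MODE,
                CameraMetadata.CONTROL_SCENE_MODE_NIGHT);
    }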

Same as OFF mode, except that this capture will not be * used by camera device background auto-exposure, auto-white balance and * auto-focus algorithms (3A) to update their statistics.

*

Specifically, the 3A routines are locked to the last * values set from a request with AUTO, OFF, or * USE_SCENE_MODE, and any statistics or state updates * collected from manual captures with OFF_KEEP_STATE will be * discarded by the camera device.

* @see CaptureRequest#CONTROL_MODE */ public static final int CONTROL_MODE_OFF_KEEP_STATE = 3; /** *

Use a specific extended scene mode.

*

When extended scene mode is on, the camera device may override certain control * parameters, such as targetFpsRange, AE, AWB, and AF modes, to achieve best power and * quality tradeoffs. Only the mandatory stream combinations of LIMITED hardware level * are guaranteed.

*

This setting can only be used if extended scene mode is supported (i.e. * android.control.availableExtendedSceneModes * contains some modes other than DISABLED).

* @see CaptureRequest#CONTROL_MODE */ public static final int CONTROL_MODE_USE_EXTENDED_SCENE_MODE = 4; // // Enumeration values for CaptureRequest#CONTROL_SCENE_MODE // /** *

Indicates that no scene modes are set for a given capture request.

* @see CaptureRequest#CONTROL_SCENE_MODE */ public static final int CONTROL_SCENE_MODE_DISABLED = 0; /** *

If face detection support exists, use face * detection data for auto-focus, auto-white balance, and * auto-exposure routines.

*

If face detection statistics are disabled * (i.e. {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode} is set to OFF), * this should still operate correctly (but will not return * face detection statistics to the framework).

*

Unlike the other scene modes, {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode}, * {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}, and {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode} * remain active when FACE_PRIORITY is set.

* * @see CaptureRequest#CONTROL_AE_MODE * @see CaptureRequest#CONTROL_AF_MODE * @see CaptureRequest#CONTROL_AWB_MODE * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE * @see CaptureRequest#CONTROL_SCENE_MODE */ public static final int CONTROL_SCENE_MODE_FACE_PRIORITY = 1; /** *

Optimized for photos of quickly moving objects.

*

Similar to SPORTS.

* @see CaptureRequest#CONTROL_SCENE_MODE */ public static final int CONTROL_SCENE_MODE_ACTION = 2; /** *

Optimized for still photos of people.

* @see CaptureRequest#CONTROL_SCENE_MODE */ public static final int CONTROL_SCENE_MODE_PORTRAIT = 3; /** *

Optimized for photos of distant macroscopic objects.

* @see CaptureRequest#CONTROL_SCENE_MODE */ public static final int CONTROL_SCENE_MODE_LANDSCAPE = 4; /** *

Optimized for low-light settings.

* @see CaptureRequest#CONTROL_SCENE_MODE */ public static final int CONTROL_SCENE_MODE_NIGHT = 5; /** *

Optimized for still photos of people in low-light * settings.

* @see CaptureRequest#CONTROL_SCENE_MODE */ public static final int CONTROL_SCENE_MODE_NIGHT_PORTRAIT = 6; /** *

Optimized for dim, indoor settings where flash must * remain off.

* @see CaptureRequest#CONTROL_SCENE_MODE */ public static final int CONTROL_SCENE_MODE_THEATRE = 7; /** *

Optimized for bright, outdoor beach settings.

* @see CaptureRequest#CONTROL_SCENE_MODE */ public static final int CONTROL_SCENE_MODE_BEACH = 8; /** *

Optimized for bright, outdoor settings containing snow.

* @see CaptureRequest#CONTROL_SCENE_MODE */ public static final int CONTROL_SCENE_MODE_SNOW = 9; /** *

Optimized for scenes of the setting sun.

* @see CaptureRequest#CONTROL_SCENE_MODE */ public static final int CONTROL_SCENE_MODE_SUNSET = 10; /** *

Optimized to avoid blurry photos due to small amounts of * device motion (for example: due to hand shake).

* @see CaptureRequest#CONTROL_SCENE_MODE */ public static final int CONTROL_SCENE_MODE_STEADYPHOTO = 11; /** *

Optimized for nighttime photos of fireworks.

* @see CaptureRequest#CONTROL_SCENE_MODE */ public static final int CONTROL_SCENE_MODE_FIREWORKS = 12; /** *

Optimized for photos of quickly moving people.

*

Similar to ACTION.

* @see CaptureRequest#CONTROL_SCENE_MODE */ public static final int CONTROL_SCENE_MODE_SPORTS = 13; /** *

Optimized for dim, indoor settings with multiple moving * people.

* @see CaptureRequest#CONTROL_SCENE_MODE */ public static final int CONTROL_SCENE_MODE_PARTY = 14; /** *

Optimized for dim settings where the main light source * is a candle.

* @see CaptureRequest#CONTROL_SCENE_MODE */ public static final int CONTROL_SCENE_MODE_CANDLELIGHT = 15; /** *

Optimized for accurately capturing a photo of a barcode, for use by camera applications that wish to read the barcode value.

* @see CaptureRequest#CONTROL_SCENE_MODE */ public static final int CONTROL_SCENE_MODE_BARCODE = 16; /** *

This is deprecated, please use {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession } * and {@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList } * for high speed video recording.

*

Optimized for high speed video recording (frame rate >=60fps) use case.

*

The supported high speed video sizes and fps ranges are specified in android.control.availableHighSpeedVideoConfigurations. To get the desired output frame rates, the application is only allowed to select video size and fps range combinations listed in this static metadata. The fps range can be controlled via {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE android.control.aeTargetFpsRange}.

*

In this mode, the camera device will override aeMode, awbMode, and afMode to * ON, ON, and CONTINUOUS_VIDEO, respectively. All post-processing block mode * controls will be overridden to be FAST. Therefore, no manual control of capture * and post-processing parameters is possible. All other controls operate the * same as when {@link CaptureRequest#CONTROL_MODE android.control.mode} == AUTO. This means that all other * android.control.* fields continue to work, such as

*
  • {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE android.control.aeTargetFpsRange}
  • {@link CaptureRequest#CONTROL_AE_EXPOSURE_COMPENSATION android.control.aeExposureCompensation}
  • {@link CaptureRequest#CONTROL_AE_LOCK android.control.aeLock}
  • {@link CaptureRequest#CONTROL_AWB_LOCK android.control.awbLock}
  • {@link CaptureRequest#CONTROL_EFFECT_MODE android.control.effectMode}
  • {@link CaptureRequest#CONTROL_AE_REGIONS android.control.aeRegions}
  • {@link CaptureRequest#CONTROL_AF_REGIONS android.control.afRegions}
  • {@link CaptureRequest#CONTROL_AWB_REGIONS android.control.awbRegions}
  • {@link CaptureRequest#CONTROL_AF_TRIGGER android.control.afTrigger}
  • {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger}
  • {@link CaptureRequest#CONTROL_ZOOM_RATIO android.control.zoomRatio}
*

Outside of android.control.*, the following controls will work:

*
  • {@link CaptureRequest#FLASH_MODE android.flash.mode} (automatic flash for still capture will not work since aeMode is ON)
  • {@link CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE android.lens.opticalStabilizationMode} (if it is supported)
  • {@link CaptureRequest#SCALER_CROP_REGION android.scaler.cropRegion}
  • {@link CaptureRequest#STATISTICS_FACE_DETECT_MODE android.statistics.faceDetectMode}
*

For the high speed recording use case, the actual maximum supported frame rate may be lower than what the camera can output, depending on the destination Surfaces for the image data. For example, if the destination surface is from a video encoder, the application needs to check whether the video encoder is capable of supporting the high frame rate for a given video size, or it will end up with a lower recording frame rate. If the destination surface is from a preview window, the preview frame rate will be bounded by the screen refresh rate.

*

The camera device will only support up to 2 output high speed streams (processed non-stalling format defined in android.request.maxNumOutputStreams) in this mode. This control will be effective only if all of the below conditions are true:

*
  • The application created no more than maxNumHighSpeedStreams processed non-stalling format output streams, where maxNumHighSpeedStreams is calculated as min(2, android.request.maxNumOutputStreams[Processed (but not-stalling)]).
  • The stream sizes are selected from the sizes reported by android.control.availableHighSpeedVideoConfigurations.
  • No processed non-stalling or raw streams are configured.
*

When the above conditions are NOT satisfied, the controls of this mode and {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE android.control.aeTargetFpsRange} will be ignored by the camera device; the camera device will fall back to {@link CaptureRequest#CONTROL_MODE android.control.mode} == AUTO, and the returned capture result metadata will give the fps range chosen by the camera device.

*

Switching into or out of this mode may trigger some camera ISP/sensor reconfigurations, which may introduce extra latency. It is recommended that the application avoid unnecessary scene mode switches as much as possible.

* * @see CaptureRequest#CONTROL_AE_EXPOSURE_COMPENSATION * @see CaptureRequest#CONTROL_AE_LOCK * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER * @see CaptureRequest#CONTROL_AE_REGIONS * @see CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE * @see CaptureRequest#CONTROL_AF_REGIONS * @see CaptureRequest#CONTROL_AF_TRIGGER * @see CaptureRequest#CONTROL_AWB_LOCK * @see CaptureRequest#CONTROL_AWB_REGIONS * @see CaptureRequest#CONTROL_EFFECT_MODE * @see CaptureRequest#CONTROL_MODE * @see CaptureRequest#CONTROL_ZOOM_RATIO * @see CaptureRequest#FLASH_MODE * @see CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE * @see CaptureRequest#SCALER_CROP_REGION * @see CaptureRequest#STATISTICS_FACE_DETECT_MODE * @see CaptureRequest#CONTROL_SCENE_MODE * @deprecated Please refer to this API documentation to find the alternatives */ @Deprecated public static final int CONTROL_SCENE_MODE_HIGH_SPEED_VIDEO = 17; /** *
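The deprecation note above points at the constrained high-speed session APIs; a rough sketch of that replacement path follows (illustrative only; {@code cameraDevice}, {@code recordSurfaces}, {@code captureCallback} and {@code handler} are assumed to exist, and the FPS range must be one advertised for the chosen size):

    try {
        cameraDevice.createConstrainedHighSpeedCaptureSession(recordSurfaces,
                new CameraCaptureSession.StateCallback() {
                    @Override
                    public void onConfigured(CameraCaptureSession session) {
                        try {
                            CameraConstrainedHighSpeedCaptureSession hsSession =
                                    (CameraConstrainedHighSpeedCaptureSession) session;
                            CaptureRequest.Builder builder = cameraDevice
                                    .createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
                            for (Surface s : recordSurfaces) {
                                builder.addTarget(s);
                            }
                            // Must match a range advertised for the configured size.
                            builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE,
                                    new Range<>(120, 120));
                            hsSession.setRepeatingBurst(
                                    hsSession.createHighSpeedRequestList(builder.build()),
                                    captureCallback, handler);
                        } catch (CameraAccessException e) {
                            // handle error
                        }
                    }
                    @Override
                    public void onConfigureFailed(CameraCaptureSession session) {
                        // handle failure
                    }
                }, handler);
    } catch (CameraAccessException e) {
        // handle error
    }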

Turn on a device-specific high dynamic range (HDR) mode.

*

In this scene mode, the camera device captures images that keep a larger range of scene illumination levels visible in the final image. For example, when taking a picture of an object in front of a bright window, both the object and the scene through the window may be visible when using HDR mode, while in normal AUTO mode, one or the other may be poorly exposed. As a tradeoff, HDR mode generally takes much longer to capture a single image, has no user control, and may have other artifacts depending on the HDR method used.

*

Therefore, HDR captures operate at a much slower rate * than regular captures.

*

In this mode, on LIMITED or FULL devices, when a request * is made with a {@link CaptureRequest#CONTROL_CAPTURE_INTENT android.control.captureIntent} of * STILL_CAPTURE, the camera device will capture an image * using a high dynamic range capture technique. On LEGACY * devices, captures that target a JPEG-format output will * be captured with HDR, and the capture intent is not * relevant.

*

The HDR capture may involve the device capturing a burst * of images internally and combining them into one, or it * may involve the device using specialized high dynamic * range capture hardware. In all cases, a single image is * produced in response to a capture request submitted * while in HDR mode.

*

Since substantial post-processing is generally needed to * produce an HDR image, only YUV, PRIVATE, and JPEG * outputs are supported for LIMITED/FULL device HDR * captures, and only JPEG outputs are supported for LEGACY * HDR captures. Using a RAW output for HDR capture is not * supported.

*

Some devices may also support always-on HDR, which * applies HDR processing at full frame rate. For these * devices, intents other than STILL_CAPTURE will also * produce an HDR output with no frame rate impact compared * to normal operation, though the quality may be lower * than for STILL_CAPTURE intents.

*

If SCENE_MODE_HDR is used with unsupported output types * or capture intents, the images captured will be as if * the SCENE_MODE was not enabled at all.

* * @see CaptureRequest#CONTROL_CAPTURE_INTENT * @see CaptureRequest#CONTROL_SCENE_MODE */ public static final int CONTROL_SCENE_MODE_HDR = 18; /** *

Same as FACE_PRIORITY scene mode, except that the camera * device will choose higher sensitivity values ({@link CaptureRequest#SENSOR_SENSITIVITY android.sensor.sensitivity}) * under low light conditions.

*

The camera device may be tuned to expose the images in a reduced * sensitivity range to produce the best quality images. For example, * if the {@link CameraCharacteristics#SENSOR_INFO_SENSITIVITY_RANGE android.sensor.info.sensitivityRange} gives range of [100, 1600], * the camera device auto-exposure routine tuning process may limit the actual * exposure sensitivity range to [100, 1200] to ensure that the noise level isn't * excessive in order to preserve the image quality. Under this situation, the image under * low light may be under-exposed when the sensor max exposure time (bounded by the * {@link CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE android.control.aeTargetFpsRange} when {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode} is one of the * ON_* modes) and effective max sensitivity are reached. This scene mode allows the * camera device auto-exposure routine to increase the sensitivity up to the max * sensitivity specified by {@link CameraCharacteristics#SENSOR_INFO_SENSITIVITY_RANGE android.sensor.info.sensitivityRange} when the scene is too * dark and the max exposure time is reached. The captured images may be noisier * compared with the images captured in normal FACE_PRIORITY mode; therefore, it is * recommended that the application only use this scene mode when it is capable of * reducing the noise level of the captured images.

*

Unlike the other scene modes, {@link CaptureRequest#CONTROL_AE_MODE android.control.aeMode}, * {@link CaptureRequest#CONTROL_AWB_MODE android.control.awbMode}, and {@link CaptureRequest#CONTROL_AF_MODE android.control.afMode} * remain active when FACE_PRIORITY_LOW_LIGHT is set.

* * @see CaptureRequest#CONTROL_AE_MODE * @see CaptureRequest#CONTROL_AE_TARGET_FPS_RANGE * @see CaptureRequest#CONTROL_AF_MODE * @see CaptureRequest#CONTROL_AWB_MODE * @see CameraCharacteristics#SENSOR_INFO_SENSITIVITY_RANGE * @see CaptureRequest#SENSOR_SENSITIVITY * @see CaptureRequest#CONTROL_SCENE_MODE * @hide */ public static final int CONTROL_SCENE_MODE_FACE_PRIORITY_LOW_LIGHT = 19; /** *

Scene mode values within the range of * [DEVICE_CUSTOM_START, DEVICE_CUSTOM_END] are reserved for device specific * customized scene modes.

* @see CaptureRequest#CONTROL_SCENE_MODE * @hide */ public static final int CONTROL_SCENE_MODE_DEVICE_CUSTOM_START = 100; /** *

Scene mode values within the range of * [DEVICE_CUSTOM_START, DEVICE_CUSTOM_END] are reserved for device specific * customized scene modes.

* @see CaptureRequest#CONTROL_SCENE_MODE * @hide */ public static final int CONTROL_SCENE_MODE_DEVICE_CUSTOM_END = 127; // // Enumeration values for CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE // /** *

Video stabilization is disabled.

* @see CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE */ public static final int CONTROL_VIDEO_STABILIZATION_MODE_OFF = 0; /** *

Video stabilization is enabled.

* @see CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE */ public static final int CONTROL_VIDEO_STABILIZATION_MODE_ON = 1; /** *

Preview stabilization, where the preview in addition to all other non-RAW streams are * stabilized with the same quality of stabilization, is enabled. This mode aims to give * clients a 'what you see is what you get' effect. In this mode, the FoV reduction will * be a maximum of 20 % both horizontally and vertically * (10% from left, right, top, bottom) for the given zoom ratio / crop region. * The resultant FoV will also be the same across all processed streams * (that have the same aspect ratio).

* @see CaptureRequest#CONTROL_VIDEO_STABILIZATION_MODE */ public static final int CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION = 2; // // Enumeration values for CaptureRequest#CONTROL_EXTENDED_SCENE_MODE // /** *
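As an illustrative sketch (assuming {@code characteristics} and {@code requestBuilder} exist in application code), preview stabilization is normally enabled only when advertised:

    int[] stabModes = characteristics.get(
            CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES);
    if (stabModes != null && Arrays.stream(stabModes).anyMatch(m ->
            m == CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION)) {
        requestBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE,
                CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION);
    }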

Extended scene mode is disabled.

* @see CaptureRequest#CONTROL_EXTENDED_SCENE_MODE */ public static final int CONTROL_EXTENDED_SCENE_MODE_DISABLED = 0; /** *

High quality bokeh mode is enabled for all non-raw streams (including YUV, JPEG, and IMPLEMENTATION_DEFINED) when the capture intent is STILL_CAPTURE. Due to the extra image processing, this mode may introduce additional stall to non-raw streams. This mode should be used in high-quality still capture use cases.

* @see CaptureRequest#CONTROL_EXTENDED_SCENE_MODE */ public static final int CONTROL_EXTENDED_SCENE_MODE_BOKEH_STILL_CAPTURE = 1; /** *

Bokeh effect must not slow down capture rate relative to sensor raw output, * and the effect is applied to all processed streams no larger than the maximum * streaming dimension. This mode should be used if performance and power are a * priority, such as video recording.

* @see CaptureRequest#CONTROL_EXTENDED_SCENE_MODE */ public static final int CONTROL_EXTENDED_SCENE_MODE_BOKEH_CONTINUOUS = 2; /** *
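A minimal sketch of enabling continuous bokeh only when the device lists it (illustrative; {@code characteristics} and {@code requestBuilder} are assumed, and {@code Capability} is {@code android.hardware.camera2.params.Capability}):

    Capability[] caps = characteristics.get(
            CameraCharacteristics.CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_CAPABILITIES);
    if (caps != null) {
        for (Capability cap : caps) {
            if (cap.getMode()
                    == CameraMetadata.CONTROL_EXTENDED_SCENE_MODE_BOKEH_CONTINUOUS) {
                requestBuilder.set(CaptureRequest.CONTROL_EXTENDED_SCENE_MODE,
                        CameraMetadata.CONTROL_EXTENDED_SCENE_MODE_BOKEH_CONTINUOUS);
                break;
            }
        }
    }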

Vendor defined extended scene modes. These depend on vendor implementation.

* @see CaptureRequest#CONTROL_EXTENDED_SCENE_MODE * @hide */ public static final int CONTROL_EXTENDED_SCENE_MODE_VENDOR_START = 0x40; // // Enumeration values for CaptureRequest#EDGE_MODE // /** *

No edge enhancement is applied.

* @see CaptureRequest#EDGE_MODE */ public static final int EDGE_MODE_OFF = 0; /** *

Apply edge enhancement at a quality level that does not slow down frame rate * relative to sensor output. It may be the same as OFF if edge enhancement will * slow down frame rate relative to sensor.

* @see CaptureRequest#EDGE_MODE */ public static final int EDGE_MODE_FAST = 1; /** *

Apply high-quality edge enhancement, at a cost of possibly reduced output frame rate.

* @see CaptureRequest#EDGE_MODE */ public static final int EDGE_MODE_HIGH_QUALITY = 2; /** *

Edge enhancement is applied at different * levels for different output streams, based on resolution. Streams at maximum recording * resolution (see {@link android.hardware.camera2.CameraDevice#createCaptureSession }) * or below have edge enhancement applied, while higher-resolution streams have no edge * enhancement applied. The level of edge enhancement for low-resolution streams is tuned * so that frame rate is not impacted, and the quality is equal to or better than FAST * (since it is only applied to lower-resolution outputs, quality may improve from FAST).

*

This mode is intended to be used by applications operating in a zero-shutter-lag mode * with YUV or PRIVATE reprocessing, where the application continuously captures * high-resolution intermediate buffers into a circular buffer, from which a final image is * produced via reprocessing when a user takes a picture. For such a use case, the * high-resolution buffers must not have edge enhancement applied to maximize efficiency of * preview and to avoid double-applying enhancement when reprocessed, while low-resolution * buffers (used for recording or preview, generally) need edge enhancement applied for * reasonable preview quality.

*

This mode is guaranteed to be supported by devices that support either the * YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities * ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} lists either of those capabilities) and it will * be the default mode for CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.

* * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES * @see CaptureRequest#EDGE_MODE */ public static final int EDGE_MODE_ZERO_SHUTTER_LAG = 3; // // Enumeration values for CaptureRequest#FLASH_MODE // /** *

Do not fire the flash for this capture.

* @see CaptureRequest#FLASH_MODE */ public static final int FLASH_MODE_OFF = 0; /** *

If the flash is available and charged, fire flash * for this capture.

* @see CaptureRequest#FLASH_MODE */ public static final int FLASH_MODE_SINGLE = 1; /** *

Transition flash to continuously on.

* @see CaptureRequest#FLASH_MODE */ public static final int FLASH_MODE_TORCH = 2; // // Enumeration values for CaptureRequest#HOT_PIXEL_MODE // /** *

No hot pixel correction is applied.

*

The frame rate must not be reduced relative to sensor raw output * for this option.

*

The hotpixel map may be returned in {@link CaptureResult#STATISTICS_HOT_PIXEL_MAP android.statistics.hotPixelMap}.

* * @see CaptureResult#STATISTICS_HOT_PIXEL_MAP * @see CaptureRequest#HOT_PIXEL_MODE */ public static final int HOT_PIXEL_MODE_OFF = 0; /** *

Hot pixel correction is applied, without reducing frame * rate relative to sensor raw output.

*

The hotpixel map may be returned in {@link CaptureResult#STATISTICS_HOT_PIXEL_MAP android.statistics.hotPixelMap}.

* * @see CaptureResult#STATISTICS_HOT_PIXEL_MAP * @see CaptureRequest#HOT_PIXEL_MODE */ public static final int HOT_PIXEL_MODE_FAST = 1; /** *

High-quality hot pixel correction is applied, at a cost * of possibly reduced frame rate relative to sensor raw output.

*

The hotpixel map may be returned in {@link CaptureResult#STATISTICS_HOT_PIXEL_MAP android.statistics.hotPixelMap}.

* * @see CaptureResult#STATISTICS_HOT_PIXEL_MAP * @see CaptureRequest#HOT_PIXEL_MODE */ public static final int HOT_PIXEL_MODE_HIGH_QUALITY = 2; // // Enumeration values for CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE // /** *

Optical stabilization is unavailable.

* @see CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE */ public static final int LENS_OPTICAL_STABILIZATION_MODE_OFF = 0; /** *

Optical stabilization is enabled.

* @see CaptureRequest#LENS_OPTICAL_STABILIZATION_MODE */ public static final int LENS_OPTICAL_STABILIZATION_MODE_ON = 1; // // Enumeration values for CaptureRequest#NOISE_REDUCTION_MODE // /** *

No noise reduction is applied.

* @see CaptureRequest#NOISE_REDUCTION_MODE */ public static final int NOISE_REDUCTION_MODE_OFF = 0; /** *

Noise reduction is applied without reducing frame rate relative to sensor * output. It may be the same as OFF if noise reduction will reduce frame rate * relative to sensor.

* @see CaptureRequest#NOISE_REDUCTION_MODE */ public static final int NOISE_REDUCTION_MODE_FAST = 1; /** *

High-quality noise reduction is applied, at the cost of possibly reduced frame * rate relative to sensor output.

* @see CaptureRequest#NOISE_REDUCTION_MODE */ public static final int NOISE_REDUCTION_MODE_HIGH_QUALITY = 2; /** *

MINIMAL noise reduction is applied without reducing frame rate relative to * sensor output.

* @see CaptureRequest#NOISE_REDUCTION_MODE */ public static final int NOISE_REDUCTION_MODE_MINIMAL = 3; /** *

Noise reduction is applied at different levels for different output streams, * based on resolution. Streams at maximum recording resolution (see {@link android.hardware.camera2.CameraDevice#createCaptureSession }) * or below have noise reduction applied, while higher-resolution streams have MINIMAL (if * supported) or no noise reduction applied (if MINIMAL is not supported.) The degree of * noise reduction for low-resolution streams is tuned so that frame rate is not impacted, * and the quality is equal to or better than FAST (since it is only applied to * lower-resolution outputs, quality may improve from FAST).

*

This mode is intended to be used by applications operating in a zero-shutter-lag mode * with YUV or PRIVATE reprocessing, where the application continuously captures * high-resolution intermediate buffers into a circular buffer, from which a final image is * produced via reprocessing when a user takes a picture. For such a use case, the * high-resolution buffers must not have noise reduction applied to maximize efficiency of * preview and to avoid over-applying noise filtering when reprocessing, while * low-resolution buffers (used for recording or preview, generally) need noise reduction * applied for reasonable preview quality.

*

This mode is guaranteed to be supported by devices that support either the * YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities * ({@link CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES android.request.availableCapabilities} lists either of those capabilities) and it will * be the default mode for CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template.

* * @see CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES * @see CaptureRequest#NOISE_REDUCTION_MODE */ public static final int NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG = 4; // // Enumeration values for CaptureRequest#SCALER_ROTATE_AND_CROP // /** *

No rotate and crop is applied. Processed outputs are in the sensor orientation.

* @see CaptureRequest#SCALER_ROTATE_AND_CROP */ public static final int SCALER_ROTATE_AND_CROP_NONE = 0; /** *

Processed images are rotated by 90 degrees clockwise, and then cropped * to the original aspect ratio.

* @see CaptureRequest#SCALER_ROTATE_AND_CROP */ public static final int SCALER_ROTATE_AND_CROP_90 = 1; /** *

Processed images are rotated by 180 degrees. Since the aspect ratio does not * change, no cropping is performed.

* @see CaptureRequest#SCALER_ROTATE_AND_CROP */ public static final int SCALER_ROTATE_AND_CROP_180 = 2; /** *

Processed images are rotated by 270 degrees clockwise, and then cropped * to the original aspect ratio.

* @see CaptureRequest#SCALER_ROTATE_AND_CROP */ public static final int SCALER_ROTATE_AND_CROP_270 = 3; /** *

The camera API automatically selects the best concrete value for * rotate-and-crop based on the application's support for resizability and the current * multi-window mode.

*

If the application does not support resizing but the display mode for its main * Activity is not in a typical orientation, the camera API will set ROTATE_AND_CROP_90 * or some other supported rotation value, depending on device configuration, * to ensure preview and captured images are correctly shown to the user. Otherwise, * ROTATE_AND_CROP_NONE will be selected.

*

When a value other than NONE is selected, several metadata fields will also be parsed * differently to ensure that coordinates are correctly handled for features like drawing * face detection boxes or passing in tap-to-focus coordinates. The camera API will * convert positions in the active array coordinate system to/from the cropped-and-rotated * coordinate system to make the operation transparent for applications.

*

No coordinate mapping will be done when the application selects a non-AUTO mode.

* @see CaptureRequest#SCALER_ROTATE_AND_CROP */ public static final int SCALER_ROTATE_AND_CROP_AUTO = 4; // // Enumeration values for CaptureRequest#SENSOR_TEST_PATTERN_MODE // /** *
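A minimal sketch of opting into AUTO rotate-and-crop when the device advertises it (illustrative; {@code characteristics} and {@code requestBuilder} are assumed):

    int[] rcModes = characteristics.get(
            CameraCharacteristics.SCALER_AVAILABLE_ROTATE_AND_CROP_MODES);
    if (rcModes != null && Arrays.stream(rcModes).anyMatch(
            m -> m == CameraMetadata.SCALER_ROTATE_AND_CROP_AUTO)) {
        requestBuilder.set(CaptureRequest.SCALER_ROTATE_AND_CROP,
                CameraMetadata.SCALER_ROTATE_AND_CROP_AUTO);
    }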

No test pattern mode is used, and the camera * device returns captures from the image sensor.

*

This is the default if the key is not set.

* @see CaptureRequest#SENSOR_TEST_PATTERN_MODE */ public static final int SENSOR_TEST_PATTERN_MODE_OFF = 0; /** *

Each pixel in [R, G_even, G_odd, B] is replaced by its * respective color channel provided in * {@link CaptureRequest#SENSOR_TEST_PATTERN_DATA android.sensor.testPatternData}.

*

For example:

*
{@link CaptureRequest#SENSOR_TEST_PATTERN_DATA android.sensor.testPatternData} = [0, 0xFFFFFFFF, 0xFFFFFFFF, 0]
*

All green pixels are 100% green. All red/blue pixels are black.

*
{@link CaptureRequest#SENSOR_TEST_PATTERN_DATA android.sensor.testPatternData} = [0xFFFFFFFF, 0, 0xFFFFFFFF, 0]
*

All red pixels are 100% red. Only the odd green pixels * are 100% green. All blue pixels are 100% black.

* * @see CaptureRequest#SENSOR_TEST_PATTERN_DATA * @see CaptureRequest#SENSOR_TEST_PATTERN_MODE */ public static final int SENSOR_TEST_PATTERN_MODE_SOLID_COLOR = 1; /** *
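A short sketch of the green-only example above (illustrative; {@code requestBuilder} is an assumed {@code CaptureRequest.Builder}):

    requestBuilder.set(CaptureRequest.SENSOR_TEST_PATTERN_MODE,
            CameraMetadata.SENSOR_TEST_PATTERN_MODE_SOLID_COLOR);
    // [R, G_even, G_odd, B]: only the green channels are saturated.
    requestBuilder.set(CaptureRequest.SENSOR_TEST_PATTERN_DATA,
            new int[] {0, 0xFFFFFFFF, 0xFFFFFFFF, 0});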

All pixel data is replaced with an 8-bar color pattern.

*

The vertical bars (left-to-right) are as follows:

*
  • 100% white
  • yellow
  • cyan
  • green
  • magenta
  • red
  • blue
  • black
*

In general the image would look like the following:

*
W Y C G M R B K
     * W Y C G M R B K
     * W Y C G M R B K
     * W Y C G M R B K
     * W Y C G M R B K
     * . . . . . . . .
     * . . . . . . . .
     * . . . . . . . .
     *
     * (B = Blue, K = Black)
     * 
*

Each bar should take up 1/8 of the sensor pixel array width. * When this is not possible, the bar size should be rounded * down to the nearest integer and the pattern can repeat * on the right side.

*

Each bar's height must always take up the full sensor * pixel array height.

*

Each pixel in this test pattern must be set to either * 0% intensity or 100% intensity.

* @see CaptureRequest#SENSOR_TEST_PATTERN_MODE */ public static final int SENSOR_TEST_PATTERN_MODE_COLOR_BARS = 2; /** *

The test pattern is similar to COLOR_BARS, except that * each bar should start at its specified color at the top, * and fade to gray at the bottom.

*

Furthermore each bar is further subdivided into a left and * right half. The left half should have a smooth gradient, * and the right half should have a quantized gradient.

*

In particular, the right half should consist of blocks of the same color, each 1/16 of the active sensor pixel array width wide.

*

The least significant bits in the quantized gradient should * be copied from the most significant bits of the smooth gradient.

*

The height of each bar should always be a multiple of 128. * When this is not the case, the pattern should repeat at the bottom * of the image.

* @see CaptureRequest#SENSOR_TEST_PATTERN_MODE */ public static final int SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY = 3; /** *

All pixel data is replaced by a pseudo-random sequence * generated from a PN9 512-bit sequence (typically implemented * in hardware with a linear feedback shift register).

*

The generator should be reset at the beginning of each frame, * and thus each subsequent raw frame with this test pattern should * be exactly the same as the last.

* @see CaptureRequest#SENSOR_TEST_PATTERN_MODE */ public static final int SENSOR_TEST_PATTERN_MODE_PN9 = 4; /** *

All pixel data is replaced by 0% intensity (black) values.

*

This test pattern is identical to SOLID_COLOR with a value of [0, 0, 0, 0] for * {@link CaptureRequest#SENSOR_TEST_PATTERN_DATA android.sensor.testPatternData}. It is recommended that devices implement full * SOLID_COLOR support instead, but BLACK can be used to provide minimal support for a * test pattern suitable for privacy use cases.

* * @see CaptureRequest#SENSOR_TEST_PATTERN_DATA * @see CaptureRequest#SENSOR_TEST_PATTERN_MODE * @hide */ @TestApi public static final int SENSOR_TEST_PATTERN_MODE_BLACK = 5; /** *

The first custom test pattern. All custom patterns that are * available only on this camera device are at least this numeric * value.

*

All of the custom test patterns will be static * (that is the raw image must not vary from frame to frame).

* @see CaptureRequest#SENSOR_TEST_PATTERN_MODE */ public static final int SENSOR_TEST_PATTERN_MODE_CUSTOM1 = 256; // // Enumeration values for CaptureRequest#SENSOR_PIXEL_MODE // /** *

This is the default sensor pixel mode. This is the only sensor pixel mode * supported unless a camera device advertises * {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR }.

* @see CaptureRequest#SENSOR_PIXEL_MODE */ public static final int SENSOR_PIXEL_MODE_DEFAULT = 0; /** *

This sensor pixel mode is offered by devices with the capability {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR }. In this mode, sensors typically do not bin pixels and, as a result, can offer larger image sizes.

* @see CaptureRequest#SENSOR_PIXEL_MODE */ public static final int SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION = 1; // // Enumeration values for CaptureRequest#SHADING_MODE // /** *

No lens shading correction is applied.

* @see CaptureRequest#SHADING_MODE */ public static final int SHADING_MODE_OFF = 0; /** *

Apply lens shading corrections, without slowing frame rate relative to sensor raw output.

* @see CaptureRequest#SHADING_MODE */ public static final int SHADING_MODE_FAST = 1; /** *

Apply high-quality lens shading correction, at the * cost of possibly reduced frame rate.

* @see CaptureRequest#SHADING_MODE */ public static final int SHADING_MODE_HIGH_QUALITY = 2; // // Enumeration values for CaptureRequest#STATISTICS_FACE_DETECT_MODE // /** *

Do not include face detection statistics in capture * results.

* @see CaptureRequest#STATISTICS_FACE_DETECT_MODE */ public static final int STATISTICS_FACE_DETECT_MODE_OFF = 0; /** *

Return face rectangle and confidence values only.

* @see CaptureRequest#STATISTICS_FACE_DETECT_MODE */ public static final int STATISTICS_FACE_DETECT_MODE_SIMPLE = 1; /** *

Return all face * metadata.

*

In this mode, face rectangles, scores, landmarks, and face IDs are all valid.

* @see CaptureRequest#STATISTICS_FACE_DETECT_MODE */ public static final int STATISTICS_FACE_DETECT_MODE_FULL = 2; // // Enumeration values for CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE // /** *

Do not include a lens shading map in the capture result.

* @see CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE */ public static final int STATISTICS_LENS_SHADING_MAP_MODE_OFF = 0; /** *

Include a lens shading map in the capture result.

* @see CaptureRequest#STATISTICS_LENS_SHADING_MAP_MODE */ public static final int STATISTICS_LENS_SHADING_MAP_MODE_ON = 1; // // Enumeration values for CaptureRequest#STATISTICS_OIS_DATA_MODE // /** *

Do not include OIS data in the capture result.

* @see CaptureRequest#STATISTICS_OIS_DATA_MODE */ public static final int STATISTICS_OIS_DATA_MODE_OFF = 0; /** *

Include OIS data in the capture result.

*

{@link CaptureResult#STATISTICS_OIS_SAMPLES android.statistics.oisSamples} provides OIS sample data in the * output result metadata.

* * @see CaptureResult#STATISTICS_OIS_SAMPLES * @see CaptureRequest#STATISTICS_OIS_DATA_MODE */ public static final int STATISTICS_OIS_DATA_MODE_ON = 1; // // Enumeration values for CaptureRequest#TONEMAP_MODE // /** *

Use the tone mapping curve specified in * the {@link CaptureRequest#TONEMAP_CURVE android.tonemap.curve}* entries.

*

All color enhancement and tonemapping must be disabled, except * for applying the tonemapping curve specified by * {@link CaptureRequest#TONEMAP_CURVE android.tonemap.curve}.

*

Must not slow down frame rate relative to raw * sensor output.

* * @see CaptureRequest#TONEMAP_CURVE * @see CaptureRequest#TONEMAP_MODE */ public static final int TONEMAP_MODE_CONTRAST_CURVE = 0; /** *
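A minimal sketch of supplying an explicit (here, linear) curve for CONTRAST_CURVE mode (illustrative; {@code requestBuilder} is assumed, and {@code TonemapCurve} is {@code android.hardware.camera2.params.TonemapCurve}):

    float[] linear = {0.0f, 0.0f, 1.0f, 1.0f}; // (Pin, Pout) control points
    requestBuilder.set(CaptureRequest.TONEMAP_MODE,
            CameraMetadata.TONEMAP_MODE_CONTRAST_CURVE);
    requestBuilder.set(CaptureRequest.TONEMAP_CURVE,
            new TonemapCurve(linear, linear, linear));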

Advanced gamma mapping and color enhancement may be applied, without * reducing frame rate compared to raw sensor output.

* @see CaptureRequest#TONEMAP_MODE */ public static final int TONEMAP_MODE_FAST = 1; /** *

High-quality gamma mapping and color enhancement will be applied, at * the cost of possibly reduced frame rate compared to raw sensor output.

* @see CaptureRequest#TONEMAP_MODE */ public static final int TONEMAP_MODE_HIGH_QUALITY = 2; /** *

Use the gamma value specified in {@link CaptureRequest#TONEMAP_GAMMA android.tonemap.gamma} to perform * tonemapping.

*

All color enhancement and tonemapping must be disabled, except * for applying the tonemapping curve specified by {@link CaptureRequest#TONEMAP_GAMMA android.tonemap.gamma}.

*

Must not slow down frame rate relative to raw sensor output.

* * @see CaptureRequest#TONEMAP_GAMMA * @see CaptureRequest#TONEMAP_MODE */ public static final int TONEMAP_MODE_GAMMA_VALUE = 3; /** *

Use the preset tonemapping curve specified in * {@link CaptureRequest#TONEMAP_PRESET_CURVE android.tonemap.presetCurve} to perform tonemapping.

*

All color enhancement and tonemapping must be disabled, except * for applying the tonemapping curve specified by * {@link CaptureRequest#TONEMAP_PRESET_CURVE android.tonemap.presetCurve}.

*

Must not slow down frame rate relative to raw sensor output.

* * @see CaptureRequest#TONEMAP_PRESET_CURVE * @see CaptureRequest#TONEMAP_MODE */ public static final int TONEMAP_MODE_PRESET_CURVE = 4; // // Enumeration values for CaptureRequest#TONEMAP_PRESET_CURVE // /** *

Tonemapping curve is defined by sRGB

* @see CaptureRequest#TONEMAP_PRESET_CURVE */ public static final int TONEMAP_PRESET_CURVE_SRGB = 0; /** *

Tonemapping curve is defined by ITU-R BT.709

* @see CaptureRequest#TONEMAP_PRESET_CURVE */ public static final int TONEMAP_PRESET_CURVE_REC709 = 1; // // Enumeration values for CaptureRequest#DISTORTION_CORRECTION_MODE // /** *

No distortion correction is applied.

* @see CaptureRequest#DISTORTION_CORRECTION_MODE */ public static final int DISTORTION_CORRECTION_MODE_OFF = 0; /** *

Lens distortion correction is applied without reducing frame rate * relative to sensor output. It may be the same as OFF if distortion correction would * reduce frame rate relative to sensor.

* @see CaptureRequest#DISTORTION_CORRECTION_MODE */ public static final int DISTORTION_CORRECTION_MODE_FAST = 1; /** *

High-quality distortion correction is applied, at the cost of * possibly reduced frame rate relative to sensor output.

* @see CaptureRequest#DISTORTION_CORRECTION_MODE */ public static final int DISTORTION_CORRECTION_MODE_HIGH_QUALITY = 2; // // Enumeration values for CaptureResult#CONTROL_AE_STATE // /** *

AE is off or recently reset.

*

When a camera device is opened, it starts in * this state. This is a transient state, the camera device may skip reporting * this state in capture result.

* @see CaptureResult#CONTROL_AE_STATE */ public static final int CONTROL_AE_STATE_INACTIVE = 0; /** *

AE doesn't yet have a good set of control values * for the current scene.

*

This is a transient state, the camera device may skip * reporting this state in capture result.

* @see CaptureResult#CONTROL_AE_STATE */ public static final int CONTROL_AE_STATE_SEARCHING = 1; /** *

AE has a good set of control values for the * current scene.

* @see CaptureResult#CONTROL_AE_STATE */ public static final int CONTROL_AE_STATE_CONVERGED = 2; /** *

AE has been locked.

* @see CaptureResult#CONTROL_AE_STATE */ public static final int CONTROL_AE_STATE_LOCKED = 3; /** *

AE has a good set of control values, but flash * needs to be fired for good quality still * capture.

* @see CaptureResult#CONTROL_AE_STATE */ public static final int CONTROL_AE_STATE_FLASH_REQUIRED = 4; /** *

AE has been asked to do a precapture sequence * and is currently executing it.

*

Precapture can be triggered through setting * {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} to START. Currently * active and completed (if it causes camera device internal AE lock) precapture * metering sequence can be canceled through setting * {@link CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER android.control.aePrecaptureTrigger} to CANCEL.

*

Once PRECAPTURE completes, AE will transition to CONVERGED * or FLASH_REQUIRED as appropriate. This is a transient * state, the camera device may skip reporting this state in * capture result.

* * @see CaptureRequest#CONTROL_AE_PRECAPTURE_TRIGGER * @see CaptureResult#CONTROL_AE_STATE */ public static final int CONTROL_AE_STATE_PRECAPTURE = 5; // // Enumeration values for CaptureResult#CONTROL_AF_STATE // /** *
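As an illustrative sketch of consuming these AE states (not part of the generated documentation; {@code captureStillPicture()} is an assumed application-defined method):

    CameraCaptureSession.CaptureCallback aeWatcher =
            new CameraCaptureSession.CaptureCallback() {
        @Override
        public void onCaptureCompleted(CameraCaptureSession session,
                CaptureRequest request, TotalCaptureResult result) {
            Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
            // Treat a missing AE state (LEGACY devices) as converged.
            if (aeState == null
                    || aeState == CameraMetadata.CONTROL_AE_STATE_CONVERGED
                    || aeState == CameraMetadata.CONTROL_AE_STATE_FLASH_REQUIRED) {
                captureStillPicture();
            }
        }
    };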

AF is off or has not yet tried to scan/been asked * to scan.

*

When a camera device is opened, it starts in this * state. This is a transient state, the camera device may * skip reporting this state in capture * result.

* @see CaptureResult#CONTROL_AF_STATE */ public static final int CONTROL_AF_STATE_INACTIVE = 0; /** *

AF is currently performing an AF scan initiated by the camera device in a continuous autofocus mode.

*

Only used by CONTINUOUS_* AF modes. This is a transient * state, the camera device may skip reporting this state in * capture result.

* @see CaptureResult#CONTROL_AF_STATE */ public static final int CONTROL_AF_STATE_PASSIVE_SCAN = 1; /** *

AF currently believes it is in focus, but may * restart scanning at any time.

*

Only used by CONTINUOUS_* AF modes. This is a transient * state, the camera device may skip reporting this state in * capture result.

* @see CaptureResult#CONTROL_AF_STATE */ public static final int CONTROL_AF_STATE_PASSIVE_FOCUSED = 2; /** *

AF is performing an AF scan because it was * triggered by AF trigger.

*

Only used by AUTO or MACRO AF modes. This is a transient * state, the camera device may skip reporting this state in * capture result.

* @see CaptureResult#CONTROL_AF_STATE */ public static final int CONTROL_AF_STATE_ACTIVE_SCAN = 3; /** *

AF believes it is focused correctly and has locked * focus.

*

This state is reached only after an explicit START AF trigger has been * sent ({@link CaptureRequest#CONTROL_AF_TRIGGER android.control.afTrigger}), when good focus has been obtained.

*

The lens will remain stationary until the AF mode ({@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}) is changed or * a new AF trigger is sent to the camera device ({@link CaptureRequest#CONTROL_AF_TRIGGER android.control.afTrigger}).

* * @see CaptureRequest#CONTROL_AF_MODE * @see CaptureRequest#CONTROL_AF_TRIGGER * @see CaptureResult#CONTROL_AF_STATE */ public static final int CONTROL_AF_STATE_FOCUSED_LOCKED = 4; /** *

AF has failed to focus successfully and has locked * focus.

*

This state is reached only after an explicit START AF trigger has been * sent ({@link CaptureRequest#CONTROL_AF_TRIGGER android.control.afTrigger}), when good focus cannot be obtained.

*

The lens will remain stationary until the AF mode ({@link CaptureRequest#CONTROL_AF_MODE android.control.afMode}) is changed or * a new AF trigger is sent to the camera device ({@link CaptureRequest#CONTROL_AF_TRIGGER android.control.afTrigger}).

* * @see CaptureRequest#CONTROL_AF_MODE * @see CaptureRequest#CONTROL_AF_TRIGGER * @see CaptureResult#CONTROL_AF_STATE */ public static final int CONTROL_AF_STATE_NOT_FOCUSED_LOCKED = 5; /** *

AF finished a passive scan without finding focus, * and may restart scanning at any time.

*

Only used by CONTINUOUS_* AF modes. This is a transient state, the camera * device may skip reporting this state in capture result.

*

LEGACY camera devices do not support this state. When a passive * scan has finished, it will always go to PASSIVE_FOCUSED.

* @see CaptureResult#CONTROL_AF_STATE */ public static final int CONTROL_AF_STATE_PASSIVE_UNFOCUSED = 6; // // Enumeration values for CaptureResult#CONTROL_AWB_STATE // /** *
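A short sketch of checking for the two locked states after a START trigger (illustrative; {@code result} is a {@code CaptureResult} delivered to an application callback):

    Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
    boolean afLocked = afState != null
            && (afState == CameraMetadata.CONTROL_AF_STATE_FOCUSED_LOCKED
                    || afState == CameraMetadata.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);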

AWB is not in auto mode, or has not yet started metering.

*

When a camera device is opened, it starts in this * state. This is a transient state, the camera device may * skip reporting this state in capture * result.

* @see CaptureResult#CONTROL_AWB_STATE */ public static final int CONTROL_AWB_STATE_INACTIVE = 0; /** *

AWB doesn't yet have a good set of control * values for the current scene.

*

This is a transient state, the camera device * may skip reporting this state in capture result.

* @see CaptureResult#CONTROL_AWB_STATE */ public static final int CONTROL_AWB_STATE_SEARCHING = 1; /** *

AWB has a good set of control values for the * current scene.

* @see CaptureResult#CONTROL_AWB_STATE */ public static final int CONTROL_AWB_STATE_CONVERGED = 2; /** *

AWB has been locked.

* @see CaptureResult#CONTROL_AWB_STATE */ public static final int CONTROL_AWB_STATE_LOCKED = 3; // // Enumeration values for CaptureResult#CONTROL_AF_SCENE_CHANGE // /** *

Scene change is not detected within the AF region(s).

* @see CaptureResult#CONTROL_AF_SCENE_CHANGE */ public static final int CONTROL_AF_SCENE_CHANGE_NOT_DETECTED = 0; /** *

Scene change is detected within the AF region(s).

* @see CaptureResult#CONTROL_AF_SCENE_CHANGE */ public static final int CONTROL_AF_SCENE_CHANGE_DETECTED = 1; // // Enumeration values for CaptureResult#FLASH_STATE // /** *

No flash on camera.

* @see CaptureResult#FLASH_STATE */ public static final int FLASH_STATE_UNAVAILABLE = 0; /** *

Flash is charging and cannot be fired.

* @see CaptureResult#FLASH_STATE */ public static final int FLASH_STATE_CHARGING = 1; /** *

Flash is ready to fire.

* @see CaptureResult#FLASH_STATE */ public static final int FLASH_STATE_READY = 2; /** *

Flash fired for this capture.

* @see CaptureResult#FLASH_STATE */ public static final int FLASH_STATE_FIRED = 3; /** *

Flash partially illuminated this frame.

*

This is usually due to the next or previous frame having * the flash fire, and the flash spilling into this capture * due to hardware limitations.

* @see CaptureResult#FLASH_STATE */ public static final int FLASH_STATE_PARTIAL = 4; // // Enumeration values for CaptureResult#LENS_STATE // /** *

The lens parameters ({@link CaptureRequest#LENS_FOCAL_LENGTH android.lens.focalLength}, {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance}, * {@link CaptureRequest#LENS_FILTER_DENSITY android.lens.filterDensity} and {@link CaptureRequest#LENS_APERTURE android.lens.aperture}) are not changing.

* * @see CaptureRequest#LENS_APERTURE * @see CaptureRequest#LENS_FILTER_DENSITY * @see CaptureRequest#LENS_FOCAL_LENGTH * @see CaptureRequest#LENS_FOCUS_DISTANCE * @see CaptureResult#LENS_STATE */ public static final int LENS_STATE_STATIONARY = 0; /** *

One or several of the lens parameters * ({@link CaptureRequest#LENS_FOCAL_LENGTH android.lens.focalLength}, {@link CaptureRequest#LENS_FOCUS_DISTANCE android.lens.focusDistance}, * {@link CaptureRequest#LENS_FILTER_DENSITY android.lens.filterDensity} or {@link CaptureRequest#LENS_APERTURE android.lens.aperture}) is * currently changing.

* * @see CaptureRequest#LENS_APERTURE * @see CaptureRequest#LENS_FILTER_DENSITY * @see CaptureRequest#LENS_FOCAL_LENGTH * @see CaptureRequest#LENS_FOCUS_DISTANCE * @see CaptureResult#LENS_STATE */ public static final int LENS_STATE_MOVING = 1; // // Enumeration values for CaptureResult#STATISTICS_SCENE_FLICKER // /** *

The camera device does not detect any flickering illumination * in the current scene.

* @see CaptureResult#STATISTICS_SCENE_FLICKER */ public static final int STATISTICS_SCENE_FLICKER_NONE = 0; /** *

The camera device detects illumination flickering at 50Hz * in the current scene.

* @see CaptureResult#STATISTICS_SCENE_FLICKER */ public static final int STATISTICS_SCENE_FLICKER_50HZ = 1; /** *

The camera device detects illumination flickering at 60Hz * in the current scene.

* @see CaptureResult#STATISTICS_SCENE_FLICKER */ public static final int STATISTICS_SCENE_FLICKER_60HZ = 2; // // Enumeration values for CaptureResult#SYNC_FRAME_NUMBER // /** *

The current result is not yet fully synchronized to any request.

*

Synchronization is in progress, and reading metadata from this * result may include a mix of data that have taken effect since the * last synchronization time.

*

In some future result, within {@link CameraCharacteristics#SYNC_MAX_LATENCY android.sync.maxLatency} frames, this value will update to the actual frame number the result is guaranteed to be synchronized to (as long as the request settings remain constant).

* * @see CameraCharacteristics#SYNC_MAX_LATENCY * @see CaptureResult#SYNC_FRAME_NUMBER * @hide */ public static final int SYNC_FRAME_NUMBER_CONVERGING = -1; /** *

The current result's synchronization status is unknown.

*

The result may have already converged, or it may be in * progress. Reading from this result may include some mix * of settings from past requests.

*

After a settings change, the new settings will eventually all * take effect for the output buffers and results. However, this * value will not change when that happens. Altering settings * rapidly may provide outcomes using mixes of settings from recent * requests.

*

This value is intended primarily for backwards compatibility with * the older camera implementations (for android.hardware.Camera).

* @see CaptureResult#SYNC_FRAME_NUMBER * @hide */ public static final int SYNC_FRAME_NUMBER_UNKNOWN = -2; /*~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~ * End generated code *~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~O@*/ }