From 3651aa3f8296eae150590b765fda1d87f49fb5b6 Mon Sep 17 00:00:00 2001 From: Eino-Ville Talvala Date: Thu, 14 Apr 2016 14:03:35 -0700 Subject: Fix DevCamera name and many other things DevCamera is a camera application that uses the camera2 API for all operations. It has a flexible output configuration, and is meant as a sample application for developers and Android OEMs, and as a testing and development tool for the Android camera stack. - Rename package/etc to DevCamera - Add new DevCamera app icon - Update comments and class names - Implement runtime permissions - Add license information to all files - Clean up makefile Bug: 27543253 Change-Id: I050ef9034108053c87ae7cb84e6a72c3e88948cb --- src/com/android/devcamera/Api2Camera.java | 808 ++++++++++++++++++++++ src/com/android/devcamera/BitmapUtility.java | 69 ++ src/com/android/devcamera/CameraDeviceReport.java | 396 +++++++++++ src/com/android/devcamera/CameraInfoCache.java | 231 +++++++ src/com/android/devcamera/CameraInterface.java | 143 ++++ src/com/android/devcamera/CameraTimer.java | 33 + src/com/android/devcamera/DevCameraActivity.java | 644 +++++++++++++++++ src/com/android/devcamera/GyroListener.java | 22 + src/com/android/devcamera/GyroOperations.java | 105 +++ src/com/android/devcamera/LoggingCallbacks.java | 138 ++++ src/com/android/devcamera/MediaSaver.java | 114 +++ src/com/android/devcamera/NormalizedFace.java | 91 +++ src/com/android/devcamera/PreviewOverlay.java | 225 ++++++ 13 files changed, 3019 insertions(+) create mode 100644 src/com/android/devcamera/Api2Camera.java create mode 100644 src/com/android/devcamera/BitmapUtility.java create mode 100644 src/com/android/devcamera/CameraDeviceReport.java create mode 100644 src/com/android/devcamera/CameraInfoCache.java create mode 100644 src/com/android/devcamera/CameraInterface.java create mode 100644 src/com/android/devcamera/CameraTimer.java create mode 100644 src/com/android/devcamera/DevCameraActivity.java create mode 100644 
src/com/android/devcamera/GyroListener.java create mode 100644 src/com/android/devcamera/GyroOperations.java create mode 100644 src/com/android/devcamera/LoggingCallbacks.java create mode 100644 src/com/android/devcamera/MediaSaver.java create mode 100644 src/com/android/devcamera/NormalizedFace.java create mode 100644 src/com/android/devcamera/PreviewOverlay.java (limited to 'src/com/android') diff --git a/src/com/android/devcamera/Api2Camera.java b/src/com/android/devcamera/Api2Camera.java new file mode 100644 index 0000000..1c61cb0 --- /dev/null +++ b/src/com/android/devcamera/Api2Camera.java @@ -0,0 +1,808 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.android.devcamera; + +import android.content.Context; +import android.graphics.ImageFormat; +import android.graphics.SurfaceTexture; +import android.hardware.camera2.CameraAccessException; +import android.hardware.camera2.CameraCaptureSession; +import android.hardware.camera2.CameraDevice; +import android.hardware.camera2.CameraManager; +import android.hardware.camera2.CameraMetadata; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.CaptureResult; +import android.hardware.camera2.TotalCaptureResult; +import android.hardware.camera2.params.Face; +import android.hardware.camera2.params.InputConfiguration; +import android.media.Image; +import android.media.ImageReader; +import android.media.ImageWriter; +import android.media.MediaActionSound; +import android.opengl.GLES11Ext; +import android.opengl.GLES20; +import android.os.Handler; +import android.os.HandlerThread; +import android.os.SystemClock; +import android.util.Log; +import android.util.Size; +import android.view.Surface; + +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.LinkedList; +import java.util.List; + +import javax.microedition.khronos.opengles.GL10; + + +/** + * Api2Camera : a camera2 implementation + * + * The goal here is to make the simplest possible API2 camera, + * where individual streams and capture options (e.g. edge enhancement, + * noise reduction, face detection) can be toggled on and off. + * + */ + +public class Api2Camera implements CameraInterface, SurfaceTexture.OnFrameAvailableListener { + private static final String TAG = "DevCamera_API2"; + + // Nth frame to log; put 10^6 if you don't want logging. + private static int LOG_NTH_FRAME = 30; + // Log dropped frames. There are a log on Angler MDA32. 
+ private static boolean LOG_DROPPED_FRAMES = true; + + // IMPORTANT: Only one of these can be true: + private static boolean SECOND_YUV_IMAGEREADER_STREAM = true; + private static boolean SECOND_SURFACE_TEXTURE_STREAM = false; + + // Enable raw stream if available. + private static boolean RAW_STREAM_ENABLE = true; + // Use JPEG ImageReader and YUV ImageWriter if reprocessing is available + private static final boolean USE_REPROCESSING_IF_AVAIL = true; + + // Whether we are continuously taking pictures, or not. + boolean mIsBursting = false; + // Last total capture result + TotalCaptureResult mLastTotalCaptureResult; + + // ImageReader/Writer buffer sizes. + private static final int YUV1_IMAGEREADER_SIZE = 8; + private static final int YUV2_IMAGEREADER_SIZE = 8; + private static final int RAW_IMAGEREADER_SIZE = 8; + private static final int IMAGEWRITER_SIZE = 2; + + private CameraInfoCache mCameraInfoCache; + private CameraManager mCameraManager; + private CameraCaptureSession mCurrentCaptureSession; + private MediaActionSound mMediaActionSound = new MediaActionSound(); + + MyCameraCallback mMyCameraCallback; + + // Generally everything running on this thread & this module is *not thread safe*. + private HandlerThread mOpsThread; + private Handler mOpsHandler; + private HandlerThread mInitThread; + private Handler mInitHandler; + private HandlerThread mJpegListenerThread; + private Handler mJpegListenerHandler; + + Context mContext; + boolean mCameraIsFront; + SurfaceTexture mSurfaceTexture; + Surface mSurfaceTextureSurface; + + private boolean mFirstFrameArrived; + private ImageReader mYuv1ImageReader; + private int mYuv1ImageCounter; + // Handle to last received Image: allows ZSL to be implemented. + private Image mYuv1LastReceivedImage = null; + // Time at which reprocessing request went in (right now we are doing one at a time). 
+ private long mReprocessingRequestNanoTime; + + private ImageReader mJpegImageReader; + private ImageReader mYuv2ImageReader; + private int mYuv2ImageCounter; + private ImageReader mRawImageReader; + private int mRawImageCounter; + + // Starting the preview requires each of these 3 to be true/non-null: + volatile private Surface mPreviewSurface; + volatile private CameraDevice mCameraDevice; + volatile boolean mAllThingsInitialized = false; + + /** + * Constructor. + */ + public Api2Camera(Context context, boolean useFrontCamera) { + mContext = context; + mCameraIsFront = useFrontCamera; + mCameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE); + mCameraInfoCache = new CameraInfoCache(mCameraManager, useFrontCamera); + + // Create thread and handler for camera operations. + mOpsThread = new HandlerThread("CameraOpsThread"); + mOpsThread.start(); + mOpsHandler = new Handler(mOpsThread.getLooper()); + + // Create thread and handler for slow initialization operations. + // Don't want to use camera operations thread because we want to time camera open carefully. + mInitThread = new HandlerThread("CameraInitThread"); + mInitThread.start(); + mInitHandler = new Handler(mInitThread.getLooper()); + mInitHandler.post(new Runnable() { + @Override + public void run() { + InitializeAllTheThings(); + mAllThingsInitialized = true; + Log.v(TAG, "STARTUP_REQUIREMENT ImageReader initialization done."); + tryToStartCaptureSession(); + } + }); + + // Set initial Noise and Edge modes. + if (mCameraInfoCache.IS_BULLHEAD || mCameraInfoCache.IS_ANGLER) { + // YUV streams. + mCaptureNoiseIndex = 4 /*ZSL*/ % mCameraInfoCache.noiseModes.length; + mCaptureEdgeIndex = 3 /*ZSL*/ % mCameraInfoCache.edgeModes.length; + // Reprocessing. + mReprocessingNoiseIndex = 2 /*High Quality*/ % mCameraInfoCache.noiseModes.length; + mReprocessingEdgeIndex = 2 /*HIgh Quality*/ % mCameraInfoCache.edgeModes.length; + } + } + + // Ugh, why is this stuff so slow? 
+ private void InitializeAllTheThings() { + + // Thread to handle returned JPEGs. + mJpegListenerThread = new HandlerThread("CameraJpegThread"); + mJpegListenerThread.start(); + mJpegListenerHandler = new Handler(mJpegListenerThread.getLooper()); + + // Create ImageReader to receive JPEG image buffers via reprocessing. + mJpegImageReader = ImageReader.newInstance( + mCameraInfoCache.getYuvStream1Size().getWidth(), + mCameraInfoCache.getYuvStream1Size().getHeight(), + ImageFormat.JPEG, + 2); + mJpegImageReader.setOnImageAvailableListener(mJpegImageListener, mJpegListenerHandler); + + // Create ImageReader to receive YUV image buffers. + mYuv1ImageReader = ImageReader.newInstance( + mCameraInfoCache.getYuvStream1Size().getWidth(), + mCameraInfoCache.getYuvStream1Size().getHeight(), + ImageFormat.YUV_420_888, + YUV1_IMAGEREADER_SIZE); + mYuv1ImageReader.setOnImageAvailableListener(mYuv1ImageListener, mOpsHandler); + + if (SECOND_YUV_IMAGEREADER_STREAM) { + // Create ImageReader to receive YUV image buffers. + mYuv2ImageReader = ImageReader.newInstance( + mCameraInfoCache.getYuvStream2Size().getWidth(), + mCameraInfoCache.getYuvStream2Size().getHeight(), + ImageFormat.YUV_420_888, + YUV2_IMAGEREADER_SIZE); + mYuv2ImageReader.setOnImageAvailableListener(mYuv2ImageListener, mOpsHandler); + } + + if (SECOND_SURFACE_TEXTURE_STREAM) { + int[] textures = new int[1]; + // generate one texture pointer and bind it as an external texture. + GLES20.glGenTextures(1, textures, 0); + GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]); + // No mip-mapping with camera source. + GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, + GL10.GL_TEXTURE_MIN_FILTER, + GL10.GL_LINEAR); + GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, + GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR); + // Clamp to edge is only option. 
+ GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, + GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE); + GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, + GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE); + + int texture_id = textures[0]; + mSurfaceTexture = new SurfaceTexture(texture_id); + mSurfaceTexture.setDefaultBufferSize(320, 240); + mSurfaceTexture.setOnFrameAvailableListener(this); + mSurfaceTextureSurface = new Surface(mSurfaceTexture); + } + + if (RAW_STREAM_ENABLE && mCameraInfoCache.rawAvailable()) { + // Create ImageReader to receive thumbnail sized YUV image buffers. + mRawImageReader = ImageReader.newInstance( + mCameraInfoCache.getRawStreamSize().getWidth(), + mCameraInfoCache.getRawStreamSize().getHeight(), + mCameraInfoCache.getRawFormat(), + RAW_IMAGEREADER_SIZE); + mRawImageReader.setOnImageAvailableListener(mRawImageListener, mOpsHandler); + } + + // Load click sound. + mMediaActionSound.load(MediaActionSound.SHUTTER_CLICK); + + } + + public void setCallback(MyCameraCallback callback) { + mMyCameraCallback = callback; + } + + public void triggerAFScan() { + Log.v(TAG, "AF trigger"); + issuePreviewCaptureRequest(true); + } + + public void setCAF() { + Log.v(TAG, "run CAF"); + issuePreviewCaptureRequest(false); + } + + public void takePicture() { + mMediaActionSound.play(MediaActionSound.SHUTTER_CLICK); + mOpsHandler.post(new Runnable() { + @Override + public void run() { + runReprocessing(); + } + }); + } + + public void onFrameAvailable (SurfaceTexture surfaceTexture) { + Log.v(TAG, " onFrameAvailable(SurfaceTexture)"); + } + + public void setBurst(boolean go) { + // if false to true transition. 
+ if (go && !mIsBursting) { + takePicture(); + } + mIsBursting = go; + } + + public boolean isRawAvailable() { + return mCameraInfoCache.rawAvailable(); + } + + public boolean isReprocessingAvailable() { + return mCameraInfoCache.reprocessingAvailable(); + } + + @Override + public Size getPreviewSize() { + return mCameraInfoCache.getPreviewSize(); + } + + @Override + public void openCamera() { + // If API2 FULL mode is not available, display toast, do nothing. + if (!mCameraInfoCache.isCamera2FullModeAvailable()) { + mMyCameraCallback.noCamera2Full(); + if (!mCameraInfoCache.IS_NEXUS_6) { + return; + } + } + + Log.v(TAG, "Opening camera " + mCameraInfoCache.getCameraId()); + mOpsHandler.post(new Runnable() { + @Override + public void run() { + CameraTimer.t_open_start = SystemClock.elapsedRealtime(); + try { + mCameraManager.openCamera(mCameraInfoCache.getCameraId(), mCameraStateCallback, null); + } catch (CameraAccessException e) { + Log.e(TAG, "Unable to openCamera()."); + } + } + }); + } + + @Override + public void closeCamera() { + // TODO: We are stalling main thread now which is bad. + Log.v(TAG, "Closing camera " + mCameraInfoCache.getCameraId()); + if (mCameraDevice != null) { + try { + mCurrentCaptureSession.abortCaptures(); + } catch (CameraAccessException e) { + Log.e(TAG, "Could not abortCaptures()."); + } + mCameraDevice.close(); + } + mCurrentCaptureSession = null; + Log.v(TAG, "Done closing camera " + mCameraInfoCache.getCameraId()); + } + + public void startPreview(final Surface surface) { + Log.v(TAG, "STARTUP_REQUIREMENT preview Surface ready."); + mPreviewSurface = surface; + tryToStartCaptureSession(); + } + + private CameraDevice.StateCallback mCameraStateCallback = new LoggingCallbacks.DeviceStateCallback() { + @Override + public void onOpened(CameraDevice camera) { + CameraTimer.t_open_end = SystemClock.elapsedRealtime(); + mCameraDevice = camera; + Log.v(TAG, "STARTUP_REQUIREMENT Done opening camera " + mCameraInfoCache.getCameraId() + + ". 
HAL open took: (" + (CameraTimer.t_open_end - CameraTimer.t_open_start) + " ms)"); + + super.onOpened(camera); + tryToStartCaptureSession(); + } + }; + + private void tryToStartCaptureSession() { + if (mCameraDevice != null && mAllThingsInitialized && mPreviewSurface != null) { + mOpsHandler.post(new Runnable() { + @Override + public void run() { + // It used to be: this needed to be posted on a Handler. + startCaptureSession(); + } + }); + } + } + + // Create CameraCaptureSession. Callback will start repeating request with current parameters. + private void startCaptureSession() { + CameraTimer.t_session_go = SystemClock.elapsedRealtime(); + + Log.v(TAG, "Configuring session.."); + List outputSurfaces = new ArrayList(3); + + outputSurfaces.add(mPreviewSurface); + Log.v(TAG, " .. added SurfaceView " + mCameraInfoCache.getPreviewSize().getWidth() + + " x " + mCameraInfoCache.getPreviewSize().getHeight()); + + outputSurfaces.add(mYuv1ImageReader.getSurface()); + Log.v(TAG, " .. added YUV ImageReader " + mCameraInfoCache.getYuvStream1Size().getWidth() + + " x " + mCameraInfoCache.getYuvStream1Size().getHeight()); + + if (SECOND_YUV_IMAGEREADER_STREAM) { + outputSurfaces.add(mYuv2ImageReader.getSurface()); + Log.v(TAG, " .. added YUV ImageReader " + mCameraInfoCache.getYuvStream2Size().getWidth() + + " x " + mCameraInfoCache.getYuvStream2Size().getHeight()); + } + + if (SECOND_SURFACE_TEXTURE_STREAM) { + outputSurfaces.add(mSurfaceTextureSurface); + Log.v(TAG, " .. added SurfaceTexture"); + } + + if (RAW_STREAM_ENABLE && mCameraInfoCache.rawAvailable()) { + outputSurfaces.add(mRawImageReader.getSurface()); + Log.v(TAG, " .. added Raw ImageReader " + mCameraInfoCache.getRawStreamSize().getWidth() + + " x " + mCameraInfoCache.getRawStreamSize().getHeight()); + } + + if (USE_REPROCESSING_IF_AVAIL && mCameraInfoCache.reprocessingAvailable()) { + outputSurfaces.add(mJpegImageReader.getSurface()); + Log.v(TAG, " .. 
added JPEG ImageReader " + mCameraInfoCache.getJpegStreamSize().getWidth() + + " x " + mCameraInfoCache.getJpegStreamSize().getHeight()); + } + + try { + if (USE_REPROCESSING_IF_AVAIL && mCameraInfoCache.reprocessingAvailable()) { + InputConfiguration inputConfig = new InputConfiguration(mCameraInfoCache.getYuvStream1Size().getWidth(), + mCameraInfoCache.getYuvStream1Size().getHeight(), ImageFormat.YUV_420_888); + mCameraDevice.createReprocessableCaptureSession(inputConfig, outputSurfaces, + mSessionStateCallback, null); + Log.v(TAG, " Call to createReprocessableCaptureSession complete."); + } else { + mCameraDevice.createCaptureSession(outputSurfaces, mSessionStateCallback, null); + Log.v(TAG, " Call to createCaptureSession complete."); + } + + } catch (CameraAccessException e) { + Log.e(TAG, "Error configuring ISP."); + } + } + + ImageWriter mImageWriter; + + private CameraCaptureSession.StateCallback mSessionStateCallback = new LoggingCallbacks.SessionStateCallback() { + @Override + public void onReady(CameraCaptureSession session) { + Log.v(TAG, "capture session onReady(). HAL capture session took: (" + (SystemClock.elapsedRealtime() - CameraTimer.t_session_go) + " ms)"); + mCurrentCaptureSession = session; + issuePreviewCaptureRequest(false); + + if (session.isReprocessable()) { + mImageWriter = ImageWriter.newInstance(session.getInputSurface(), IMAGEWRITER_SIZE); + mImageWriter.setOnImageReleasedListener( + new ImageWriter.OnImageReleasedListener() { + @Override + public void onImageReleased(ImageWriter writer) { + Log.v(TAG, "ImageWriter.OnImageReleasedListener onImageReleased()"); + } + }, null); + Log.v(TAG, "Created ImageWriter."); + } + super.onReady(session); + } + }; + + // Variables to hold capture flow state. 
+ private boolean mCaptureYuv1 = false; + private boolean mCaptureYuv2 = false; + private boolean mCaptureRaw = false; + private int mCaptureNoiseIndex = CaptureRequest.NOISE_REDUCTION_MODE_OFF; + private int mCaptureEdgeIndex = CaptureRequest.EDGE_MODE_OFF; + private boolean mCaptureFace = false; + // Variables to hold reprocessing state. + private int mReprocessingNoiseIndex = CaptureRequest.NOISE_REDUCTION_MODE_OFF; + private int mReprocessingEdgeIndex = CaptureRequest.EDGE_MODE_OFF; + + + public void setCaptureFlow(Boolean yuv1, Boolean yuv2, Boolean raw10, Boolean nr, Boolean edge, Boolean face) { + if (yuv1 != null) mCaptureYuv1 = yuv1; + if (yuv2 != null) mCaptureYuv2 = yuv2; + if (raw10 != null) mCaptureRaw = raw10 && RAW_STREAM_ENABLE; + if (nr) { + mCaptureNoiseIndex = ++mCaptureNoiseIndex % mCameraInfoCache.noiseModes.length; + } + if (edge) { + mCaptureEdgeIndex = ++mCaptureEdgeIndex % mCameraInfoCache.edgeModes.length; + } + if (face != null) mCaptureFace = face; + mMyCameraCallback.setNoiseEdgeText( + "NR " + noiseModeToString(mCameraInfoCache.noiseModes[mCaptureNoiseIndex]), + "Edge " + edgeModeToString(mCameraInfoCache.edgeModes[mCaptureEdgeIndex]) + ); + + if (mCurrentCaptureSession != null) { + issuePreviewCaptureRequest(false); + } + } + + public void setReprocessingFlow(Boolean nr, Boolean edge) { + if (nr) { + mReprocessingNoiseIndex = ++mReprocessingNoiseIndex % mCameraInfoCache.noiseModes.length; + } + if (edge) { + mReprocessingEdgeIndex = ++mReprocessingEdgeIndex % mCameraInfoCache.edgeModes.length; + } + mMyCameraCallback.setNoiseEdgeTextForReprocessing( + "NR " + noiseModeToString(mCameraInfoCache.noiseModes[mReprocessingNoiseIndex]), + "Edge " + edgeModeToString(mCameraInfoCache.edgeModes[mReprocessingEdgeIndex]) + ); + } + + public void issuePreviewCaptureRequest(boolean AFtrigger) { + CameraTimer.t_burst = SystemClock.elapsedRealtime(); + Log.v(TAG, "issuePreviewCaptureRequest..."); + try { + CaptureRequest.Builder b1 = 
mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); + b1.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_USE_SCENE_MODE); + b1.set(CaptureRequest.CONTROL_SCENE_MODE, CameraMetadata.CONTROL_SCENE_MODE_FACE_PRIORITY); + if (AFtrigger) { + b1.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_AUTO); + } else { + b1.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE); + } + + b1.set(CaptureRequest.NOISE_REDUCTION_MODE, mCameraInfoCache.noiseModes[mCaptureNoiseIndex]); + b1.set(CaptureRequest.EDGE_MODE, mCameraInfoCache.edgeModes[mCaptureEdgeIndex]); + b1.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, mCaptureFace ? mCameraInfoCache.bestFaceDetectionMode() : CaptureRequest.STATISTICS_FACE_DETECT_MODE_OFF); + + Log.v(TAG, " .. NR=" + mCaptureNoiseIndex + " Edge=" + mCaptureEdgeIndex + " Face=" + mCaptureFace); + + if (mCaptureYuv1) { + b1.addTarget(mYuv1ImageReader.getSurface()); + Log.v(TAG, " .. YUV1 on"); + } + + if (mCaptureRaw) { + b1.addTarget(mRawImageReader.getSurface()); + } + + b1.addTarget(mPreviewSurface); + + if (mCaptureYuv2) { + if (SECOND_SURFACE_TEXTURE_STREAM) { + b1.addTarget(mSurfaceTextureSurface); + } + if (SECOND_YUV_IMAGEREADER_STREAM) { + b1.addTarget(mYuv2ImageReader.getSurface()); + } + Log.v(TAG, " .. 
YUV2 on"); + } + + if (AFtrigger) { + b1.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START); + mCurrentCaptureSession.capture(b1.build(), mCaptureCallback, mOpsHandler); + b1.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE); + } + mCurrentCaptureSession.setRepeatingRequest(b1.build(), mCaptureCallback, mOpsHandler); + } catch (CameraAccessException e) { + Log.e(TAG, "Could not access camera for issuePreviewCaptureRequest."); + } + } + + void runReprocessing() { + if (mYuv1LastReceivedImage == null) { + Log.e(TAG, "No YUV Image available."); + return; + } + mImageWriter.queueInputImage(mYuv1LastReceivedImage); + Log.v(TAG, " Sent YUV1 image to ImageWriter.queueInputImage()"); + try { + CaptureRequest.Builder b1 = mCameraDevice.createReprocessCaptureRequest(mLastTotalCaptureResult); + // Portrait. + b1.set(CaptureRequest.JPEG_ORIENTATION, 90); + b1.set(CaptureRequest.JPEG_QUALITY, (byte) 95); + b1.set(CaptureRequest.NOISE_REDUCTION_MODE, mCameraInfoCache.noiseModes[mReprocessingNoiseIndex]); + b1.set(CaptureRequest.EDGE_MODE, mCameraInfoCache.edgeModes[mReprocessingEdgeIndex]); + b1.addTarget(mJpegImageReader.getSurface()); + mCurrentCaptureSession.capture(b1.build(), mReprocessingCaptureCallback, mOpsHandler); + mReprocessingRequestNanoTime = System.nanoTime(); + } catch (CameraAccessException e) { + Log.e(TAG, "Could not access camera for issuePreviewCaptureRequest."); + } + mYuv1LastReceivedImage = null; + Log.v(TAG, " Reprocessing request submitted."); + } + + + /********************************* + * onImageAvailable() processing * + *********************************/ + + ImageReader.OnImageAvailableListener mYuv1ImageListener = + new ImageReader.OnImageAvailableListener() { + @Override + public void onImageAvailable(ImageReader reader) { + Image img = reader.acquireLatestImage(); + if (img == null) { + Log.e(TAG, "Null image returned YUV1"); + return; + } + if (mYuv1LastReceivedImage != null) { + 
mYuv1LastReceivedImage.close(); + } + mYuv1LastReceivedImage = img; + if (++mYuv1ImageCounter % LOG_NTH_FRAME == 0) { + Log.v(TAG, "YUV1 buffer available, Frame #=" + mYuv1ImageCounter + " w=" + img.getWidth() + " h=" + img.getHeight() + " time=" + img.getTimestamp()); + } + + } + }; + + + ImageReader.OnImageAvailableListener mJpegImageListener = + new ImageReader.OnImageAvailableListener() { + @Override + public void onImageAvailable(ImageReader reader) { + Image img = reader.acquireLatestImage(); + if (img == null) { + Log.e(TAG, "Null image returned JPEG"); + return; + } + Image.Plane plane0 = img.getPlanes()[0]; + final ByteBuffer buffer = plane0.getBuffer(); + long dt = System.nanoTime() - mReprocessingRequestNanoTime; + Log.v(TAG, String.format("JPEG buffer available, w=%d h=%d time=%d size=%d dt=%.1f ms ISO=%d", + img.getWidth(), img.getHeight(), img.getTimestamp(), buffer.capacity(), 0.000001 * dt, mLastIso)); + // Save JPEG on the utility thread, + final byte[] jpegBuf; + if (buffer.hasArray()) { + jpegBuf = buffer.array(); + } else { + jpegBuf = new byte[buffer.capacity()]; + buffer.get(jpegBuf); + } + mMyCameraCallback.jpegAvailable(jpegBuf, img.getWidth(), img.getHeight()); + img.close(); + + // take (reprocess) another picture right away if bursting. 
+ if (mIsBursting) { + takePicture(); + } + } + }; + + + ImageReader.OnImageAvailableListener mYuv2ImageListener = + new ImageReader.OnImageAvailableListener() { + @Override + public void onImageAvailable(ImageReader reader) { + Image img = reader.acquireLatestImage(); + if (img == null) { + Log.e(TAG, "Null image returned YUV2"); + } else { + if (++mYuv2ImageCounter % LOG_NTH_FRAME == 0) { + Log.v(TAG, "YUV2 buffer available, Frame #=" + mYuv2ImageCounter + " w=" + img.getWidth() + " h=" + img.getHeight() + " time=" + img.getTimestamp()); + } + img.close(); + } + } + }; + + + ImageReader.OnImageAvailableListener mRawImageListener = + new ImageReader.OnImageAvailableListener() { + @Override + public void onImageAvailable(ImageReader reader) { + final Image img = reader.acquireLatestImage(); + if (img == null) { + Log.e(TAG, "Null image returned RAW"); + } else { + if (++mRawImageCounter % LOG_NTH_FRAME == 0) { + Image.Plane plane0 = img.getPlanes()[0]; + final ByteBuffer buffer = plane0.getBuffer(); + Log.v(TAG, "Raw buffer available, Frame #=" + mRawImageCounter + "w=" + img.getWidth() + + " h=" + img.getHeight() + + " format=" + CameraDeviceReport.getFormatName(img.getFormat()) + + " time=" + img.getTimestamp() + + " size=" + buffer.capacity() + + " getRowStride()=" + plane0.getRowStride()); + } + img.close(); + } + } + }; + + /************************************* + * CaptureResult metadata processing * + *************************************/ + + private CameraCaptureSession.CaptureCallback mCaptureCallback = new LoggingCallbacks.SessionCaptureCallback() { + @Override + public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) { + if (!mFirstFrameArrived) { + mFirstFrameArrived = true; + long now = SystemClock.elapsedRealtime(); + long dt = now - CameraTimer.t0; + long camera_dt = now - CameraTimer.t_session_go + CameraTimer.t_open_end - CameraTimer.t_open_start; + long repeating_req_dt = now - 
CameraTimer.t_burst; + Log.v(TAG, "App control to first frame: (" + dt + " ms)"); + Log.v(TAG, "HAL request to first frame: (" + repeating_req_dt + " ms) " + " Total HAL wait: (" + camera_dt + " ms)"); + mMyCameraCallback.receivedFirstFrame(); + mMyCameraCallback.performanceDataAvailable((int) dt, (int) camera_dt, null); + } + publishFrameData(result); + // Used for reprocessing. + mLastTotalCaptureResult = result; + super.onCaptureCompleted(session, request, result); + } + }; + + // Reprocessing capture completed. + private CameraCaptureSession.CaptureCallback mReprocessingCaptureCallback = new LoggingCallbacks.SessionCaptureCallback() { + @Override + public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) { + Log.v(TAG, "Reprocessing onCaptureCompleted()"); + } + }; + + private static double SHORT_LOG_EXPOSURE = Math.log10(1000000000 / 10000); // 1/10000 second + private static double LONG_LOG_EXPOSURE = Math.log10(1000000000 / 10); // 1/10 second + public int FPS_CALC_LOOKBACK = 15; + private LinkedList mFrameTimes = new LinkedList(); + + private void publishFrameData(TotalCaptureResult result) { + // Faces. + final Face[] faces = result.get(CaptureResult.STATISTICS_FACES); + NormalizedFace[] newFaces = new NormalizedFace[faces.length]; + if (faces.length > 0) { + int offX = mCameraInfoCache.faceOffsetX(); + int offY = mCameraInfoCache.faceOffsetY(); + int dX = mCameraInfoCache.activeAreaWidth() - 2 * offX; + int dY = mCameraInfoCache.activeAreaHeight() - 2 * offY; + if (mCameraInfoCache.IS_NEXUS_6 && mCameraIsFront) { + // Front camera on Nexus 6 is currently 16 x 9 cropped to 4 x 3. + // TODO: Generalize this. 
+ int cropOffset = dX / 8; + dX -= 2 * cropOffset; + offX += cropOffset; + } + int orientation = mCameraInfoCache.sensorOrientation(); + for (int i = 0; i < faces.length; ++i) { + newFaces[i] = new NormalizedFace(faces[i], dX, dY, offX, offY); + if (mCameraIsFront && orientation == 90) { + newFaces[i].mirrorInY(); + } + if (mCameraIsFront && orientation == 270) { + newFaces[i].mirrorInX(); + } + if (!mCameraIsFront && orientation == 270) { + newFaces[i].mirrorInX(); + newFaces[i].mirrorInY(); + } + } + } + + // Normalized lens and exposure coordinates. + double rm = Math.log10(result.get(CaptureResult.SENSOR_EXPOSURE_TIME)); + float normExposure = (float) ((rm - SHORT_LOG_EXPOSURE) / (LONG_LOG_EXPOSURE - SHORT_LOG_EXPOSURE)); + float normLensPos = (mCameraInfoCache.getDiopterHi() - result.get(CaptureResult.LENS_FOCUS_DISTANCE)) / (mCameraInfoCache.getDiopterHi() - mCameraInfoCache.getDiopterLow()); + mLastIso = result.get(CaptureResult.SENSOR_SENSITIVITY); + + // Update frame arrival history. + mFrameTimes.add(result.get(CaptureResult.SENSOR_TIMESTAMP)); + if (mFrameTimes.size() > FPS_CALC_LOOKBACK) { + mFrameTimes.removeFirst(); + } + + // Frame drop detector + { + float frameDuration = result.get(CaptureResult.SENSOR_FRAME_DURATION); + if (mFrameTimes.size() > 1) { + long dt = result.get(CaptureResult.SENSOR_TIMESTAMP) - mFrameTimes.get(mFrameTimes.size()-2); + if (dt > 3 * frameDuration / 2 && LOG_DROPPED_FRAMES) { + float drops = (dt * 1f / frameDuration) - 1f; + Log.e(TAG, String.format("dropped %.2f frames", drops)); + mMyCameraCallback.performanceDataAvailable(null, null, drops); + } + } + } + + // FPS calc. + float fps = 0; + if (mFrameTimes.size() > 1) { + long dt = mFrameTimes.getLast() - mFrameTimes.getFirst(); + fps = (mFrameTimes.size() - 1) * 1000000000f / dt; + fps = (float) Math.floor(fps + 0.1); // round to nearest whole number, ish. + } + + // Do callback. 
+ if (mMyCameraCallback != null) { + mMyCameraCallback.frameDataAvailable(newFaces, normExposure, normLensPos, fps, + (int) mLastIso, result.get(CaptureResult.CONTROL_AF_STATE), result.get(CaptureResult.CONTROL_AE_STATE), result.get(CaptureResult.CONTROL_AWB_STATE)); + } else { + Log.v(TAG, "mMyCameraCallbacks is null!!."); + } + } + + long mLastIso = 0; + + /********************* + * UTILITY FUNCTIONS * + *********************/ + + private static String edgeModeToString(int mode) { + switch (mode) { + case CaptureRequest.EDGE_MODE_OFF: + return "OFF"; + case CaptureRequest.EDGE_MODE_FAST: + return "FAST"; + case CaptureRequest.EDGE_MODE_HIGH_QUALITY: + return "HiQ"; + case 3: + return "ZSL"; + } + return Integer.toString(mode); + } + + + private static String noiseModeToString(int mode) { + switch (mode) { + case CaptureRequest.NOISE_REDUCTION_MODE_OFF: + return "OFF"; + case CaptureRequest.NOISE_REDUCTION_MODE_FAST: + return "FAST"; + case CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY: + return "HiQ"; + case 3: + return "MIN"; + case 4: + return "ZSL"; + } + return Integer.toString(mode); + } +} diff --git a/src/com/android/devcamera/BitmapUtility.java b/src/com/android/devcamera/BitmapUtility.java new file mode 100644 index 0000000..683b6a7 --- /dev/null +++ b/src/com/android/devcamera/BitmapUtility.java @@ -0,0 +1,69 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.android.devcamera; + +import android.graphics.Bitmap; +import android.graphics.BitmapFactory; +import android.graphics.Matrix; +import android.media.Image; + +import java.nio.ByteBuffer; + +/** + * Some Bitmap utility functions. + */ +public class BitmapUtility { + + public static Bitmap bitmapFromJpeg(byte[] data) { + // 32K buffer. + byte[] decodeBuffer = new byte[32 * 1024]; // 32K buffer. + + BitmapFactory.Options opts = new BitmapFactory.Options(); + opts.inSampleSize = 16; // 3264 / 16 = 204. + opts.inTempStorage = decodeBuffer; + Bitmap b = BitmapFactory.decodeByteArray(data, 0, data.length, opts); + + return rotatedBitmap(b); + } + + public static Bitmap bitmapFromYuvImage(Image img) { + int w = img.getWidth(); + int h = img.getHeight(); + ByteBuffer buf0 = img.getPlanes()[0].getBuffer(); + int len = buf0.capacity(); + int[] colors = new int[len]; + int alpha = 255 << 24; + int green; + for (int i = 0; i < len; i++) { + green = ((int) buf0.get(i)) & 255; + colors[i] = green << 16 | green << 8 | green | alpha; + } + Bitmap b = Bitmap.createBitmap(colors, w, h, Bitmap.Config.ARGB_8888); + + return rotatedBitmap(b); + } + + /** + * Returns parameter bitmap rotated 90 degrees + */ + private static Bitmap rotatedBitmap(Bitmap b) { + Matrix mat = new Matrix(); + mat.postRotate(90); + Bitmap b2 = Bitmap.createBitmap(b, 0, 0,b.getWidth(),b.getHeight(), mat, true); + return b2; + } + +} diff --git a/src/com/android/devcamera/CameraDeviceReport.java b/src/com/android/devcamera/CameraDeviceReport.java new file mode 100644 index 0000000..ebd96ed --- /dev/null +++ b/src/com/android/devcamera/CameraDeviceReport.java @@ -0,0 +1,396 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.android.devcamera; + +import android.app.Activity; +import android.content.Context; +import android.graphics.ImageFormat; +import android.graphics.Rect; +import android.graphics.SurfaceTexture; +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CameraManager; +import android.hardware.camera2.CameraMetadata; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.CaptureResult; +import android.hardware.camera2.params.StreamConfigurationMap; +import android.os.Build; +import android.util.DisplayMetrics; +import android.util.Log; +import android.util.Range; +import android.util.Size; +import android.util.SizeF; +import android.view.SurfaceHolder; +import android.view.WindowManager; + +public class CameraDeviceReport { + private static final String TAG = "DevCamera_INFO"; + + // Note: we actually need the activity to get window information + public static void printReport(Activity activity, boolean firstCameraOnly) { + printDisplayInfo(activity); + printCameraSystemInfo(activity, firstCameraOnly); + } + + /** + * Print out information about all cameras. 
+ */ + private static void printCameraSystemInfo(Activity activity, boolean firstCameraOnly) { + CameraManager cameraMgr = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE); // "camera" + String[] cameralist; + try { + cameralist = cameraMgr.getCameraIdList(); + Log.v(TAG, "Number of cameras:" + cameralist.length); + } catch (Exception e) { + Log.e(TAG, "Could not get camera ID list: "+e); + return; + } + for (String cameraId : cameralist) { + printCameraInfo(cameraMgr, cameraId); + if (firstCameraOnly) { + break; + } + } + } + + /** + * Print out information about a specific camera. + */ + private static void printCameraInfo(CameraManager manager, String id) { + Log.v(TAG, "============= CAMERA " + id + " INFO ============="); + + CameraCharacteristics p; + try { + p = manager.getCameraCharacteristics(id); + } catch (Exception e) { + Log.e(TAG, "Could not get getCameraCharacteristics"); + return; + } + // dumpsys media.camera + + // Print out various CameraCharacteristics. 
+ Rect size = p.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); + if (size != null) { + Log.v(TAG, "SENSOR_INFO_ACTIVE_ARRAY_SIZE: " + + size.width() + "x" + size.height()); + } else { + Log.v(TAG, "SENSOR_INFO_ACTIVE_ARRAY_SIZE: null"); + } + + Size size2 = p.get(CameraCharacteristics.SENSOR_INFO_PIXEL_ARRAY_SIZE); + Log.v(TAG, "SENSOR_INFO_PIXEL_ARRAY_SIZE: " + size2.getWidth() + "x" + size2.getHeight()); + + SizeF size3 = p.get(CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE); + Log.v(TAG, "SENSOR_INFO_PHYSICAL_SIZE: " + size3.getWidth() + "x" + size3.getHeight()); + + + int sensorOrientation = p.get(CameraCharacteristics.SENSOR_ORIENTATION); + Log.v(TAG, "SENSOR_ORIENTATION: " + sensorOrientation); + + Log.v(TAG, "SENSOR_INFO_TIMESTAMP_SOURCE: " + + getTimestampSourceName(p.get(CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE))); + + Log.v(TAG, "LENS_INFO_FOCUS_DISTANCE_CALIBRATION: " + + getFocusDistanceCalibrationName(p.get(CameraCharacteristics.LENS_INFO_FOCUS_DISTANCE_CALIBRATION))); + + int[] faceModes = p.get(CameraCharacteristics.STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES); + Log.v(TAG, "STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES: "); + for (int i = 0; i < faceModes.length; i++) { + switch (faceModes[i]) { + case CameraCharacteristics.STATISTICS_FACE_DETECT_MODE_OFF: + Log.v(TAG, " STATISTICS_FACE_DETECT_MODE_OFF"); + break; + case CameraCharacteristics.STATISTICS_FACE_DETECT_MODE_SIMPLE: + Log.v(TAG, " STATISTICS_FACE_DETECT_MODE_SIMPLE"); + break; + case CameraCharacteristics.STATISTICS_FACE_DETECT_MODE_FULL: + Log.v(TAG, " STATISTICS_FACE_DETECT_MODE_FULL"); + break; + default: + Log.v(TAG, " STATISTICS_FACE_DETECT_MODE_? 
(unknown)"); + } + } + + Log.v(TAG, "STATISTICS_INFO_MAX_FACE_COUNT: " + p.get(CameraCharacteristics.STATISTICS_INFO_MAX_FACE_COUNT)); + + Log.v(TAG, "REQUEST_PIPELINE_MAX_DEPTH: " + + p.get(CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH)); + + Log.v(TAG, "REQUEST_MAX_NUM_OUTPUT_RAW: " + + p.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW)); + Log.v(TAG, "REQUEST_MAX_NUM_OUTPUT_PROC: " + + p.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC)); + Log.v(TAG, "REQUEST_MAX_NUM_OUTPUT_PROC_STALLING: " + + p.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING)); + + Log.v(TAG, "EDGE_AVAILABLE_EDGE_MODES: " + + intsToString(p.get(CameraCharacteristics.EDGE_AVAILABLE_EDGE_MODES))); + + Log.v(TAG, "NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES: " + + intsToString(p.get(CameraCharacteristics.NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES))); + + Log.v(TAG, "REQUEST_MAX_NUM_OUTPUT_PROC_STALLING: " + + p.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING)); + + + // REQUEST_AVAILABLE_CAPABILITIES + boolean mHasReprocessing = false; + { + Log.v(TAG, "CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES:"); + for (int item : p.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES)) { + Log.v(TAG, " " + item + " = " + getCapabilityName(item)); + if (item == 4 || item == 7) { + mHasReprocessing = true; + } + } + } + + StreamConfigurationMap map = p.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); + { + int[] formats = map.getOutputFormats(); + Log.v(TAG, "number of output formats: " + formats.length); + for (int i = 0; i < formats.length; i++) { + Log.v(TAG, "output sizes for format " + formats[i] + + " = ImageFormat." 
+ getFormatName(formats[i]) + " = " + + ImageFormat.getBitsPerPixel(formats[i]) + " bits per pixel."); + Size[] sizes = map.getOutputSizes(formats[i]); + if (sizes != null) { + Log.v(TAG, " Size Stall duration Min frame duration"); + for (int j = 0; j < sizes.length; j++) { + Log.v(TAG, String.format(" %10s %7d ms %7d ms \n", + sizes[j].toString(), + map.getOutputStallDuration(formats[i], sizes[j]) / 1000000, + map.getOutputMinFrameDuration(formats[i], sizes[j]) / 1000000 + )); + } + } + } + } + + if (mHasReprocessing) { + int[] formats = map.getInputFormats(); + Log.v(TAG, "number of input formats: " + formats.length); + for (int i = 0; i < formats.length; i++) { + Size[] sizes = map.getInputSizes(formats[i]); + Log.v(TAG, "input sizes for format " + formats[i] + " = ImageFormat." + + getFormatName(formats[i]) + " are: " + sizesToString(sizes)); + } + } + + { + Size[] sizes = map.getOutputSizes(SurfaceHolder.class); + Log.v(TAG, "output sizes for SurfaceHolder.class are: " + sizesToString(sizes)); + } + + { + Size[] sizes = map.getOutputSizes(SurfaceTexture.class); + Log.v(TAG, "output sizes for SurfaceTexture.class are: " + sizesToString(sizes)); + } + + // JPEG thumbnail sizes + { + Size[] sizes = p.get(CameraCharacteristics.JPEG_AVAILABLE_THUMBNAIL_SIZES); + Log.v(TAG, "JPEG thumbnail sizes: " + sizesToString(sizes)); + } + + // REQUEST HARDWARE LEVEL + { + int level = p.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL); + Log.v(TAG, "CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL: " + getHardwareLevelName(level)); + } + + + // REQUEST CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES + { + Log.v(TAG, "CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES:"); + for (Range<Integer> item : p.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES)) { + Log.v(TAG, " " + item); + } + } + // SENSOR_INFO_EXPOSURE_TIME_RANGE + { + Range<Long> rr = p.get(CameraCharacteristics.SENSOR_INFO_EXPOSURE_TIME_RANGE); + Log.v(TAG, 
"CameraCharacteristics.SENSOR_INFO_EXPOSURE_TIME_RANGE: " + rr); + } + + + // CAPTURE REQUEST KEYS + { + String keys = ""; + for (CaptureRequest.Key key : p.getAvailableCaptureRequestKeys()) { + keys += key.getName() + " "; + } + Log.v(TAG, "CameraCharacteristics.getAvailableCaptureRequestKeys() = " + keys); + } + + // CAPTURE RESULT KEYS + { + String keys = ""; + for (CaptureResult.Key key : p.getAvailableCaptureResultKeys()) { + keys += key.getName() + " "; + } + Log.v(TAG, "CameraCharacteristics.getAvailableCaptureResultKeys() = " + keys); + } + + } + + public static String sizesToString(Size[] sizes) { + String result = ""; + if (sizes != null) { + for (int j = 0; j < sizes.length; j++) { + result += sizes[j].toString() + " "; + } + } + return result; + } + + public static String intsToString(int[] modes) { + String result = ""; + if (modes != null) { + for (int j = 0; j < modes.length; j++) { + result += modes[j] + " "; + } + } + return result; + } + + public static String getTimestampSourceName(Integer level) { + if (level == null) return "null"; + switch (level) { + case CameraMetadata.SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME: + return "SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME"; + case CameraMetadata.SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN: + return "SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN"; + } + return "Unknown"; + } + + public static String getFocusDistanceCalibrationName(Integer level) { + if (level == null) return "null"; + switch (level) { + case CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE: + return "LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE"; + case CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED: + return "LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED"; + case CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED: + return "LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED"; + } + return "Unknown"; + } + + public static String getCapabilityName(int format) { + switch (format) { + case 
CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE: + return "REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE"; + case CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR: + return "REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR"; + case CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING: + return "REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING"; + case CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW: + return "REQUEST_AVAILABLE_CAPABILITIES_RAW"; + case 4: + return "REQUEST_AVAILABLE_CAPABILITIES_OPAQUE_REPROCESSING"; + case 5: + return "REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS"; + case 6: + return "REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE"; + case 7: + return "REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING"; + case 8: + return "REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT"; + } + return "Unknown"; + } + + public static String getHardwareLevelName(int level) { + switch (level) { + case CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_FULL: + return "INFO_SUPPORTED_HARDWARE_LEVEL_FULL"; + case CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED: + return "INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED"; + case CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY: + return "INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY"; + } + return "Unknown"; + } + + + public static String getFormatName(int format) { + switch (format) { + // Android M + //case ImageFormat.PRIVATE: + // return "PRIVATE"; + // Android L + case ImageFormat.JPEG: + return "JPEG"; + case ImageFormat.RGB_565: + return "RGB_565"; + case ImageFormat.NV16: + return "NV16"; + case ImageFormat.YUY2: + return "YUY2"; + case ImageFormat.YV12: + return "YV12"; + case ImageFormat.NV21: + return "NV21"; + case ImageFormat.YUV_420_888: + return "YUV_420_888"; + case ImageFormat.RAW_SENSOR: + return "RAW_SENSOR"; + case ImageFormat.RAW10: + return "RAW10"; + } + return "Unknown"; + } + + + /** + * Print out various information about the device 
display. + */ + private static void printDisplayInfo(Activity activity) { + Log.v(TAG, "============= DEVICE INFO ============="); + Log.v(TAG, "Build.DEVICE = " + Build.DEVICE); + Log.v(TAG, "Build.FINGERPRINT = " + Build.FINGERPRINT); + Log.v(TAG, "Build.BRAND = " + Build.BRAND); + Log.v(TAG, "Build.MODEL = " + Build.MODEL); + Log.v(TAG, "Build.PRODUCT = " + Build.PRODUCT); + Log.v(TAG, "Build.MANUFACTURER = " + Build.MANUFACTURER); + Log.v(TAG, "Build.VERSION.CODENAME = " + Build.VERSION.CODENAME); + Log.v(TAG, "Build.VERSION.SDK_INT = " + Build.VERSION.SDK_INT); + + Log.v(TAG, "============= DEVICE DISPLAY INFO ============="); + WindowManager windowMgr = activity.getWindowManager(); + + // Nexus 5 is 360dp * 567dp + // Each dp is 3 hardware pixels + Log.v(TAG, "screen width dp = " + activity.getResources().getConfiguration().screenWidthDp); + Log.v(TAG, "screen height dp = " + activity.getResources().getConfiguration().screenHeightDp); + + DisplayMetrics metrics = new DisplayMetrics(); + // With chrome subtracted. + windowMgr.getDefaultDisplay().getMetrics(metrics); + Log.v(TAG, "screen width pixels = " + metrics.widthPixels); + Log.v(TAG, "screen height pixels = " + metrics.heightPixels); + // Native. + windowMgr.getDefaultDisplay().getRealMetrics(metrics); + Log.v(TAG, "real screen width pixels = " + metrics.widthPixels); + Log.v(TAG, "real screen height pixels = " + metrics.heightPixels); + + Log.v(TAG, "refresh rate = " + windowMgr.getDefaultDisplay().getRefreshRate() + " Hz"); + } + + + +} diff --git a/src/com/android/devcamera/CameraInfoCache.java b/src/com/android/devcamera/CameraInfoCache.java new file mode 100644 index 0000000..03d27a4 --- /dev/null +++ b/src/com/android/devcamera/CameraInfoCache.java @@ -0,0 +1,231 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.android.devcamera; + +import android.graphics.ImageFormat; +import android.graphics.Rect; +import android.hardware.camera2.CameraCharacteristics; +import android.hardware.camera2.CameraManager; +import android.hardware.camera2.CameraMetadata; +import android.hardware.camera2.params.StreamConfigurationMap; +import android.os.Build; +import android.util.Log; +import android.util.Size; + +/** + * Caches (static) information about the first/main camera. + * Convenience functions represent data from CameraCharacteristics. + */ + +public class CameraInfoCache { + private static final String TAG = "DevCamera_CAMINFO"; + + public static final boolean IS_NEXUS_5 = "hammerhead".equalsIgnoreCase(Build.DEVICE); + public static final boolean IS_NEXUS_6 = "shamu".equalsIgnoreCase(Build.DEVICE); + public static final boolean IS_NEXUS_9 = "flounder".equalsIgnoreCase(Build.DEVICE); + public static final boolean IS_ANGLER = "angler".equalsIgnoreCase(Build.DEVICE); + public static final boolean IS_BULLHEAD = "bullhead".equalsIgnoreCase(Build.DEVICE); + public static final boolean IS_SAMSUNG_S6 = "zerofltevzw".equalsIgnoreCase(Build.DEVICE); + public static final boolean IS_LG_G4 = "p1_lgu_kr".equalsIgnoreCase(Build.PRODUCT); + + public int[] noiseModes; + public int[] edgeModes; + + private CameraCharacteristics mCameraCharacteristics; + private String mCameraId; + private Size mLargestYuvSize; + private Size mLargestJpegSize; + private Size mRawSize; + private Rect mActiveArea; + private Integer mSensorOrientation; + private Integer mRawFormat; + 
private int mBestFaceMode; + private boolean mCamera2FullModeAvailable; + + /** + * Constructor. + */ + public CameraInfoCache(CameraManager cameraMgr, boolean useFrontCamera) { + String[] cameralist; + try { + cameralist = cameraMgr.getCameraIdList(); + for (String id : cameralist) { + mCameraCharacteristics = cameraMgr.getCameraCharacteristics(id); + Integer facing = mCameraCharacteristics.get(CameraCharacteristics.LENS_FACING); + if (facing == (useFrontCamera ? CameraMetadata.LENS_FACING_FRONT : CameraMetadata.LENS_FACING_BACK)) { + mCameraId = id; + break; + } + } + } catch (Exception e) { + Log.e(TAG, "ERROR: Could not get camera ID list / no camera information is available: " + e); + return; + } + // Should have mCameraId as this point. + if (mCameraId == null) { + Log.e(TAG, "ERROR: Could not find a suitable rear or front camera."); + return; + } + + // Store YUV_420_888, JPEG, Raw info + StreamConfigurationMap map = mCameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); + int[] formats = map.getOutputFormats(); + long lowestStall = Long.MAX_VALUE; + for (int i = 0; i < formats.length; i++) { + if (formats[i] == ImageFormat.YUV_420_888) { + mLargestYuvSize = returnLargestSize(map.getOutputSizes(formats[i])); + } + if (formats[i] == ImageFormat.JPEG) { + mLargestJpegSize = returnLargestSize(map.getOutputSizes(formats[i])); + } + if (formats[i] == ImageFormat.RAW10 || formats[i] == ImageFormat.RAW_SENSOR) { // TODO: Add RAW12 + Size size = returnLargestSize(map.getOutputSizes(formats[i])); + long stall = map.getOutputStallDuration(formats[i], size); + if (stall < lowestStall) { + mRawFormat = formats[i]; + mRawSize = size; + lowestStall = stall; + } + } + } + + mActiveArea = mCameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); + + // Compute best face mode. 
+ int[] faceModes = mCameraCharacteristics.get(CameraCharacteristics.STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES); + for (int i=0; i < faceModes.length; i++) { + if (faceModes[i] > mBestFaceMode) { + mBestFaceMode = faceModes[i]; + } + } + edgeModes = mCameraCharacteristics.get(CameraCharacteristics.EDGE_AVAILABLE_EDGE_MODES); + noiseModes = mCameraCharacteristics.get(CameraCharacteristics.NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES); + + // Misc stuff. + int hwLevel = mCameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL); + + mCamera2FullModeAvailable = (hwLevel != CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) + && (hwLevel >= CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_FULL); + + mSensorOrientation = mCameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION); + } + + public int sensorOrientation() { + return mSensorOrientation; + } + + public boolean isCamera2FullModeAvailable() { + return mCamera2FullModeAvailable; + } + + public float getDiopterLow() { + if (IS_NEXUS_6) { + return 0f; + } + return 0f; // Infinity + } + + public float getDiopterHi() { + if (IS_NEXUS_6) { + return 14.29f; + } + return 16f; + } + + /** + * Private utility function. 
+ */ + private Size returnLargestSize(Size[] sizes) { + Size largestSize = null; + int area = 0; + for (int j = 0; j < sizes.length; j++) { + if (sizes[j].getHeight() * sizes[j].getWidth() > area) { + area = sizes[j].getHeight() * sizes[j].getWidth(); + largestSize = sizes[j]; + } + } + return largestSize; + } + + public int bestFaceDetectionMode() { + return mBestFaceMode; + } + + public int faceOffsetX() { + return (mActiveArea.width() - mLargestYuvSize.getWidth()) / 2; + } + + public int faceOffsetY() { + return (mActiveArea.height() - mLargestYuvSize.getHeight()) / 2; + } + + public int activeAreaWidth() { + return mActiveArea.width(); + } + + public int activeAreaHeight() { + return mActiveArea.height(); + } + + public Rect getActiveAreaRect() { + return mActiveArea; + } + + public String getCameraId() { + return mCameraId; + } + + public Size getPreviewSize() { + float aspect = mLargestYuvSize.getWidth() / mLargestYuvSize.getHeight(); + aspect = aspect > 1f ? aspect : 1f / aspect; + if (aspect > 1.6) { + return new Size(1920, 1080); // TODO: Check available resolutions. + } + if (IS_ANGLER || IS_BULLHEAD) { + return new Size(1440, 1080); + } + return new Size(1280, 960); // TODO: Check available resolutions. + } + + public Size getJpegStreamSize() { + return mLargestJpegSize; + } + + public Size getYuvStream1Size() { + return mLargestYuvSize; + } + + public Size getYuvStream2Size() { + return new Size(320, 240); + } + + public boolean rawAvailable() { + return mRawSize != null; + } + public boolean reprocessingAvailable() { + // TODO: Actually query capabilities list. 
+ return (IS_ANGLER || IS_BULLHEAD); + } + + public Integer getRawFormat() { + return mRawFormat; + } + + public Size getRawStreamSize() { + return mRawSize; + } + +} diff --git a/src/com/android/devcamera/CameraInterface.java b/src/com/android/devcamera/CameraInterface.java new file mode 100644 index 0000000..61c1a63 --- /dev/null +++ b/src/com/android/devcamera/CameraInterface.java @@ -0,0 +1,143 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.android.devcamera; + +import android.util.Size; +import android.view.Surface; + +/** + * This is a simple camera interface not specific to API1 or API2. + */ +public interface CameraInterface { + /** + * Return preview size to use pass thru from camera API. + */ + Size getPreviewSize(); + + /** + * Open the camera. Call startPreview() to actually see something. + */ + void openCamera(); + + /** + * Start preview to a surface. Also need to call openCamera(). + * @param surface + */ + void startPreview(Surface surface); + + /** + * Close the camera. + */ + void closeCamera(); + + /** + * Take a picture and return data with provided callback. + * Preview must be started. + */ + void takePicture(); + + /** + * Set whether we are continuously taking pictures, or not. + */ + void setBurst(boolean go); + + /** + * Take a picture and return data with provided callback. + * Preview must be started. 
+ */ + void setCallback(MyCameraCallback callback); + + /** + * Is a raw stream available. + */ + boolean isRawAvailable(); + + /** + * Is a reprocessing available. + */ + boolean isReprocessingAvailable(); + + /** + * Triggers an AF scan. Leaves camera in AUTO. + */ + void triggerAFScan(); + + /** + * Runs CAF (continuous picture). + */ + void setCAF(); + + /** + * Camera picture callbacks. + */ + interface MyCameraCallback { + /** + * What text to display on the Edge and NR mode buttons. + */ + void setNoiseEdgeText(String s1, String s2); + + /** + * What text to display on the Edge and NR mode buttons (reprocessing flow). + */ + void setNoiseEdgeTextForReprocessing(String s1, String s2); + + /** + * Full size JPEG is available. + * @param jpegData + * @param x + * @param y + */ + void jpegAvailable(byte[] jpegData, int x, int y); + + /** + * Metadata from an image frame. + * + * @param info Info string we print just under viewfinder. + * + * fps, mLastIso, af, ae, awb + * @param faces Face coordinates. + * @param normExposure Exposure value normalized from 0 to 1. + * @param normLensPos Lens position value normalized from 0 to 1. + * @param fps + * @param iso + * @param afState + * @param aeState + * @param awbState + * + */ + void frameDataAvailable(NormalizedFace[] faces, float normExposure, float normLensPos, float fps, int iso, int afState, int aeState, int awbState); + + /** + * Misc performance data. + */ + void performanceDataAvailable(Integer timeToFirstFrame, Integer halWaitTime, Float droppedFrameCount); + + /** + * Called when camera2 FULL not available. + */ + void noCamera2Full(); + + /** + * Used to set the preview SurfaceView background color from black to transparent. 
+ */ + void receivedFirstFrame(); + } + + void setCaptureFlow(Boolean yuv1, Boolean yuv2, Boolean raw10, Boolean nr, Boolean edge, Boolean face); + + void setReprocessingFlow(Boolean nr, Boolean edge); + +} diff --git a/src/com/android/devcamera/CameraTimer.java b/src/com/android/devcamera/CameraTimer.java new file mode 100644 index 0000000..1ecac50 --- /dev/null +++ b/src/com/android/devcamera/CameraTimer.java @@ -0,0 +1,33 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.android.devcamera; + +/** + * A global spot to store some times. + */ +public class CameraTimer { + // Got control in onCreate() + public static long t0; + // Sent open() to camera. + public static long t_open_start; + // Open from camera done. + public static long t_open_end; + // Told camera to configure capture session. + public static long t_session_go; + // Told session to do repeating request. + public static long t_burst; + +} diff --git a/src/com/android/devcamera/DevCameraActivity.java b/src/com/android/devcamera/DevCameraActivity.java new file mode 100644 index 0000000..fe49a3a --- /dev/null +++ b/src/com/android/devcamera/DevCameraActivity.java @@ -0,0 +1,644 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.android.devcamera; + +import android.Manifest; +import android.content.Intent; +import android.content.pm.PackageManager; +import android.graphics.Color; +import android.hardware.camera2.CaptureResult; +import android.hardware.SensorManager; +import android.os.Bundle; +import android.app.Activity; +import android.os.Handler; +import android.os.HandlerThread; +import android.os.SystemClock; +import android.util.DisplayMetrics; +import android.util.Log; +import android.util.Size; +import android.view.Gravity; +import android.view.SurfaceHolder; +import android.view.SurfaceView; +import android.view.View; +import android.view.WindowManager; +import android.widget.Button; +import android.widget.FrameLayout; +import android.widget.LinearLayout; +import android.widget.TextView; +import android.widget.Toast; +import android.widget.ToggleButton; + + +/** + * A minimum camera app. + * To keep it simple: portrait mode only. 
+ */ +public class DevCameraActivity extends Activity implements CameraInterface.MyCameraCallback, SurfaceHolder.Callback { + private static final String TAG = "DevCamera_UI"; + + private static final boolean LOG_FRAME_DATA = false; + private static final int AF_TRIGGER_HOLD_MILLIS = 4000; + private static final boolean STARTUP_FULL_YUV_ON = true; + private static final boolean START_WITH_FRONT_CAMERA = false; + + private static final int PERMISSIONS_REQUEST_CAMERA = 1; + private boolean mPermissionCheckActive = false; + + private SurfaceView mPreviewView; + private SurfaceHolder mPreviewHolder; + private PreviewOverlay mPreviewOverlay; + private FrameLayout mPreviewFrame; + + private TextView mLabel1; + private TextView mLabel2; + private ToggleButton mToggleFrontCam; // Use front camera + private ToggleButton mToggleYuvFull; // full YUV + private ToggleButton mToggleYuvVga; // VGA YUV + private ToggleButton mToggleRaw; // raw10 + private Button mButtonNoiseMode; // Noise reduction mode + private Button mButtonEdgeModeReprocess; // Edge mode + private Button mButtonNoiseModeReprocess; // Noise reduction mode for reprocessing + private Button mButtonEdgeMode; // Edge mode for reprocessing + private ToggleButton mToggleFace; // Face detection + private ToggleButton mToggleShow3A; // 3A info + private ToggleButton mToggleGyro; // Gyro + private ToggleButton mToggleBurstJpeg; + private ToggleButton mToggleSaveSdCard; + private LinearLayout mReprocessingGroup; + private Handler mMainHandler; + private CameraInterface mCamera; + + // Used for saving JPEGs. + private HandlerThread mUtilityThread; + private Handler mUtilityHandler; + + // send null for initialization + View.OnClickListener mTransferUiStateToCameraState = new View.OnClickListener() { + @Override + public void onClick(View view) { + // set capture flow. 
+ if (view == mToggleYuvFull || view == mToggleYuvVga || view == mToggleRaw || + view == mButtonNoiseMode || view == mButtonEdgeMode || view == mToggleFace || view == null) + mCamera.setCaptureFlow( + mToggleYuvFull.isChecked(), + mToggleYuvVga.isChecked(), + mToggleRaw.isChecked(), + view == mButtonNoiseMode, /* cycle noise reduction mode */ + view == mButtonEdgeMode, /* cycle edge mode */ + mToggleFace.isChecked() + ); + // set reprocessing flow. + if (view == mButtonNoiseModeReprocess || view == mButtonEdgeModeReprocess || view == null) { + mCamera.setReprocessingFlow(view == mButtonNoiseModeReprocess, view == mButtonEdgeModeReprocess); + } + // set visibility of cluster of reprocessing controls. + int reprocessingViz = mToggleYuvFull.isChecked() && mCamera.isReprocessingAvailable() ? View.VISIBLE : View.GONE; + mReprocessingGroup.setVisibility(reprocessingViz); + + // if just turned off YUV1 stream, end burst. + if (view == mToggleYuvFull && !mToggleYuvFull.isChecked()) { + mToggleBurstJpeg.setChecked(false); + mCamera.setBurst(false); + } + + if (view == mToggleBurstJpeg) { + mCamera.setBurst(mToggleBurstJpeg.isChecked()); + } + + if (view == mToggleShow3A || view == null) { + mPreviewOverlay.show3AInfo(mToggleShow3A.isChecked()); + } + if (view == mToggleGyro || view == null) { + if (mToggleGyro.isChecked()) { + startGyroDisplay(); + } else { + stopGyroDisplay(); + } + } + } + }; + + @Override + protected void onCreate(Bundle savedInstanceState) { + Log.v(TAG, "onCreate"); + CameraTimer.t0 = SystemClock.elapsedRealtime(); + + if (checkPermissions()) { + // Go speed racer. + openCamera(START_WITH_FRONT_CAMERA); + } + + // Initialize UI. 
+ setContentView(R.layout.activity_main); + mLabel1 = (TextView) findViewById(R.id.label1); + mLabel1.setText("Snappy initializing."); + mLabel2 = (TextView) findViewById(R.id.label2); + mLabel2.setText(" ..."); + Button mAfTriggerButton = (Button) findViewById(R.id.af_trigger); + mToggleFrontCam = (ToggleButton) findViewById(R.id.toggle_front_cam); + mToggleFrontCam.setChecked(START_WITH_FRONT_CAMERA); + mToggleYuvFull = (ToggleButton) findViewById(R.id.toggle_yuv_full); + mToggleYuvVga = (ToggleButton) findViewById(R.id.toggle_yuv_vga); + mToggleRaw = (ToggleButton) findViewById(R.id.toggle_raw); + mButtonNoiseMode = (Button) findViewById(R.id.button_noise); + mButtonEdgeMode = (Button) findViewById(R.id.button_edge); + mButtonNoiseModeReprocess = (Button) findViewById(R.id.button_noise_reprocess); + mButtonEdgeModeReprocess = (Button) findViewById(R.id.button_edge_reprocess); + + mToggleFace = (ToggleButton) findViewById(R.id.toggle_face); + mToggleShow3A = (ToggleButton) findViewById(R.id.toggle_show_3A); + mToggleGyro = (ToggleButton) findViewById(R.id.toggle_show_gyro); + Button mGetJpegButton = (Button) findViewById(R.id.jpeg_capture); + Button mGalleryButton = (Button) findViewById(R.id.gallery); + + mToggleBurstJpeg = (ToggleButton) findViewById(R.id.toggle_burst_jpeg); + mToggleSaveSdCard = (ToggleButton) findViewById(R.id.toggle_save_sdcard); + mReprocessingGroup = (LinearLayout) findViewById(R.id.reprocessing_controls); + mPreviewView = (SurfaceView) findViewById(R.id.preview_view); + mPreviewHolder = mPreviewView.getHolder(); + mPreviewHolder.addCallback(this); + mPreviewOverlay = (PreviewOverlay) findViewById(R.id.preview_overlay_view); + mPreviewFrame = (FrameLayout) findViewById(R.id.preview_frame); + + // Set UI listeners. 
+ mAfTriggerButton.setOnClickListener(new View.OnClickListener() { + @Override + public void onClick(View view) { + doAFScan(); + } + }); + mGetJpegButton.setOnClickListener(new View.OnClickListener() { + @Override + public void onClick(View view) { + hitCaptureButton(); + } + }); + mGalleryButton.setOnClickListener(new View.OnClickListener() { + @Override + public void onClick(View view) { + launchPhotosViewer(); + } + }); + mToggleFrontCam.setOnClickListener(new View.OnClickListener() { + @Override + public void onClick(View view) { + Log.v(TAG, "switchCamera()"); + CameraTimer.t0 = SystemClock.elapsedRealtime(); + // ToggleButton isChecked state will determine which camera is started. + openCamera(mToggleFrontCam.isChecked()); + startCamera(); + } + }); + mToggleYuvFull.setOnClickListener(mTransferUiStateToCameraState); + mToggleYuvVga.setOnClickListener(mTransferUiStateToCameraState); + mToggleRaw.setOnClickListener(mTransferUiStateToCameraState); + mButtonNoiseMode.setOnClickListener(mTransferUiStateToCameraState); + mButtonEdgeMode.setOnClickListener(mTransferUiStateToCameraState); + mButtonNoiseModeReprocess.setOnClickListener(mTransferUiStateToCameraState); + mButtonEdgeModeReprocess.setOnClickListener(mTransferUiStateToCameraState); + mToggleFace.setOnClickListener(mTransferUiStateToCameraState); + mToggleShow3A.setOnClickListener(mTransferUiStateToCameraState); + mToggleGyro.setOnClickListener(mTransferUiStateToCameraState); + mToggleBurstJpeg.setOnClickListener(mTransferUiStateToCameraState); + mToggleSaveSdCard.setOnClickListener(mTransferUiStateToCameraState); + mToggleSaveSdCard.setChecked(true); + + mMainHandler = new Handler(this.getApplicationContext().getMainLooper()); + + // General utility thread for e.g. saving JPEGs. 
+ mUtilityThread = new HandlerThread("UtilityThread"); + mUtilityThread.start(); + mUtilityHandler = new Handler(mUtilityThread.getLooper()); + + // --- PRINT REPORT --- + //CameraDeviceReport.printReport(this, false); + super.onCreate(savedInstanceState); + } + + // Open camera. No UI required. + private void openCamera(boolean frontCamera) { + // Close previous camera if required. + if (mCamera != null) { + mCamera.closeCamera(); + } + // --- SET UP CAMERA --- + mCamera = new Api2Camera(this, frontCamera); + mCamera.setCallback(this); + mCamera.openCamera(); + } + + // Initialize camera related UI and start camera; call openCamera first. + private void startCamera() { + // --- SET UP USER INTERFACE --- + mToggleYuvFull.setChecked(STARTUP_FULL_YUV_ON); + mToggleFace.setChecked(true); + mToggleRaw.setVisibility(mCamera.isRawAvailable() ? View.VISIBLE : View.GONE); + mToggleShow3A.setChecked(true); + mTransferUiStateToCameraState.onClick(null); + + // --- SET UP PREVIEW AND OPEN CAMERA --- + + if (mPreviewSurfaceValid) { + mCamera.startPreview(mPreviewHolder.getSurface()); + } else { + // Note that preview is rotated 90 degrees from camera. We just hard code this now. + Size previewSize = mCamera.getPreviewSize(); + // Render in top 12 x 9 of 16 x 9 display. + int renderHeight = 3 * displayHeight() / 4; + int renderWidth = renderHeight * previewSize.getHeight() / previewSize.getWidth(); + int renderPad = (displayWidth() - renderWidth) / 2; + + mPreviewFrame.setPadding(renderPad, 0, 0, 0); + mPreviewFrame.setLayoutParams(new LinearLayout.LayoutParams(renderWidth + renderPad, renderHeight)); + // setFixedSize() will trigger surfaceChanged() callback below, which will start preview. 
+ mPreviewHolder.setFixedSize(previewSize.getHeight(), previewSize.getWidth()); + } + } + + boolean mPreviewSurfaceValid = false; + + @Override + public synchronized void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { + Log.v(TAG, String.format("surfaceChanged: format=%x w=%d h=%d", format, width, height)); + if (checkPermissions()) { + mPreviewSurfaceValid = true; + mCamera.startPreview(mPreviewHolder.getSurface()); + } + } + + Runnable mReturnToCafRunnable = new Runnable() { + @Override + public void run() { + mCamera.setCAF(); + } + }; + + private void doAFScan() { + mCamera.triggerAFScan(); + mMainHandler.removeCallbacks(mReturnToCafRunnable); + mMainHandler.postDelayed(mReturnToCafRunnable, AF_TRIGGER_HOLD_MILLIS); + } + + private int displayWidth() { + DisplayMetrics metrics = new DisplayMetrics(); + this.getWindowManager().getDefaultDisplay().getRealMetrics(metrics); + return metrics.widthPixels; + } + + private int displayHeight() { + DisplayMetrics metrics = new DisplayMetrics(); + this.getWindowManager().getDefaultDisplay().getRealMetrics(metrics); + return metrics.heightPixels; + } + + @Override + public void onStart() { + Log.v(TAG, "onStart"); + super.onStart(); + // Leave screen on. + getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON); + + if (!checkPermissions()) return; + + // Can start camera now that we have the above initialized. 
+ if (mCamera == null) { + openCamera(mToggleFrontCam.isChecked()); + } + startCamera(); + } + + private boolean checkPermissions() { + if (mPermissionCheckActive) return false; + + // Check for all runtime permissions + if ((checkSelfPermission(Manifest.permission.CAMERA) + != PackageManager.PERMISSION_GRANTED ) + || (checkSelfPermission(Manifest.permission.RECORD_AUDIO) + != PackageManager.PERMISSION_GRANTED) + || (checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) + != PackageManager.PERMISSION_GRANTED)) { + Log.i(TAG, "Requested camera/video permissions"); + requestPermissions(new String[] { + Manifest.permission.CAMERA, + Manifest.permission.RECORD_AUDIO, + Manifest.permission.WRITE_EXTERNAL_STORAGE}, + PERMISSIONS_REQUEST_CAMERA); + mPermissionCheckActive = true; + return false; + } + + return true; + } + + @Override + public void onRequestPermissionsResult(int requestCode, String[] permissions, + int[] grantResults) { + mPermissionCheckActive = false; + if (requestCode == PERMISSIONS_REQUEST_CAMERA) { + for (int i = 0; i < grantResults.length; i++) { + if (grantResults[i] == PackageManager.PERMISSION_DENIED) { + Log.i(TAG, "At least one permission denied, can't continue: " + permissions[i]); + finish(); + return; + } + } + + Log.i(TAG, "All permissions granted"); + openCamera(mToggleFrontCam.isChecked()); + startCamera(); + } + } + + @Override + public void onStop() { + Log.v(TAG, "onStop"); + if (mCamera != null) { + mCamera.closeCamera(); + mCamera = null; + } + + // Cancel any pending AF operations. + mMainHandler.removeCallbacks(mReturnToCafRunnable); + stopGyroDisplay(); // No-op if not running. 
+ super.onStop(); + } + + public void noCamera2Full() { + Toast toast = Toast.makeText(this, "WARNING: this camera does not support camera2 HARDWARE_LEVEL_FULL.", Toast.LENGTH_LONG); + toast.setGravity(Gravity.TOP, 0, 0); + toast.show(); + } + + @Override + public void setNoiseEdgeText(final String nrMode, final String edgeMode) { + mMainHandler.post(new Runnable() { + @Override + public void run() { + mButtonNoiseMode.setText(nrMode); + mButtonEdgeMode.setText(edgeMode); + } + }); + } + + @Override + public void setNoiseEdgeTextForReprocessing(final String nrMode, final String edgeMode) { + mMainHandler.post(new Runnable() { + @Override + public void run() { + mButtonNoiseModeReprocess.setText(nrMode); + mButtonEdgeModeReprocess.setText(edgeMode); + } + }); + } + + int mJpegCounter = 0; + long mJpegMillis = 0; + + @Override + public void jpegAvailable(final byte[] jpegData, final int x, final int y) { + Log.v(TAG, "JPEG returned, size = " + jpegData.length); + long now = SystemClock.elapsedRealtime(); + final long dt = mJpegMillis > 0 ? 
now - mJpegMillis : 0; + mJpegMillis = now; + + if (mToggleSaveSdCard.isChecked()) { + mUtilityHandler.post(new Runnable() { + @Override + public void run() { + final String result = MediaSaver.saveJpeg(getApplicationContext(), jpegData, getContentResolver()); + mMainHandler.post(new Runnable() { + @Override + public void run() { + fileNameToast(String.format("Saved %dx%d and %d bytes JPEG to %s in %d ms.", x, y, jpegData.length, result, dt)); + } + }); + + } + }); + } else { + mMainHandler.post(new Runnable() { + @Override + public void run() { + fileNameToast(String.format("Processing JPEG #%d %dx%d and %d bytes in %d ms.", ++mJpegCounter, x, y, jpegData.length, dt)); + } + }); + } + } + + @Override + public void receivedFirstFrame() { + mMainHandler.post(new Runnable() { + @Override + public void run() { + mPreviewView.setBackgroundColor(Color.TRANSPARENT); + } + }); + } + + Toast mToast; + + public void fileNameToast(String s) { + if (mToast != null) { + mToast.cancel(); + } + mToast = Toast.makeText(this, s, Toast.LENGTH_SHORT); + mToast.setGravity(Gravity.TOP, 0, 0); + mToast.show(); + } + + @Override + public void frameDataAvailable(final NormalizedFace[] faces, final float normExposure, final float normLens, float fps, int iso, final int afState, int aeState, int awbState) { + mMainHandler.post(new Runnable() { + @Override + public void run() { + mPreviewOverlay.setFrameData(faces, normExposure, normLens, afState); + } + }); + // Build info string. 
+ String ae = aeStateToString(aeState); + String af = afStateToString(afState); + String awb = awbStateToString(awbState); + final String info = String.format(" %2.0f FPS%5d ISO AF:%s AE:%s AWB:%s", fps, iso, af, ae, awb); + mLastInfo = info; + + if (LOG_FRAME_DATA && faces != null) { + Log.v(TAG, "normExposure: " + normExposure); + Log.v(TAG, "normLens: " + normLens); + for (int i = 0; i < faces.length; ++i) { + Log.v(TAG, "Face getBounds: " + faces[i].bounds); + Log.v(TAG, "Face left eye: " + faces[i].leftEye); + Log.v(TAG, "Face right eye: " + faces[i].rightEye); + Log.v(TAG, "Face mouth: " + faces[i].mouth); + } + } + + // Status line + mMainHandler.post(new Runnable() { + @Override + public void run() { + mLabel1.setText(info); + } + }); + } + + Integer mTimeToFirstFrame = 0; + Integer mHalWaitTime = 0; + Float mDroppedFrameCount = 0f; + String mLastInfo; + + @Override + public void performanceDataAvailable(Integer timeToFirstFrame, Integer halWaitTime, Float droppedFrameCount) { + if (timeToFirstFrame != null) { + mTimeToFirstFrame = timeToFirstFrame; + } + if (halWaitTime != null) { + mHalWaitTime = halWaitTime; + } + if (droppedFrameCount != null) { + mDroppedFrameCount += droppedFrameCount; + } + mMainHandler.post(new Runnable() { + @Override + public void run() { + mLabel2.setText(String.format("TTP %dms HAL %dms Framedrops:%.2f", mTimeToFirstFrame, mHalWaitTime, mDroppedFrameCount)); + } + }); + } + + // Hit capture button. + private void hitCaptureButton() { + Log.v(TAG, "hitCaptureButton"); + mCamera.takePicture(); + } + + // Hit Photos button. + private void launchPhotosViewer() { + Intent intent = new Intent(android.content.Intent.ACTION_VIEW); + intent.setType("image/*"); + intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK); + startActivity(intent); + } + + /********************************* + * Gyro graphics overlay update. 
* + *********************************/ + GyroOperations mGyroOperations; + + private void startGyroDisplay() { + // TODO: Get field of view angles from Camera API. + // TODO: Consider turning OIS off. + float fovLargeDegrees = 62.7533f; // Nexus 6 + float fovSmallDegrees = 49.157f; // Nexus 6 + mPreviewOverlay.setFieldOfView(fovLargeDegrees, fovSmallDegrees); + + if (mGyroOperations == null) { + SensorManager sensorManager = (SensorManager) getSystemService(this.SENSOR_SERVICE); + mGyroOperations = new GyroOperations(sensorManager); + } + mGyroOperations.startListening( + new GyroListener() { + @Override + public void updateGyroAngles(float[] gyroAngles) { + mPreviewOverlay.setGyroAngles(gyroAngles); + } + } + ); + + mPreviewOverlay.showGyroGrid(true); + } + + private void stopGyroDisplay() { + if (mGyroOperations != null) { + mGyroOperations.stopListening(); + } + mPreviewOverlay.showGyroGrid(false); + } + + + /******************************************* + * SurfaceView callbacks just for logging. 
* + *******************************************/ + + @Override + public void surfaceCreated(SurfaceHolder holder) { + Log.v(TAG, "surfaceCreated"); + } + + @Override + public void surfaceDestroyed(SurfaceHolder holder) { + Log.v(TAG, "surfaceDestroyed"); + } + + /********************* + * UTILITY FUNCTIONS * + *********************/ + + private static String awbStateToString(int mode) { + switch (mode) { + case CaptureResult.CONTROL_AWB_STATE_INACTIVE: + return "inactive"; + case CaptureResult.CONTROL_AWB_STATE_SEARCHING: + return "searching"; + case CaptureResult.CONTROL_AWB_STATE_CONVERGED: + return "converged"; + case CaptureResult.CONTROL_AWB_STATE_LOCKED: + return "lock"; + default: + return "unknown " + Integer.toString(mode); + } + } + + private static String aeStateToString(int mode) { + switch (mode) { + case CaptureResult.CONTROL_AE_STATE_INACTIVE: + return "inactive"; + case CaptureResult.CONTROL_AE_STATE_SEARCHING: + return "searching"; + case CaptureResult.CONTROL_AE_STATE_PRECAPTURE: + return "precapture"; + case CaptureResult.CONTROL_AE_STATE_CONVERGED: + return "converged"; + case CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED: + return "flashReq"; + case CaptureResult.CONTROL_AE_STATE_LOCKED: + return "lock"; + default: + return "unknown " + Integer.toString(mode); + } + } + + private static String afStateToString(int mode) { + switch (mode) { + case CaptureResult.CONTROL_AF_STATE_INACTIVE: + return "inactive"; + case CaptureResult.CONTROL_AF_STATE_PASSIVE_SCAN: + return "passiveScan"; + case CaptureResult.CONTROL_AF_STATE_PASSIVE_FOCUSED: + return "passiveFocused"; + case CaptureResult.CONTROL_AF_STATE_PASSIVE_UNFOCUSED: + return "passiveUnfocused"; + case CaptureResult.CONTROL_AF_STATE_ACTIVE_SCAN: + return "activeScan"; + case CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED: + return "focusedLock"; + case CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED: + return "notFocusedLock"; + default: + return "unknown" + Integer.toString(mode); + } + } + 
+} diff --git a/src/com/android/devcamera/GyroListener.java b/src/com/android/devcamera/GyroListener.java new file mode 100644 index 0000000..cdc1888 --- /dev/null +++ b/src/com/android/devcamera/GyroListener.java @@ -0,0 +1,22 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.android.devcamera; + +public interface GyroListener { + + void updateGyroAngles(float[] gyroAngles); + +} diff --git a/src/com/android/devcamera/GyroOperations.java b/src/com/android/devcamera/GyroOperations.java new file mode 100644 index 0000000..a89eafb --- /dev/null +++ b/src/com/android/devcamera/GyroOperations.java @@ -0,0 +1,105 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.android.devcamera; + +import android.hardware.Sensor; +import android.hardware.SensorEvent; +import android.hardware.SensorEventListener; +import android.hardware.SensorManager; +import android.util.Log; + +import java.util.ArrayDeque; + +/** + * Put all the Gyro stuff here. + */ +public class GyroOperations { + private static final String TAG = "DevCamera_GYRO"; + + private SensorManager mSensorManager; + private GyroListener mListener; + + private SensorEventListener mSensorEventListener = new SensorEventListener() { + @Override + public void onSensorChanged(SensorEvent event) { + delayGyroData(event); + } + @Override + public void onAccuracyChanged(Sensor sensor, int accuracy) { + } + }; + + public GyroOperations(SensorManager sensorManager) { + mSensorManager = sensorManager; + } + + public void startListening(GyroListener listener) { + mSensorManager.registerListener(mSensorEventListener, mSensorManager.getDefaultSensor(Sensor.TYPE_GYROSCOPE), SensorManager.SENSOR_DELAY_FASTEST); + mListener = listener; + } + + public void stopListening() { + mSensorManager.unregisterListener(mSensorEventListener); + } + + // We need to make a copy of SensorEvent so we can put it in our delay-line. + class GyroEvent2D { + public long timestamp; + public final float[] values = new float[2]; + + public GyroEvent2D(SensorEvent event) { + this.timestamp = event.timestamp; + this.values[0] = event.values[0]; + this.values[1] = event.values[1]; + } + } + + private long mGyroLastTimestamp = 0; + private float[] mGyroAngle = new float[]{0f, 0f}; // radians, X and Y axes. + // Gyro arrives at 230 Hz on N6: 23 samples in 100 ms. Viewfinder latency is 70 ms. Delay about 15 samples. 
+ private ArrayDeque mSensorDelayLine = new ArrayDeque<>(); + private static final int DELAY_SIZE = 10; + + void delayGyroData(SensorEvent event) { + mSensorDelayLine.addLast(new GyroEvent2D(event)); + if (mSensorDelayLine.size() < DELAY_SIZE) { + return; + } + GyroEvent2D delayedEvent = mSensorDelayLine.removeFirst(); + integrateGyroForPosition(delayedEvent); + } + + void integrateGyroForPosition(GyroEvent2D event) { + if (mGyroLastTimestamp == 0) { + mGyroLastTimestamp = event.timestamp; + return; + } + long dt = (event.timestamp - mGyroLastTimestamp) / 1000; // microseconds between samples + if (dt > 10000) { // below 100 Hz + Log.v(TAG, " ===============> GYRO STALL <=============="); + } + mGyroAngle[0] += event.values[0] * 0.000001f * dt; + mGyroAngle[1] += event.values[1] * 0.000001f * dt; + mGyroLastTimestamp = event.timestamp; + + // TODO: Add UI + //updateOrientationUI(mGyroAngle, dt); + //Log.v(TAG, String.format("Gyro: theta_x = %.2f theta_y = %.2f dt = %d", mGyroAngle[0]*180f/3.14f, mGyroAngle[1]*180f/3.14f, dt)); + + mListener.updateGyroAngles(mGyroAngle); + } + +} diff --git a/src/com/android/devcamera/LoggingCallbacks.java b/src/com/android/devcamera/LoggingCallbacks.java new file mode 100644 index 0000000..0644699 --- /dev/null +++ b/src/com/android/devcamera/LoggingCallbacks.java @@ -0,0 +1,138 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package com.android.devcamera; + +import android.hardware.camera2.CameraCaptureSession; +import android.hardware.camera2.CameraDevice; +import android.hardware.camera2.CaptureFailure; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.CaptureResult; +import android.hardware.camera2.TotalCaptureResult; +import android.util.Log; + +/** + * Static utility class that logs various camera2 callbacks. + * + * The only reason this exists as a separate class is void cluttering up Api2Camera. + */ + +public class LoggingCallbacks { + private static final String TAG = "DevCamera_LOG2"; + private static final Boolean LOG_EVERY_FRAME = false; + private static final Boolean LOG_NON_ERRORS = false; + + public static class DeviceStateCallback extends CameraDevice.StateCallback { + @Override + public void onOpened(CameraDevice camera) { + if (LOG_NON_ERRORS) { + Log.v(TAG, "Camera opened."); + } + } + + @Override + public void onClosed(CameraDevice camera) { + if (LOG_NON_ERRORS) { + Log.v(TAG, "Camera closed."); + } + } + + @Override + public void onDisconnected(CameraDevice camera) { + Log.v(TAG, "Camera disconnected."); + } + + @Override + public void onError(CameraDevice camera, int error) { + Log.v(TAG, "Camera error: " + error); + } + } + + public static class SessionStateCallback extends CameraCaptureSession.StateCallback { + @Override + public void onConfigured(CameraCaptureSession session) { + if (LOG_NON_ERRORS) { + Log.v(TAG, "Capture session callback onConfigured("+session+")"); + } + } + + @Override + public void onConfigureFailed(CameraCaptureSession session) { + Log.v(TAG, "Capture session callback onConfigureFailed("+session+")"); + super.onReady(session); + } + + @Override + public void onReady(CameraCaptureSession session) { + if (LOG_NON_ERRORS) { + Log.v(TAG, "Capture session callback onReady("+session+")"); + } + super.onReady(session); + } + + @Override + public void onActive(CameraCaptureSession session) { + if 
(LOG_NON_ERRORS) { + Log.v(TAG, "Capture session callback onActive("+session+")"); + } + super.onActive(session); + } + + @Override + public void onClosed(CameraCaptureSession session) { + if (LOG_NON_ERRORS) { + Log.v(TAG, "Capture session callback onClosed("+session+")"); + } + super.onClosed(session); + } + } + + public static class SessionCaptureCallback extends CameraCaptureSession.CaptureCallback { + @Override + public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) { + if (LOG_EVERY_FRAME) { + Log.v(TAG, "Capture started."); + } + super.onCaptureStarted(session, request, timestamp, frameNumber); + } + + @Override + public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request, CaptureResult partialResult) { + if (LOG_EVERY_FRAME) { + Log.v(TAG, "Capture progressed."); + } + super.onCaptureProgressed(session, request, partialResult); + } + + @Override + public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) { + if (LOG_EVERY_FRAME) { + Log.v(TAG, "Capture completed."); + } + super.onCaptureCompleted(session, request, result); + } + + @Override + public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) { + super.onCaptureFailed(session, request, failure); + } + + @Override + public void onCaptureSequenceCompleted(CameraCaptureSession session, int sequenceId, long frameNumber) { + super.onCaptureSequenceCompleted(session, sequenceId, frameNumber); + } + + } +} diff --git a/src/com/android/devcamera/MediaSaver.java b/src/com/android/devcamera/MediaSaver.java new file mode 100644 index 0000000..4929e33 --- /dev/null +++ b/src/com/android/devcamera/MediaSaver.java @@ -0,0 +1,114 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the 
License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.android.devcamera; + +import android.content.ContentResolver; +import android.content.ContentValues; +import android.content.Context; +import android.content.SharedPreferences; +import android.os.SystemClock; +import android.provider.MediaStore; +import android.util.Log; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStream; + +/** + * This class has methods required to save a JPEG to disk as well as update the + * MediaStore database. + */ + + +public class MediaSaver { + private static final String TAG = "Snappy_MediaSaver"; + private static final String MY_PREFS_NAME = "SnappyPrefs"; + + // MediaStore is slow/broken + private static final boolean UDPATE_MEDIA_STORE = true; + + + public static int getNextInt(Context context) { + SharedPreferences prefs = context.getSharedPreferences(MY_PREFS_NAME, Context.MODE_PRIVATE); + int i = prefs.getInt("counter", 1); + SharedPreferences.Editor editor = prefs.edit(); + editor.putInt("counter", i+1); + editor.commit(); + return i; + } + + /** + * @param context Application context. + * @param jpegData JPEG byte stream. 
+ */ + public static String saveJpeg(Context context, byte[] jpegData, ContentResolver resolver) { + String filename = ""; + try { + File file; + while (true) { + int i = getNextInt(context); + filename = String.format("/sdcard/DCIM/Camera/SNAP_%05d.JPG", i); + file = new File(filename); + if (file.createNewFile()) { + break; + } + } + + long t0 = SystemClock.uptimeMillis(); + OutputStream os = new FileOutputStream(file); + os.write(jpegData); + os.flush(); + os.close(); + long t1 = SystemClock.uptimeMillis(); + + // update MediaStore so photos apps can find photos right away. + if (UDPATE_MEDIA_STORE) { + // really slow for some reason: MediaStore.Images.Media.insertImage(resolver, file.getAbsolutePath(), file.getName(), file.getName()); + insertImage(resolver, file); + } + long t2 = SystemClock.uptimeMillis(); + + Log.v(TAG, String.format("Wrote JPEG %d bytes as %s in %.3f seconds; mediastore update = %.3f secs", + jpegData.length, file, (t1 - t0) * 0.001, (t2 - t1) * 0.001) ); + } catch (IOException e) { + Log.e(TAG, "Error creating new file: ", e); + } + return filename; + } + + + // We use this instead of MediaStore.Images.Media.insertImage() because we want to add date metadata + public static void insertImage(ContentResolver cr, File file) { + + ContentValues values = new ContentValues(); + values.put(MediaStore.Images.Media.TITLE, file.getName()); + values.put(MediaStore.Images.Media.DISPLAY_NAME, file.getName()); + values.put(MediaStore.Images.Media.DESCRIPTION, file.getName()); + values.put(MediaStore.Images.Media.MIME_TYPE, "image/jpeg"); + values.put(MediaStore.Images.Media.DATA, file.getAbsolutePath()); + // Add the date meta data to ensure the image is added at the front of the gallery + values.put(MediaStore.Images.Media.DATE_ADDED, System.currentTimeMillis()); + values.put(MediaStore.Images.Media.DATE_TAKEN, System.currentTimeMillis()); + + try { + cr.insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, values); + } catch (Exception e) { + 
Log.w(TAG, "Error updating media store for " + file, e); + } + } + +} diff --git a/src/com/android/devcamera/NormalizedFace.java b/src/com/android/devcamera/NormalizedFace.java new file mode 100644 index 0000000..546fe12 --- /dev/null +++ b/src/com/android/devcamera/NormalizedFace.java @@ -0,0 +1,91 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.android.devcamera; + +import android.graphics.PointF; +import android.graphics.RectF; +import android.hardware.camera2.params.Face; + +/** + * + * Face coordinates. Normalized 0 to 1, and in native sensor orientation, which so far seems to be + * landscape. 
+ * NOTE(review): mirrorInX()/mirrorInY() read bounds without a null check, yet the constructor only sets it when face.getBounds() != null — confirm callers always supply a face with bounds.
+ */
+public class NormalizedFace {
+    public RectF bounds;
+    public PointF leftEye;
+    public PointF rightEye;
+    public PointF mouth;
+
+    public NormalizedFace(Face face, int dX, int dY, int offX, int offY) {
+        if (face.getLeftEyePosition() != null) {
+            leftEye = new PointF();
+            leftEye.x = (float) (face.getLeftEyePosition().x - offX) / dX;
+            leftEye.y = (float) (face.getLeftEyePosition().y - offY) / dY;
+        }
+        if (face.getRightEyePosition() != null) {
+            rightEye = new PointF();
+            rightEye.x = (float) (face.getRightEyePosition().x - offX) / dX;
+            rightEye.y = (float) (face.getRightEyePosition().y - offY) / dY;
+        }
+        if (face.getMouthPosition() != null) {
+            mouth = new PointF();
+            mouth.x = (float) (face.getMouthPosition().x - offX) / dX;
+            mouth.y = (float) (face.getMouthPosition().y - offY) / dY;
+        }
+        if (face.getBounds() != null) {
+            bounds = new RectF();
+            bounds.left = (float) (face.getBounds().left - offX) / dX;
+            bounds.top = (float) (face.getBounds().top - offY) / dY;
+            bounds.right = (float) (face.getBounds().right - offX) / dX;
+            bounds.bottom = (float) (face.getBounds().bottom - offY) / dY;
+        }
+    }
+
+    public void mirrorInX() {
+        if (leftEye != null) {
+            leftEye.x = 1f - leftEye.x;
+        }
+        if (rightEye != null) {
+            rightEye.x = 1f - rightEye.x;
+        }
+        if (mouth != null) {
+            mouth.x = 1f - mouth.x;
+        }
+        float oldLeft = bounds.left;
+        bounds.left = 1f - bounds.right;
+        bounds.right = 1f - oldLeft;
+    }
+
+    /**
+     * Flips all coordinates vertically (y becomes 1 - y). Typically required for front camera
+     */
+    public void mirrorInY() {
+        if (leftEye != null) {
+            leftEye.y = 1f - leftEye.y;
+        }
+        if (rightEye != null) {
+            rightEye.y = 1f - rightEye.y;
+        }
+        if (mouth != null) {
+            mouth.y = 1f - mouth.y;
+        }
+        float oldTop = bounds.top;
+        bounds.top = 1f - bounds.bottom;
+        bounds.bottom = 1f - oldTop;
+    }
+}
diff --git a/src/com/android/devcamera/PreviewOverlay.java b/src/com/android/devcamera/PreviewOverlay.java
new file mode 100644
index 0000000..dff60e9
--- /dev/null
+++
b/src/com/android/devcamera/PreviewOverlay.java @@ -0,0 +1,225 @@ +/* + * Copyright (C) 2016 The Android Open Source Project + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package com.android.devcamera; + +import android.content.Context; +import android.content.res.Resources; +import android.graphics.Canvas; +import android.graphics.Paint; +import android.graphics.PointF; +import android.graphics.RectF; +import android.util.AttributeSet; +import android.view.View; + +public class PreviewOverlay extends View { + private static final String TAG = "DevCamera_FACE"; + + private boolean mShow3AInfo; + private boolean mShowGyroGrid; + private int mColor; + private int mColor2; + private Paint mPaint; + private Paint mPaint2; + + // Rendered data: + private NormalizedFace[] mFaces; + private float mExposure; + private float mLens; + private int mAfState; + private float mFovLargeDegrees; + private float mFovSmallDegrees; + float[] mAngles = new float[2]; + + public PreviewOverlay(Context context, AttributeSet attrs) { + super(context, attrs); + Resources res = getResources(); + mColor = res.getColor(R.color.face_color); + mPaint = new Paint(); + mPaint.setColor(mColor); + mPaint.setAntiAlias(true); + mPaint.setStyle(Paint.Style.STROKE); + mPaint.setStrokeWidth(res.getDimension(R.dimen.face_circle_stroke)); + + mColor2 = res.getColor(R.color.hud_color); + mPaint2 = new Paint(); + mPaint2.setAntiAlias(true); + mPaint2.setStyle(Paint.Style.STROKE); + 
mPaint2.setStrokeWidth(res.getDimension(R.dimen.hud_stroke)); + } + + public void setFrameData(NormalizedFace[] faces, float normExposure, float normLens, int afState) { + mFaces = faces; + mExposure = normExposure; + mLens = normLens; + mAfState = afState; + this.setVisibility(VISIBLE); + invalidate(); + } + + public void show3AInfo(boolean show) { + mShow3AInfo = show; + this.setVisibility(VISIBLE); + invalidate(); + } + + public void setGyroAngles(float[] angles) { + mAngles = angles; + } + + public void setFieldOfView(float fovLargeDegrees, float fovSmallDegrees) { + mFovLargeDegrees = fovLargeDegrees; + mFovSmallDegrees = fovSmallDegrees; + } + + public void showGyroGrid(boolean show) { + mShowGyroGrid = show; + this.setVisibility(VISIBLE); + invalidate(); + } + + private static double SHORT_LOG_EXPOSURE = Math.log10(1000000000 / 10000); // 1/10000 second + private static double LONG_LOG_EXPOSURE = Math.log10(1000000000 / 10); // 1/10 second + float[] yGridValues = new float[] { + (float) ((Math.log10(1000000000 / 30) - SHORT_LOG_EXPOSURE) / (LONG_LOG_EXPOSURE - SHORT_LOG_EXPOSURE)), + (float) ((Math.log10(1000000000 / 100) - SHORT_LOG_EXPOSURE) / (LONG_LOG_EXPOSURE - SHORT_LOG_EXPOSURE)), + (float) ((Math.log10(1000000000 / 1000) - SHORT_LOG_EXPOSURE) / (LONG_LOG_EXPOSURE - SHORT_LOG_EXPOSURE))}; + + /** Focus states + CONTROL_AF_STATE_INACTIVE 0 + CONTROL_AF_STATE_PASSIVE_SCAN 1 + CONTROL_AF_STATE_PASSIVE_FOCUSED 2 + CONTROL_AF_STATE_ACTIVE_SCAN 3 + CONTROL_AF_STATE_FOCUSED_LOCKED 4 + CONTROL_AF_STATE_NOT_FOCUSED_LOCKED 5 + CONTROL_AF_STATE_PASSIVE_UNFOCUSED 6 + */ + + @Override + protected void onDraw(Canvas canvas) { + if (mFaces == null) { + return; + } + float previewW = this.getWidth(); + float previewH = this.getHeight(); + + // 3A visualizatoins + if (mShow3AInfo) { + + // Draw 3A ball on a rail + if (false) { + mPaint2.setStyle(Paint.Style.FILL_AND_STROKE); + mPaint2.setColor(0x33FFFFFF); + canvas.drawRect(0.04f * previewW, 0.03f * previewH, 0.96f * 
previewW, 0.05f * previewH, mPaint2); + + mPaint2.setStyle(Paint.Style.FILL_AND_STROKE); + float x1 = (0.92f * mLens + 0.04f) * previewW; + float y1 = (0.04f) * previewH; + mPaint2.setColor(0xFF000000); + canvas.drawCircle(x1, y1, 20, mPaint2); + mPaint2.setColor(0xFFDDDDDD); + canvas.drawCircle(x1, y1, 18, mPaint2); + } + + // Draw AF center thing + mPaint2.setStyle(Paint.Style.FILL_AND_STROKE); + float x2 = 0.5f * previewW; + float y2 = 0.5f * previewH; + mPaint2.setColor(0x990000FF); + String text = "NOT IN CAF"; + if (mAfState == 1) { // passive scan RED + mPaint2.setColor(0x99FF0000); + text = "CAF SCAN"; + } + if (mAfState == 2) { // passive good + mPaint2.setColor(0x9999FF99); + text = "CAF FOCUSED"; + } + if (mAfState == 6) { // passive bad + mPaint2.setColor(0x99FFFFFF); + text = "CAF UNFOCUSED"; + } + canvas.drawCircle(x2, y2, mLens * 0.25f * previewW, mPaint2); + mPaint.setColor(0xFFFFFFFF); + mPaint.setTextSize(36f); + canvas.drawText(text, x2, y2 - mLens * 0.25f * previewW - 7f, mPaint); + } + + // Draw Faces + for (NormalizedFace face : mFaces) { + RectF r1 = face.bounds; + float newY = r1.centerX() * previewH; + float newX = (1 - r1.centerY()) * previewW; + float dY = r1.width() * previewH; + float dX = r1.height() * previewW; + float dP = (dX + dY) * 0.045f; + RectF newR1 = new RectF(newX - dX * 0.5f, newY - dY * 0.5f, newX + dX * 0.5f, newY + dY * 0.5f); + canvas.drawRoundRect(newR1, dP, dP, mPaint); + + PointF[] p = new PointF[3]; + p[0] = face.leftEye; + p[1] = face.rightEye; + p[2] = face.mouth; + + for (int j = 0; j < 3; j++) { + if (p[j] == null) { + continue; + } + newY = p[j].x * previewH; + newX = (1 - p[j].y) * previewW; + canvas.drawCircle(newX, newY, dP, mPaint); + } + } + + // Draw Gyro grid. 
+ if (mShowGyroGrid) { + float x1, x2, y1, y2; + + // + // screen/sensor + // | + // screen/2 = FL tan(FOV/2) | + // | lens + // |<––––––––––––– FL –––––––––––>()–––––––––> scene @ infinity + // | + // | + // | + // + + float focalLengthH = 0.5f * previewH / (float) Math.tan(Math.toRadians(mFovLargeDegrees) * 0.5); + float focalLengthW = 0.5f * previewW / (float) Math.tan(Math.toRadians(mFovSmallDegrees) * 0.5); + final double ANGLE_STEP = (float) Math.toRadians(10f); + + // Draw horizontal lines, with 10 degree spacing. + double phase1 = mAngles[0] % ANGLE_STEP; + for (double i = -5 * ANGLE_STEP + phase1; i < 5 * ANGLE_STEP; i += ANGLE_STEP) { + x1 = 0; + x2 = previewW; + y1 = y2 = previewH / 2 + focalLengthH * (float) Math.tan(i); + canvas.drawLine(x1, y1, x2, y2, mPaint); + } + // Draw vertical lines, with 10 degree spacing. + double phase2 = mAngles[1] % ANGLE_STEP; + for (double i = -5 * ANGLE_STEP + phase2; i < 5 * ANGLE_STEP; i += ANGLE_STEP) { + x1 = x2 = previewW / 2 + focalLengthW * (float) Math.tan(i); + y1 = 0; + y2 = previewH; + canvas.drawLine(x1, y1, x2, y2, mPaint); + } + } + + super.onDraw(canvas); + } +} -- cgit v1.2.3