summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorEino-Ville Talvala <etalvala@google.com>2016-04-19 22:52:09 +0000
committerandroid-build-merger <android-build-merger@google.com>2016-04-19 22:52:09 +0000
commit2c24aa7b9ee7f5455f0c84325d1831c5206c4fe1 (patch)
tree08af4a9ab3773f88ff8cd73d7c29314392f8ed85
parent3c34ea04f6ed48559c1d94a4cac1d5e2e5b6517f (diff)
parent25faa63fc78fe3745f985db8c73f4f7c07e6ed23 (diff)
downloadDevCamera-2c24aa7b9ee7f5455f0c84325d1831c5206c4fe1.tar.gz
Initial check-in of Snappy code am: a8a96df am: 070396f
am: 25faa63 * commit '25faa63fc78fe3745f985db8c73f4f7c07e6ed23': Initial check-in of Snappy code Change-Id: I65b182af740381eab2cfc33c266e35e00eda5806
-rw-r--r--Android.mk40
-rw-r--r--AndroidManifest.xml39
-rw-r--r--res/drawable-hdpi/ic_launcher.pngbin0 -> 5115 bytes
-rw-r--r--res/drawable-mdpi/ic_launcher.pngbin0 -> 5115 bytes
-rw-r--r--res/drawable-nodpi/ic_capture_camera_normal.pngbin0 -> 3547 bytes
-rw-r--r--res/drawable-nodpi/photos.pngbin0 -> 4679 bytes
-rw-r--r--res/drawable-nodpi/record_button.pngbin0 -> 1558 bytes
-rw-r--r--res/drawable-xhdpi/ic_launcher.pngbin0 -> 5115 bytes
-rw-r--r--res/drawable/circle_background.xml18
-rw-r--r--res/drawable/circle_button.xml20
-rw-r--r--res/drawable/circle_button_blue.xml20
-rw-r--r--res/drawable/focus_square_button.xml21
-rw-r--r--res/drawable/shot_circle.xml10
-rw-r--r--res/layout/activity_main.xml232
-rw-r--r--res/values-sw600dp/dimens.xml8
-rw-r--r--res/values-sw720dp-land/dimens.xml9
-rw-r--r--res/values-v11/styles.xml11
-rw-r--r--res/values-v14/styles.xml12
-rw-r--r--res/values/colors.xml11
-rw-r--r--res/values/dimens.xml10
-rw-r--r--res/values/strings.xml16
-rw-r--r--res/values/styles.xml20
-rw-r--r--src/com/google/snappy/BitmapUtility.java54
-rw-r--r--src/com/google/snappy/CameraInfoCache.java214
-rw-r--r--src/com/google/snappy/GyroListener.java10
-rw-r--r--src/com/google/snappy/GyroOperations.java92
-rw-r--r--src/com/google/snappy/MainActivity.java545
-rw-r--r--src/com/google/snappy/MediaSaver.java99
-rw-r--r--src/com/google/snappy/MyApi2Camera.java793
-rw-r--r--src/com/google/snappy/MyCameraInterface.java128
-rw-r--r--src/com/google/snappy/MyDeviceReport.java385
-rw-r--r--src/com/google/snappy/MyLoggingCallbacks.java123
-rw-r--r--src/com/google/snappy/MyTimer.java18
-rw-r--r--src/com/google/snappy/NormalizedFace.java77
-rw-r--r--src/com/google/snappy/PreviewOverlay.java216
35 files changed, 3251 insertions, 0 deletions
diff --git a/Android.mk b/Android.mk
new file mode 100644
index 0000000..256ce47
--- /dev/null
+++ b/Android.mk
@@ -0,0 +1,40 @@
+# Copyright (C) 2013 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+ifeq ($(TARGET_BUILD_JAVA_SUPPORT_LEVEL),platform)
+
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_SDK_VERSION := current
+# LOCAL_SDK_VERSION := 19
+
+#LOCAL_STATIC_JAVA_LIBRARIES := android-ex-camera2
+
+LOCAL_SRC_FILES := \
+ $(call all-java-files-under, src) \
+ $(call all-renderscript-files-under, src)
+
+LOCAL_PACKAGE_NAME := Snappy
+
+LOCAL_AAPT_FLAGS += --rename-manifest-package com.google.Snappy2
+
+LOCAL_JACK_ENABLED := disabled
+
+include $(BUILD_PACKAGE)
+
+endif
diff --git a/AndroidManifest.xml b/AndroidManifest.xml
new file mode 100644
index 0000000..d9904ed
--- /dev/null
+++ b/AndroidManifest.xml
@@ -0,0 +1,39 @@
+<?xml version="1.0" encoding="utf-8"?>
+
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="com.google.snappy"
+ android:versionCode="1"
+ android:versionName="1.0" >
+ <uses-permission android:name="android.permission.CAMERA" />
+ <uses-feature android:name="android.hardware.camera" />
+ <uses-feature android:name="android.hardware.camera.autofocus" android:required="false" />
+ <uses-feature android:name="android.hardware.camera.front" android:required="false"/>
+
+ <uses-sdk
+ android:minSdkVersion="21" android:targetSdkVersion="21"
+ />
+
+
+ <uses-permission android:name="android.permission.RECORD_AUDIO"/>
+ <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
+ <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
+
+ <application
+ android:allowBackup="true"
+ android:icon="@drawable/ic_launcher"
+ android:theme="@android:style/Theme.Holo.NoActionBar.Fullscreen"
+ android:label="@string/app_name" >
+ <activity
+ android:screenOrientation="portrait"
+ android:name="com.google.snappy.MainActivity">
+ <intent-filter>
+ <action android:name="android.intent.action.MAIN" />
+ <category android:name="android.intent.category.LAUNCHER" />
+ </intent-filter>
+ </activity>
+ </application>
+
+</manifest>
+
+<!---android:targetSdkVersion="20"-->
+
diff --git a/res/drawable-hdpi/ic_launcher.png b/res/drawable-hdpi/ic_launcher.png
new file mode 100644
index 0000000..16dfd55
--- /dev/null
+++ b/res/drawable-hdpi/ic_launcher.png
Binary files differ
diff --git a/res/drawable-mdpi/ic_launcher.png b/res/drawable-mdpi/ic_launcher.png
new file mode 100644
index 0000000..16dfd55
--- /dev/null
+++ b/res/drawable-mdpi/ic_launcher.png
Binary files differ
diff --git a/res/drawable-nodpi/ic_capture_camera_normal.png b/res/drawable-nodpi/ic_capture_camera_normal.png
new file mode 100644
index 0000000..0360396
--- /dev/null
+++ b/res/drawable-nodpi/ic_capture_camera_normal.png
Binary files differ
diff --git a/res/drawable-nodpi/photos.png b/res/drawable-nodpi/photos.png
new file mode 100644
index 0000000..26410f7
--- /dev/null
+++ b/res/drawable-nodpi/photos.png
Binary files differ
diff --git a/res/drawable-nodpi/record_button.png b/res/drawable-nodpi/record_button.png
new file mode 100644
index 0000000..7817eb4
--- /dev/null
+++ b/res/drawable-nodpi/record_button.png
Binary files differ
diff --git a/res/drawable-xhdpi/ic_launcher.png b/res/drawable-xhdpi/ic_launcher.png
new file mode 100644
index 0000000..16dfd55
--- /dev/null
+++ b/res/drawable-xhdpi/ic_launcher.png
Binary files differ
diff --git a/res/drawable/circle_background.xml b/res/drawable/circle_background.xml
new file mode 100644
index 0000000..07f7699
--- /dev/null
+++ b/res/drawable/circle_background.xml
@@ -0,0 +1,18 @@
+<selector xmlns:android="http://schemas.android.com/apk/res/android">
+ <item android:state_pressed="true">
+ <shape android:shape="oval">
+ <size android:width="48dp" android:height="48dp"/>
+ <stroke android:width="0dp" android:color="#FFBBBBBB" />
+ <solid android:color="#D05555FF" />
+ </shape>
+ </item>
+ <item>
+ <shape android:shape="oval">
+ <size android:width="48dp" android:height="48dp"/>
+ <stroke android:width="0dp" android:color="#FFBBBBBB" />
+ <solid android:color="#665555FF" /> <!-- spec is 26000000 -->
+ </shape>
+ </item>
+
+
+</selector> \ No newline at end of file
diff --git a/res/drawable/circle_button.xml b/res/drawable/circle_button.xml
new file mode 100644
index 0000000..d25a157
--- /dev/null
+++ b/res/drawable/circle_button.xml
@@ -0,0 +1,20 @@
+<selector xmlns:android="http://schemas.android.com/apk/res/android">
+ <item android:state_pressed="true">
+
+ <shape android:shape="oval">
+ <size android:width="80dp" android:height="80dp"/>
+ <stroke android:width="0.5dp" android:color="#FFBBBBBB" />
+ <solid android:color="#BB888888" />
+ </shape>
+
+ </item>
+
+ <item>
+ <shape android:shape="oval">
+ <size android:width="80dp" android:height="80dp"/>
+ <stroke android:width="0.5dp" android:color="#FFBBBBBB" />
+ <solid android:color="#BB444444" />
+ </shape>
+ </item>
+
+</selector> \ No newline at end of file
diff --git a/res/drawable/circle_button_blue.xml b/res/drawable/circle_button_blue.xml
new file mode 100644
index 0000000..f1f6e3f
--- /dev/null
+++ b/res/drawable/circle_button_blue.xml
@@ -0,0 +1,20 @@
+<selector xmlns:android="http://schemas.android.com/apk/res/android">
+ <item android:state_pressed="true">
+
+ <shape android:shape="oval">
+ <size android:width="80dp" android:height="80dp"/>
+ <stroke android:width="0.5dp" android:color="#FFBBBBBB" />
+ <solid android:color="#BB666688" />
+ </shape>
+
+ </item>
+
+ <item>
+ <shape android:shape="oval">
+ <size android:width="80dp" android:height="80dp"/>
+ <stroke android:width="0.5dp" android:color="#FFBBBBBB" />
+ <solid android:color="#BB222244" />
+ </shape>
+ </item>
+
+</selector> \ No newline at end of file
diff --git a/res/drawable/focus_square_button.xml b/res/drawable/focus_square_button.xml
new file mode 100644
index 0000000..bed0c57
--- /dev/null
+++ b/res/drawable/focus_square_button.xml
@@ -0,0 +1,21 @@
+<selector xmlns:android="http://schemas.android.com/apk/res/android">
+
+ <!-- pressed state -->
+ <item android:state_pressed="true">
+ <shape android:shape="rectangle">
+ <size android:width="80dp" android:height="80dp"/>
+ <stroke android:width="2dp" android:color="#FFFFFF00"/>
+ <solid android:color="#00FFFFFF"/>
+ </shape>
+
+ </item>
+
+ <item>
+ <shape android:shape="rectangle">
+ <size android:width="80dp" android:height="80dp"/>
+ <stroke android:width="0.5dp" android:color="#FFFFFF00"/>
+ <solid android:color="#00FFFFFF"/>
+ </shape>
+ </item>
+
+</selector> \ No newline at end of file
diff --git a/res/drawable/shot_circle.xml b/res/drawable/shot_circle.xml
new file mode 100644
index 0000000..9fe49e4
--- /dev/null
+++ b/res/drawable/shot_circle.xml
@@ -0,0 +1,10 @@
+<selector xmlns:android="http://schemas.android.com/apk/res/android">
+ <item>
+ <shape android:shape="oval">
+ <size android:width="70dp" android:height="70dp"/>
+ <stroke android:width="0.5dp" android:color="#CCFFFFFF" />
+ <solid android:color="#66FFFFFF" />
+ </shape>
+ </item>
+
+</selector> \ No newline at end of file
diff --git a/res/layout/activity_main.xml b/res/layout/activity_main.xml
new file mode 100644
index 0000000..c54786f
--- /dev/null
+++ b/res/layout/activity_main.xml
@@ -0,0 +1,232 @@
+<?xml version="1.0" encoding="utf-8"?>
+
+
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:layout_width="fill_parent"
+ android:layout_height="fill_parent"
+ android:background="@color/capture_button_color"
+ android:orientation="vertical"
+ android:id="@+id/root_view">
+
+ <FrameLayout
+ android:layout_width="360dp"
+ android:layout_height="480dp"
+ android:layout_gravity="center_horizontal"
+ android:id="@+id/preview_frame">
+
+ <SurfaceView
+ android:id="@+id/preview_view"
+ android:layout_width="fill_parent"
+ android:layout_height="fill_parent"
+ android:background="#FF000000" />
+
+ <com.google.snappy.PreviewOverlay
+ android:id="@+id/preview_overlay_view"
+ android:layout_width="fill_parent"
+ android:layout_height="fill_parent" />
+
+
+ <LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:id="@+id/reprocessing_controls"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_gravity="bottom"
+ android:orientation="vertical">
+
+ <TextView
+ android:id="@+id/label_reprocessing"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_gravity="center"
+ android:text="@string/Reprocessing"
+ android:background="#44000000"
+ android:textColor="#FFFFFF"/>
+
+ <LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:id="@+id/root_view4"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_gravity="bottom"
+ android:orientation="horizontal">
+
+ <Button
+ android:id="@+id/jpeg_capture"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/jpeg_capture"
+ android:textSize="@dimen/control_text" />
+
+ <ToggleButton
+ android:id="@+id/toggle_burst_jpeg"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:textOff="Burst JPEG"
+ android:textOn="Burst JPEG"
+ android:textSize="@dimen/control_text" />
+
+ <ToggleButton
+ android:id="@+id/toggle_save_sdcard"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:textOff="Save"
+ android:textOn="Save"
+ android:textSize="@dimen/control_text" />
+
+ <Button
+ android:id="@+id/button_noise_reprocess"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/nr_unknown"
+ android:textSize="@dimen/control_text" />
+
+ <Button
+ android:id="@+id/button_edge_reprocess"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/edge_unknown"
+ android:textSize="@dimen/control_text" />
+ </LinearLayout>
+ </LinearLayout>
+ </FrameLayout>
+
+
+ <TextView
+ android:id="@+id/label1"
+ android:layout_width="wrap_content"
+ android:layout_height="18dp"
+ android:textSize="14dp"
+ android:textColor="@color/exp_iso_color"
+ android:layout_gravity="left"
+ android:text="@string/minus"
+ android:textAppearance="?android:attr/textAppearanceMedium"
+ android:fontFamily="monospace"
+ />
+
+ <TextView
+ android:id="@+id/label2"
+ android:layout_width="wrap_content"
+ android:layout_height="18dp"
+ android:textSize="14dp"
+ android:textColor="@color/exp_iso_color"
+ android:layout_gravity="left"
+ android:text="@string/minus"
+ android:textAppearance="?android:attr/textAppearanceMedium"
+ android:fontFamily="monospace"
+ />
+
+ <HorizontalScrollView
+ android:layout_width="fill_parent"
+ android:layout_height="wrap_content">
+
+ <LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:id="@+id/root_view2"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_gravity="left"
+ android:orientation="horizontal">
+
+ <ToggleButton
+ android:id="@+id/toggle_yuv_full"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:textOff="YUV Full"
+ android:textOn="YUV Full"
+ android:textSize="@dimen/control_text" />
+
+ <ToggleButton
+ android:id="@+id/toggle_yuv_vga"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:textOff="320x240"
+ android:textOn="320x240"
+ android:textSize="@dimen/control_text" />
+
+ <ToggleButton
+ android:id="@+id/toggle_raw"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:textOff="Raw"
+ android:textOn="Raw"
+ android:textSize="@dimen/control_text" />
+
+ <Button
+ android:id="@+id/button_noise"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/nr_unknown"
+ android:textSize="@dimen/control_text" />
+
+ <Button
+ android:id="@+id/button_edge"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/edge_unknown"
+ android:textSize="@dimen/control_text" />
+
+ <ToggleButton
+ android:id="@+id/toggle_face"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:textOff="Face"
+ android:textOn="Face"
+ android:textSize="@dimen/control_text" />
+
+ </LinearLayout>
+
+ </HorizontalScrollView>
+
+ <HorizontalScrollView
+ android:layout_width="fill_parent"
+ android:layout_height="wrap_content">
+
+ <LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:id="@+id/root_view3"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:layout_gravity="left"
+ android:orientation="horizontal">
+
+ <ToggleButton
+ android:id="@+id/toggle_front_cam"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:textOff="Front"
+ android:textOn="Front"
+ android:textSize="@dimen/control_text" />
+
+ <ToggleButton
+ android:id="@+id/toggle_show_3A"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:textOff="3A viz"
+ android:textOn="3A viz"
+ android:textSize="@dimen/control_text" />
+
+ <ToggleButton
+ android:id="@+id/toggle_show_gyro"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:textOff="Gyro"
+ android:textOn="Gyro"
+ android:textSize="@dimen/control_text" />
+
+ <Button
+ android:id="@+id/af_trigger"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/aftrig"
+ android:textSize="@dimen/control_text" />
+
+ <Button
+ android:id="@+id/gallery"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/gallery"
+ android:textSize="@dimen/control_text" />
+ </LinearLayout>
+
+
+
+ </HorizontalScrollView>
+
+</LinearLayout> \ No newline at end of file
diff --git a/res/values-sw600dp/dimens.xml b/res/values-sw600dp/dimens.xml
new file mode 100644
index 0000000..44f01db
--- /dev/null
+++ b/res/values-sw600dp/dimens.xml
@@ -0,0 +1,8 @@
+<resources>
+
+ <!--
+ Customize dimensions originally defined in res/values/dimens.xml (such as
+ screen margins) for sw600dp devices (e.g. 7" tablets) here.
+ -->
+
+</resources>
diff --git a/res/values-sw720dp-land/dimens.xml b/res/values-sw720dp-land/dimens.xml
new file mode 100644
index 0000000..61e3fa8
--- /dev/null
+++ b/res/values-sw720dp-land/dimens.xml
@@ -0,0 +1,9 @@
+<resources>
+
+ <!--
+ Customize dimensions originally defined in res/values/dimens.xml (such as
+ screen margins) for sw720dp devices (e.g. 10" tablets) in landscape here.
+ -->
+ <dimen name="activity_horizontal_margin">128dp</dimen>
+
+</resources>
diff --git a/res/values-v11/styles.xml b/res/values-v11/styles.xml
new file mode 100644
index 0000000..3c02242
--- /dev/null
+++ b/res/values-v11/styles.xml
@@ -0,0 +1,11 @@
+<resources>
+
+ <!--
+ Base application theme for API 11+. This theme completely replaces
+ AppBaseTheme from res/values/styles.xml on API 11+ devices.
+ -->
+ <style name="AppBaseTheme" parent="android:Theme.Holo.Light">
+ <!-- API 11 theme customizations can go here. -->
+ </style>
+
+</resources>
diff --git a/res/values-v14/styles.xml b/res/values-v14/styles.xml
new file mode 100644
index 0000000..a91fd03
--- /dev/null
+++ b/res/values-v14/styles.xml
@@ -0,0 +1,12 @@
+<resources>
+
+ <!--
+ Base application theme for API 14+. This theme completely replaces
+ AppBaseTheme from BOTH res/values/styles.xml and
+ res/values-v11/styles.xml on API 14+ devices.
+ -->
+ <style name="AppBaseTheme" parent="android:Theme.Holo.Light.DarkActionBar">
+ <!-- API 14 theme customizations can go here. -->
+ </style>
+
+</resources>
diff --git a/res/values/colors.xml b/res/values/colors.xml
new file mode 100644
index 0000000..47fef74
--- /dev/null
+++ b/res/values/colors.xml
@@ -0,0 +1,11 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+ <color name="horiz_rule_color">#777777</color>
+ <color name="black">#000000</color>
+ <color name="dark_blue">#000066</color>
+ <color name="face_color">#FFFF00</color>
+ <color name="capture_button_color">#191919</color>
+ <color name="exp_iso_color">#FFFFFF</color>
+ <color name="hud_color">#DDDDDD</color>
+ <color name="message_color">#444444</color>
+</resources> \ No newline at end of file
diff --git a/res/values/dimens.xml b/res/values/dimens.xml
new file mode 100644
index 0000000..ec52df0
--- /dev/null
+++ b/res/values/dimens.xml
@@ -0,0 +1,10 @@
+<resources>
+
+ <!-- Default screen margins, per the Android Design guidelines. -->
+ <dimen name="activity_horizontal_margin">16dp</dimen>
+ <dimen name="activity_vertical_margin">16dp</dimen>
+ <dimen name="face_circle_stroke">0.65dp</dimen>
+ <dimen name="hud_stroke">0.33dp</dimen>
+ <dimen name="control_text">12dp</dimen>
+
+</resources>
diff --git a/res/values/strings.xml b/res/values/strings.xml
new file mode 100644
index 0000000..cf216cf
--- /dev/null
+++ b/res/values/strings.xml
@@ -0,0 +1,16 @@
+<?xml version="1.0" encoding="utf-8"?>
+<resources>
+
+ <string name="app_name">Snappy2</string>
+ <string name="action_settings">Settings</string>
+ <string name="hello_world">This is a snappy app!</string>
+ <string name="jpeg_capture">JPEG</string>
+ <string name="aftrig">AF trig</string>
+ <string name="nr_unknown">NR --</string>
+ <string name="edge_unknown">Edge --</string>
+ <string name="minus">–</string>
+ <string name="plus">+</string>
+ <string name="gallery">Gallery</string>
+ <string name="Reprocessing">–– Reprocessing ––</string>
+
+</resources>
diff --git a/res/values/styles.xml b/res/values/styles.xml
new file mode 100644
index 0000000..6ce89c7
--- /dev/null
+++ b/res/values/styles.xml
@@ -0,0 +1,20 @@
+<resources>
+
+ <!--
+ Base application theme, dependent on API level. This theme is replaced
+ by AppBaseTheme from res/values-vXX/styles.xml on newer devices.
+ -->
+ <style name="AppBaseTheme" parent="android:Theme.Light">
+ <!--
+ Theme customizations available in newer API levels can go in
+ res/values-vXX/styles.xml, while customizations related to
+ backward-compatibility can go here.
+ -->
+ </style>
+
+ <!-- Application theme. -->
+ <style name="AppTheme" parent="AppBaseTheme">
+ <!-- All customizations that are NOT specific to a particular API-level can go here. -->
+ </style>
+
+</resources>
diff --git a/src/com/google/snappy/BitmapUtility.java b/src/com/google/snappy/BitmapUtility.java
new file mode 100644
index 0000000..682bcaa
--- /dev/null
+++ b/src/com/google/snappy/BitmapUtility.java
@@ -0,0 +1,54 @@
+package com.google.snappy;
+
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.graphics.Matrix;
+import android.media.Image;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Some Bitmap utility functions.
+ */
+public class BitmapUtility {
+
+ public static Bitmap bitmapFromJpeg(byte[] data) {
+ // 32K buffer.
+ byte[] decodeBuffer = new byte[32 * 1024]; // 32K buffer.
+
+ BitmapFactory.Options opts = new BitmapFactory.Options();
+ opts.inSampleSize = 16; // 3264 / 16 = 204.
+ opts.inTempStorage = decodeBuffer;
+ Bitmap b = BitmapFactory.decodeByteArray(data, 0, data.length, opts);
+
+ return rotatedBitmap(b);
+ }
+
+ public static Bitmap bitmapFromYuvImage(Image img) {
+ int w = img.getWidth();
+ int h = img.getHeight();
+ ByteBuffer buf0 = img.getPlanes()[0].getBuffer();
+ int len = buf0.capacity();
+ int[] colors = new int[len];
+ int alpha = 255 << 24;
+ int green;
+ for (int i = 0; i < len; i++) {
+ green = ((int) buf0.get(i)) & 255;
+ colors[i] = green << 16 | green << 8 | green | alpha;
+ }
+ Bitmap b = Bitmap.createBitmap(colors, w, h, Bitmap.Config.ARGB_8888);
+
+ return rotatedBitmap(b);
+ }
+
+ /**
+ * Returns parameter bitmap rotated 90 degrees
+ */
+ private static Bitmap rotatedBitmap(Bitmap b) {
+ Matrix mat = new Matrix();
+ mat.postRotate(90);
+ Bitmap b2 = Bitmap.createBitmap(b, 0, 0,b.getWidth(),b.getHeight(), mat, true);
+ return b2;
+ }
+
+}
diff --git a/src/com/google/snappy/CameraInfoCache.java b/src/com/google/snappy/CameraInfoCache.java
new file mode 100644
index 0000000..9de5464
--- /dev/null
+++ b/src/com/google/snappy/CameraInfoCache.java
@@ -0,0 +1,214 @@
+package com.google.snappy;
+
+import android.graphics.ImageFormat;
+import android.graphics.Rect;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.os.Build;
+import android.util.Log;
+import android.util.Size;
+
+/**
+ * Caches (static) information about the first/main camera.
+ * Convenience functions represent data from CameraCharacteristics.
+ */
+
+public class CameraInfoCache {
+ private static final String TAG = "SNAPPY_CAMINFO";
+
+ public static final boolean IS_NEXUS_5 = "hammerhead".equalsIgnoreCase(Build.DEVICE);
+ public static final boolean IS_NEXUS_6 = "shamu".equalsIgnoreCase(Build.DEVICE);
+ public static final boolean IS_NEXUS_9 = "flounder".equalsIgnoreCase(Build.DEVICE);
+ public static final boolean IS_ANGLER = "angler".equalsIgnoreCase(Build.DEVICE);
+ public static final boolean IS_BULLHEAD = "bullhead".equalsIgnoreCase(Build.DEVICE);
+ public static final boolean IS_SAMSUNG_S6 = "zerofltevzw".equalsIgnoreCase(Build.DEVICE);
+ public static final boolean IS_LG_G4 = "p1_lgu_kr".equalsIgnoreCase(Build.PRODUCT);
+
+ public int[] noiseModes;
+ public int[] edgeModes;
+
+ private CameraCharacteristics mCameraCharacteristics;
+ private String mCameraId;
+ private Size mLargestYuvSize;
+ private Size mLargestJpegSize;
+ private Size mRawSize;
+ private Rect mActiveArea;
+ private Integer mSensorOrientation;
+ private Integer mRawFormat;
+ private int mBestFaceMode;
+ private boolean mCamera2FullModeAvailable;
+
+ /**
+ * Constructor.
+ */
+ public CameraInfoCache(CameraManager cameraMgr, boolean useFrontCamera) {
+ String[] cameralist;
+ try {
+ cameralist = cameraMgr.getCameraIdList();
+ for (String id : cameralist) {
+ mCameraCharacteristics = cameraMgr.getCameraCharacteristics(id);
+ Integer facing = mCameraCharacteristics.get(CameraCharacteristics.LENS_FACING);
+ if (facing == (useFrontCamera ? CameraMetadata.LENS_FACING_FRONT : CameraMetadata.LENS_FACING_BACK)) {
+ mCameraId = id;
+ break;
+ }
+ }
+ } catch (Exception e) {
+ Log.e(TAG, "ERROR: Could not get camera ID list / no camera information is available: " + e);
+ return;
+ }
+ // Should have mCameraId as this point.
+ if (mCameraId == null) {
+ Log.e(TAG, "ERROR: Could not find a suitable rear or front camera.");
+ return;
+ }
+
+ // Store YUV_420_888, JPEG, Raw info
+ StreamConfigurationMap map = mCameraCharacteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+ int[] formats = map.getOutputFormats();
+ long lowestStall = Long.MAX_VALUE;
+ for (int i = 0; i < formats.length; i++) {
+ if (formats[i] == ImageFormat.YUV_420_888) {
+ mLargestYuvSize = returnLargestSize(map.getOutputSizes(formats[i]));
+ }
+ if (formats[i] == ImageFormat.JPEG) {
+ mLargestJpegSize = returnLargestSize(map.getOutputSizes(formats[i]));
+ }
+ if (formats[i] == ImageFormat.RAW10 || formats[i] == ImageFormat.RAW_SENSOR) { // TODO: Add RAW12
+ Size size = returnLargestSize(map.getOutputSizes(formats[i]));
+ long stall = map.getOutputStallDuration(formats[i], size);
+ if (stall < lowestStall) {
+ mRawFormat = formats[i];
+ mRawSize = size;
+ lowestStall = stall;
+ }
+ }
+ }
+
+ mActiveArea = mCameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+
+ // Compute best face mode.
+ int[] faceModes = mCameraCharacteristics.get(CameraCharacteristics.STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES);
+ for (int i=0; i<faceModes.length; i++) {
+ if (faceModes[i] > mBestFaceMode) {
+ mBestFaceMode = faceModes[i];
+ }
+ }
+ edgeModes = mCameraCharacteristics.get(CameraCharacteristics.EDGE_AVAILABLE_EDGE_MODES);
+ noiseModes = mCameraCharacteristics.get(CameraCharacteristics.NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES);
+
+ // Misc stuff.
+ mCamera2FullModeAvailable = mCameraCharacteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL) ==
+ CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
+
+ mSensorOrientation = mCameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
+ }
+
+ public int sensorOrientation() {
+ return mSensorOrientation;
+ }
+
+ public boolean isCamera2FullModeAvailable() {
+ return mCamera2FullModeAvailable;
+ }
+
+ public float getDiopterLow() {
+ if (IS_NEXUS_6) {
+ return 0f;
+ }
+ return 0f; // Infinity
+ }
+
+ public float getDiopterHi() {
+ if (IS_NEXUS_6) {
+ return 14.29f;
+ }
+ return 16f;
+ }
+
+ /**
+ * Private utility function.
+ */
+ private Size returnLargestSize(Size[] sizes) {
+ Size largestSize = null;
+ int area = 0;
+ for (int j = 0; j < sizes.length; j++) {
+ if (sizes[j].getHeight() * sizes[j].getWidth() > area) {
+ area = sizes[j].getHeight() * sizes[j].getWidth();
+ largestSize = sizes[j];
+ }
+ }
+ return largestSize;
+ }
+
+ public int bestFaceDetectionMode() {
+ return mBestFaceMode;
+ }
+
+ public int faceOffsetX() {
+ return (mActiveArea.width() - mLargestYuvSize.getWidth()) / 2;
+ }
+
+ public int faceOffsetY() {
+ return (mActiveArea.height() - mLargestYuvSize.getHeight()) / 2;
+ }
+
+ public int activeAreaWidth() {
+ return mActiveArea.width();
+ }
+
+ public int activeAreaHeight() {
+ return mActiveArea.height();
+ }
+
+ public Rect getActiveAreaRect() {
+ return mActiveArea;
+ }
+
+ public String getCameraId() {
+ return mCameraId;
+ }
+
+ public Size getPreviewSize() {
+ float aspect = mLargestYuvSize.getWidth() / mLargestYuvSize.getHeight();
+ aspect = aspect > 1f ? aspect : 1f / aspect;
+ if (aspect > 1.6) {
+ return new Size(1920, 1080); // TODO: Check available resolutions.
+ }
+ if (IS_ANGLER || IS_BULLHEAD) {
+ return new Size(1440, 1080);
+ }
+ return new Size(1280, 960); // TODO: Check available resolutions.
+ }
+
+ public Size getJpegStreamSize() {
+ return mLargestJpegSize;
+ }
+
+ public Size getYuvStream1Size() {
+ return mLargestYuvSize;
+ }
+
+ public Size getYuvStream2Size() {
+ return new Size(320, 240);
+ }
+
+ public boolean rawAvailable() {
+ return mRawSize != null;
+ }
+ public boolean reprocessingAvailable() {
+ // TODO: Actually query capabilities list.
+ return (IS_ANGLER || IS_BULLHEAD);
+ }
+
+ public Integer getRawFormat() {
+ return mRawFormat;
+ }
+
+ public Size getRawStreamSize() {
+ return mRawSize;
+ }
+
+}
diff --git a/src/com/google/snappy/GyroListener.java b/src/com/google/snappy/GyroListener.java
new file mode 100644
index 0000000..2d70a06
--- /dev/null
+++ b/src/com/google/snappy/GyroListener.java
@@ -0,0 +1,10 @@
+package com.google.snappy;
+
+/**
+ * Created by andyhuibers on 7/21/15.
+ */
+public interface GyroListener {
+
+ void updateGyroAngles(float[] gyroAngles);
+
+}
diff --git a/src/com/google/snappy/GyroOperations.java b/src/com/google/snappy/GyroOperations.java
new file mode 100644
index 0000000..a3326bb
--- /dev/null
+++ b/src/com/google/snappy/GyroOperations.java
@@ -0,0 +1,92 @@
+package com.google.snappy;
+
+import android.hardware.Sensor;
+import android.hardware.SensorEvent;
+import android.hardware.SensorEventListener;
+import android.hardware.SensorManager;
+import android.util.Log;
+
+import java.util.ArrayDeque;
+
+/**
+ * Created by andyhuibers on 7/21/15.
+ *
+ * Put all the Gyro stuff here.
+ */
+public class GyroOperations {
+ private static final String TAG = "SNAPPY_GYRO";
+
+ private SensorManager mSensorManager;
+ private GyroListener mListener;
+
+ private SensorEventListener mSensorEventListener = new SensorEventListener() {
+ @Override
+ public void onSensorChanged(SensorEvent event) {
+ delayGyroData(event);
+ }
+ @Override
+ public void onAccuracyChanged(Sensor sensor, int accuracy) {
+ }
+ };
+
+ public GyroOperations(SensorManager sensorManager) {
+ mSensorManager = sensorManager;
+ }
+
+ public void startListening(GyroListener listener) {
+ mSensorManager.registerListener(mSensorEventListener, mSensorManager.getDefaultSensor(Sensor.TYPE_GYROSCOPE), SensorManager.SENSOR_DELAY_FASTEST);
+ mListener = listener;
+ }
+
+ public void stopListening() {
+ mSensorManager.unregisterListener(mSensorEventListener);
+ }
+
+ // We need to make a copy of SensorEvent so we can put it in our delay-line.
+ class GyroEvent2D {
+ public long timestamp;
+ public final float[] values = new float[2];
+
+ public GyroEvent2D(SensorEvent event) {
+ this.timestamp = event.timestamp;
+ this.values[0] = event.values[0];
+ this.values[1] = event.values[1];
+ }
+ }
+
+ private long mGyroLastTimestamp = 0;
+ private float[] mGyroAngle = new float[]{0f, 0f}; // radians, X and Y axes.
+ // Gyro arrives at 230 Hz on N6: 23 samples in 100 ms. Viewfinder latency is 70 ms. Delay about 15 samples.
+ private ArrayDeque<GyroEvent2D> mSensorDelayLine = new ArrayDeque<>();
+ private static final int DELAY_SIZE = 10;
+
+ void delayGyroData(SensorEvent event) {
+ mSensorDelayLine.addLast(new GyroEvent2D(event));
+ if (mSensorDelayLine.size() < DELAY_SIZE) {
+ return;
+ }
+ GyroEvent2D delayedEvent = mSensorDelayLine.removeFirst();
+ integrateGyroForPosition(delayedEvent);
+ }
+
+ void integrateGyroForPosition(GyroEvent2D event) {
+ if (mGyroLastTimestamp == 0) {
+ mGyroLastTimestamp = event.timestamp;
+ return;
+ }
+ long dt = (event.timestamp - mGyroLastTimestamp) / 1000; // microseconds between samples
+ if (dt > 10000) { // below 100 Hz
+ Log.v(TAG, " ===============> GYRO STALL <==============");
+ }
+ mGyroAngle[0] += event.values[0] * 0.000001f * dt;
+ mGyroAngle[1] += event.values[1] * 0.000001f * dt;
+ mGyroLastTimestamp = event.timestamp;
+
+ // TODO: Add UI
+ //updateOrientationUI(mGyroAngle, dt);
+ //Log.v(TAG, String.format("Gyro: theta_x = %.2f theta_y = %.2f dt = %d", mGyroAngle[0]*180f/3.14f, mGyroAngle[1]*180f/3.14f, dt));
+
+ mListener.updateGyroAngles(mGyroAngle);
+ }
+
+}
diff --git a/src/com/google/snappy/MainActivity.java b/src/com/google/snappy/MainActivity.java
new file mode 100644
index 0000000..63704aa
--- /dev/null
+++ b/src/com/google/snappy/MainActivity.java
@@ -0,0 +1,545 @@
+package com.google.snappy;
+
+import android.content.Intent;
+import android.graphics.Color;
+import android.hardware.SensorManager;
+import android.os.Bundle;
+import android.app.Activity;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.SystemClock;
+import android.util.DisplayMetrics;
+import android.util.Log;
+import android.util.Size;
+import android.view.Gravity;
+import android.view.SurfaceHolder;
+import android.view.SurfaceView;
+import android.view.View;
+import android.view.WindowManager;
+import android.widget.Button;
+import android.widget.FrameLayout;
+import android.widget.LinearLayout;
+import android.widget.TextView;
+import android.widget.Toast;
+import android.widget.ToggleButton;
+
+
+/**
+ * A minimum camera app.
+ * To keep it simple: portrait mode only.
+ */
+public class MainActivity extends Activity implements MyCameraInterface.MyCameraCallback, SurfaceHolder.Callback {
+ private static final String TAG = "SNAPPY_UI";
+
+ private static final boolean LOG_FRAME_DATA = false;
+ private static final int AF_TRIGGER_HOLD_MILLIS = 4000;
+ private static final boolean STARTUP_FULL_YUV_ON = true;
+ private static final boolean START_WITH_FRONT_CAMERA = false;
+
+ private SurfaceView mPreviewView;
+ private SurfaceHolder mPreviewHolder;
+ private PreviewOverlay mPreviewOverlay;
+ private FrameLayout mPreviewFrame;
+
+ private TextView mLabel1;
+ private TextView mLabel2;
+ private ToggleButton mToggleFrontCam; // Use front camera
+ private ToggleButton mToggleYuvFull; // full YUV
+ private ToggleButton mToggleYuvVga; // VGA YUV
+ private ToggleButton mToggleRaw; // raw10
+ private Button mButtonNoiseMode; // Noise reduction mode
+ private Button mButtonEdgeModeReprocess; // Edge mode
+ private Button mButtonNoiseModeReprocess; // Noise reduction mode for reprocessing
+ private Button mButtonEdgeMode; // Edge mode for reprocessing
+ private ToggleButton mToggleFace; // Face detection
+ private ToggleButton mToggleShow3A; // 3A info
+ private ToggleButton mToggleGyro; // Gyro
+ private ToggleButton mToggleBurstJpeg;
+ private ToggleButton mToggleSaveSdCard;
+ private LinearLayout mReprocessingGroup;
+ private Handler mMainHandler;
+ private MyCameraInterface mCamera;
+
+ // Used for saving JPEGs.
+ private HandlerThread mUtilityThread;
+ private Handler mUtilityHandler;
+
+    // Single click-listener shared by every capture-related toggle/button: pushes
+    // the complete UI state down into the camera layer. Pass null as the view to
+    // initialize (all groups are treated as changed).
+    View.OnClickListener mTransferUiStateToCameraState = new View.OnClickListener() {
+        @Override
+        public void onClick(View view) {
+            // set capture flow.
+            if (view == mToggleYuvFull || view == mToggleYuvVga || view == mToggleRaw ||
+                    view == mButtonNoiseMode || view == mButtonEdgeMode || view == mToggleFace || view == null)
+                mCamera.setCaptureFlow(
+                        mToggleYuvFull.isChecked(),
+                        mToggleYuvVga.isChecked(),
+                        mToggleRaw.isChecked(),
+                        view == mButtonNoiseMode, /* cycle noise reduction mode */
+                        view == mButtonEdgeMode, /* cycle edge mode */
+                        mToggleFace.isChecked()
+                );
+            // set reprocessing flow.
+            if (view == mButtonNoiseModeReprocess || view == mButtonEdgeModeReprocess || view == null) {
+                mCamera.setReprocessingFlow(view == mButtonNoiseModeReprocess, view == mButtonEdgeModeReprocess);
+            }
+            // Reprocessing controls are only shown when the full-size YUV stream is
+            // on and the device supports reprocessing.
+            int reprocessingViz = mToggleYuvFull.isChecked() && mCamera.isReprocessingAvailable() ? View.VISIBLE : View.GONE;
+            mReprocessingGroup.setVisibility(reprocessingViz);
+
+            // if just turned off YUV1 stream, end burst.
+            if (view == mToggleYuvFull && !mToggleYuvFull.isChecked()) {
+                mToggleBurstJpeg.setChecked(false);
+                mCamera.setBurst(false);
+            }
+
+            if (view == mToggleBurstJpeg) {
+                mCamera.setBurst(mToggleBurstJpeg.isChecked());
+            }
+
+            if (view == mToggleShow3A || view == null) {
+                mPreviewOverlay.show3AInfo(mToggleShow3A.isChecked());
+            }
+            if (view == mToggleGyro || view == null) {
+                if (mToggleGyro.isChecked()) {
+                    startGyroDisplay();
+                } else {
+                    stopGyroDisplay();
+                }
+            }
+        }
+    };
+
+    // Activity entry point. Camera open is kicked off before any UI inflation to
+    // minimize time-to-preview ("go speed racer"); the UI is wired up afterwards,
+    // then startCamera() completes the pipeline.
+    // NOTE(review): super.onCreate() is deliberately called LAST here, presumably
+    // to keep the camera-open ahead of framework work — unconventional; confirm
+    // this ordering is intentional and safe on all supported OS versions.
+    @Override
+    protected void onCreate(Bundle savedInstanceState) {
+        Log.v(TAG, "onCreate");
+        MyTimer.t0 = SystemClock.elapsedRealtime();
+
+        // Go speed racer.
+        openCamera(START_WITH_FRONT_CAMERA);
+
+        // Initialize UI.
+        setContentView(R.layout.activity_main);
+        mLabel1 = (TextView) findViewById(R.id.label1);
+        mLabel1.setText("Snappy initializing.");
+        mLabel2 = (TextView) findViewById(R.id.label2);
+        mLabel2.setText(" ...");
+        Button mAfTriggerButton = (Button) findViewById(R.id.af_trigger);
+        mToggleFrontCam = (ToggleButton) findViewById(R.id.toggle_front_cam);
+        mToggleFrontCam.setChecked(START_WITH_FRONT_CAMERA);
+        mToggleYuvFull = (ToggleButton) findViewById(R.id.toggle_yuv_full);
+        mToggleYuvVga = (ToggleButton) findViewById(R.id.toggle_yuv_vga);
+        mToggleRaw = (ToggleButton) findViewById(R.id.toggle_raw);
+        mButtonNoiseMode = (Button) findViewById(R.id.button_noise);
+        mButtonEdgeMode = (Button) findViewById(R.id.button_edge);
+        mButtonNoiseModeReprocess = (Button) findViewById(R.id.button_noise_reprocess);
+        mButtonEdgeModeReprocess = (Button) findViewById(R.id.button_edge_reprocess);
+
+        mToggleFace = (ToggleButton) findViewById(R.id.toggle_face);
+        mToggleShow3A = (ToggleButton) findViewById(R.id.toggle_show_3A);
+        mToggleGyro = (ToggleButton) findViewById(R.id.toggle_show_gyro);
+        Button mGetJpegButton = (Button) findViewById(R.id.jpeg_capture);
+        Button mGalleryButton = (Button) findViewById(R.id.gallery);
+
+        mToggleBurstJpeg = (ToggleButton) findViewById(R.id.toggle_burst_jpeg);
+        mToggleSaveSdCard = (ToggleButton) findViewById(R.id.toggle_save_sdcard);
+        mReprocessingGroup = (LinearLayout) findViewById(R.id.reprocessing_controls);
+        mPreviewView = (SurfaceView) findViewById(R.id.preview_view);
+        mPreviewHolder = mPreviewView.getHolder();
+        mPreviewHolder.addCallback(this);
+        mPreviewOverlay = (PreviewOverlay) findViewById(R.id.preview_overlay_view);
+        mPreviewFrame = (FrameLayout) findViewById(R.id.preview_frame);
+
+        // Set UI listeners.
+        mAfTriggerButton.setOnClickListener(new View.OnClickListener() {
+            @Override
+            public void onClick(View view) {
+                doAFScan();
+            }
+        });
+        mGetJpegButton.setOnClickListener(new View.OnClickListener() {
+            @Override
+            public void onClick(View view) {
+                hitCaptureButton();
+            }
+        });
+        mGalleryButton.setOnClickListener(new View.OnClickListener() {
+            @Override
+            public void onClick(View view) {
+                launchPhotosViewer();
+            }
+        });
+        mToggleFrontCam.setOnClickListener(new View.OnClickListener() {
+            @Override
+            public void onClick(View view) {
+                Log.v(TAG, "switchCamera()");
+                MyTimer.t0 = SystemClock.elapsedRealtime();
+                // ToggleButton isChecked state will determine which camera is started.
+                openCamera(mToggleFrontCam.isChecked());
+                startCamera();
+            }
+        });
+        // All capture-state controls funnel through the shared listener.
+        mToggleYuvFull.setOnClickListener(mTransferUiStateToCameraState);
+        mToggleYuvVga.setOnClickListener(mTransferUiStateToCameraState);
+        mToggleRaw.setOnClickListener(mTransferUiStateToCameraState);
+        mButtonNoiseMode.setOnClickListener(mTransferUiStateToCameraState);
+        mButtonEdgeMode.setOnClickListener(mTransferUiStateToCameraState);
+        mButtonNoiseModeReprocess.setOnClickListener(mTransferUiStateToCameraState);
+        mButtonEdgeModeReprocess.setOnClickListener(mTransferUiStateToCameraState);
+        mToggleFace.setOnClickListener(mTransferUiStateToCameraState);
+        mToggleShow3A.setOnClickListener(mTransferUiStateToCameraState);
+        mToggleGyro.setOnClickListener(mTransferUiStateToCameraState);
+        mToggleBurstJpeg.setOnClickListener(mTransferUiStateToCameraState);
+        mToggleSaveSdCard.setOnClickListener(mTransferUiStateToCameraState);
+        mToggleSaveSdCard.setChecked(true);
+
+        mMainHandler = new Handler(this.getApplicationContext().getMainLooper());
+
+        // Can start camera now that we have the above initialized.
+        startCamera();
+
+        // General utility thread for e.g. saving JPEGs.
+        mUtilityThread = new HandlerThread("UtilityThread");
+        mUtilityThread.start();
+        mUtilityHandler = new Handler(mUtilityThread.getLooper());
+
+        // --- PRINT REPORT ---
+        //MyDeviceReport.printReport(this, false);
+        super.onCreate(savedInstanceState);
+    }
+
+    // Open camera. No UI required. Closes any previously open camera first so
+    // this is also the camera-switch path (front/back toggle).
+    private void openCamera(boolean frontCamera) {
+        // Close previous camera if required.
+        if (mCamera != null) {
+            mCamera.closeCamera();
+        }
+        // --- SET UP CAMERA ---
+        mCamera = new MyApi2Camera(this, frontCamera);
+        mCamera.setCallback(this);
+        mCamera.openCamera();
+    }
+
+    // Initialize camera related UI and start camera; call openCamera first.
+    // Two paths: if the preview Surface already exists, start preview directly;
+    // otherwise size the preview frame and rely on setFixedSize() to trigger
+    // surfaceChanged(), which starts the preview.
+    private void startCamera() {
+        // --- SET UP USER INTERFACE ---
+        mToggleYuvFull.setChecked(STARTUP_FULL_YUV_ON);
+        mToggleFace.setChecked(true);
+        mToggleRaw.setVisibility(mCamera.isRawAvailable() ? View.VISIBLE : View.GONE);
+        mToggleShow3A.setChecked(true);
+        // Push initial UI state into the camera layer (null == initialize all).
+        mTransferUiStateToCameraState.onClick(null);
+
+        // --- SET UP PREVIEW AND OPEN CAMERA ---
+
+        if (mPreviewSurfaceValid) {
+            mCamera.startPreview(mPreviewHolder.getSurface());
+        } else {
+            // Note that preview is rotated 90 degrees from camera. We just hard code this now.
+            Size previewSize = mCamera.getPreviewSize();
+            // Render in top 12 x 9 of 16 x 9 display.
+            int renderHeight = 3 * displayHeight() / 4;
+            int renderWidth = renderHeight * previewSize.getHeight() / previewSize.getWidth();
+            int renderPad = (displayWidth() - renderWidth) / 2;
+
+            mPreviewFrame.setPadding(renderPad, 0, 0, 0);
+            mPreviewFrame.setLayoutParams(new LinearLayout.LayoutParams(renderWidth + renderPad, renderHeight));
+            // setFixedSize() will trigger surfaceChanged() callback below, which will start preview.
+            mPreviewHolder.setFixedSize(previewSize.getHeight(), previewSize.getWidth());
+        }
+    }
+
+    // Set once the preview Surface has a valid size; checked by startCamera().
+    boolean mPreviewSurfaceValid = false;
+
+    // SurfaceHolder callback: the preview Surface is ready (or resized), so the
+    // camera preview can be (re)started on it.
+    @Override
+    public synchronized void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
+        Log.v(TAG, String.format("surfaceChanged: format=%x w=%d h=%d", format, width, height));
+        mPreviewSurfaceValid = true;
+        mCamera.startPreview(mPreviewHolder.getSurface());
+    }
+
+    // Restores continuous autofocus after a one-shot AF trigger has been held.
+    Runnable mReturnToCafRunnable = new Runnable() {
+        @Override
+        public void run() {
+            mCamera.setCAF();
+        }
+    };
+
+    // Trigger a single AF scan; any previously scheduled return-to-CAF is
+    // cancelled and re-armed for AF_TRIGGER_HOLD_MILLIS from now.
+    private void doAFScan() {
+        mCamera.triggerAFScan();
+        mMainHandler.removeCallbacks(mReturnToCafRunnable);
+        mMainHandler.postDelayed(mReturnToCafRunnable, AF_TRIGGER_HOLD_MILLIS);
+    }
+
+ private int displayWidth() {
+ DisplayMetrics metrics = new DisplayMetrics();
+ this.getWindowManager().getDefaultDisplay().getRealMetrics(metrics);
+ return metrics.widthPixels;
+ }
+
+ private int displayHeight() {
+ DisplayMetrics metrics = new DisplayMetrics();
+ this.getWindowManager().getDefaultDisplay().getRealMetrics(metrics);
+ return metrics.heightPixels;
+ }
+
+    // Lifecycle: keep the screen awake while the camera UI is in the foreground.
+    @Override
+    public void onResume() {
+        Log.v(TAG, "onResume");
+        super.onResume();
+        // Leave screen on.
+        getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
+    }
+
+    // Lifecycle: release the camera and tear down. This app does not support
+    // backgrounding — finish() is called so the next launch restarts cleanly.
+    @Override
+    public void onPause() {
+        Log.v(TAG, "onPause");
+        mCamera.closeCamera();
+
+        // Cancel any pending AF operations.
+        mMainHandler.removeCallbacks(mReturnToCafRunnable);
+        stopGyroDisplay(); // No-op if not running.
+        super.onPause();
+        // Close app.
+        finish();
+    }
+
+    // Camera callback: warn (top-of-screen toast) that the device lacks
+    // camera2 HARDWARE_LEVEL_FULL support.
+    public void noCamera2Full() {
+        final Toast warning = Toast.makeText(this,
+                "WARNING: this camera does not support camera2 HARDWARE_LEVEL_FULL.", Toast.LENGTH_LONG);
+        warning.setGravity(Gravity.TOP, 0, 0);
+        warning.show();
+    }
+
+ @Override
+ public void setNoiseEdgeText(final String nrMode, final String edgeMode) {
+ mMainHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mButtonNoiseMode.setText(nrMode);
+ mButtonEdgeMode.setText(edgeMode);
+ }
+ });
+ }
+
+ @Override
+ public void setNoiseEdgeTextForReprocessing(final String nrMode, final String edgeMode) {
+ mMainHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mButtonNoiseModeReprocess.setText(nrMode);
+ mButtonEdgeModeReprocess.setText(edgeMode);
+ }
+ });
+ }
+
+    // Count of JPEGs received this session, and the arrival time of the previous
+    // one (for the inter-shot interval shown in the toast).
+    int mJpegCounter = 0;
+    long mJpegMillis = 0;
+
+    // Camera callback: a finished JPEG arrived. If SD-card saving is enabled the
+    // write happens on the utility thread; the confirmation toast is always
+    // posted back to the main thread.
+    @Override
+    public void jpegAvailable(final byte[] jpegData, final int x, final int y) {
+        Log.v(TAG, "JPEG returned, size = " + jpegData.length);
+        long now = SystemClock.elapsedRealtime();
+        final long dt = mJpegMillis > 0 ? now - mJpegMillis : 0;
+        mJpegMillis = now;
+
+        if (mToggleSaveSdCard.isChecked()) {
+            mUtilityHandler.post(new Runnable() {
+                @Override
+                public void run() {
+                    final String result = MediaSaver.saveJpeg(getApplicationContext(), jpegData, getContentResolver());
+                    mMainHandler.post(new Runnable() {
+                        @Override
+                        public void run() {
+                            fileNameToast(String.format("Saved %dx%d and %d bytes JPEG to %s in %d ms.", x, y, jpegData.length, result, dt));
+                        }
+                    });
+                }
+            });
+        } else {
+            mMainHandler.post(new Runnable() {
+                @Override
+                public void run() {
+                    fileNameToast(String.format("Processing JPEG #%d %dx%d and %d bytes in %d ms.", ++mJpegCounter, x, y, jpegData.length, dt));
+                }
+            });
+        }
+    }
+
+ @Override
+ public void receivedFirstFrame() {
+ mMainHandler.post(new Runnable() {
+ @Override
+ public void run() {
+ mPreviewView.setBackgroundColor(Color.TRANSPARENT);
+ }
+ });
+ }
+
+    // Most recently shown toast; cancelled before showing the next so status
+    // messages replace each other instead of queueing.
+    Toast mToast;
+
+    // Show a short top-of-screen status toast, replacing any toast on screen.
+    public void fileNameToast(String s) {
+        if (mToast != null) {
+            mToast.cancel();
+        }
+        mToast = Toast.makeText(this, s, Toast.LENGTH_SHORT);
+        mToast.setGravity(Gravity.TOP, 0, 0);
+        mToast.show();
+    }
+
+    // Camera callback, once per preview frame: pushes face/3A overlay data and
+    // the status line to the UI thread. Invoked off the main thread.
+    @Override
+    public void frameDataAvailable(final NormalizedFace[] faces, final float normExposure, final float normLens, float fps, int iso, final int afState, int aeState, int awbState) {
+        mMainHandler.post(new Runnable() {
+            @Override
+            public void run() {
+                mPreviewOverlay.setFrameData(faces, normExposure, normLens, afState);
+            }
+        });
+        // Build info string.
+        String ae = aeModeToString(aeState);
+        String af = afModeToString(afState);
+        String awb = awbModeToString(awbState);
+        final String info = String.format(" %2.0f FPS%5d ISO AF:%s AE:%s AWB:%s", fps, iso, af, ae, awb);
+        mLastInfo = info;
+
+        if (LOG_FRAME_DATA && faces != null) {
+            Log.v(TAG, "normExposure: " + normExposure);
+            Log.v(TAG, "normLens: " + normLens);
+            for (int i = 0; i < faces.length; ++i) {
+                Log.v(TAG, "Face getBounds: " + faces[i].bounds);
+                Log.v(TAG, "Face left eye: " + faces[i].leftEye);
+                Log.v(TAG, "Face right eye: " + faces[i].rightEye);
+                Log.v(TAG, "Face mouth: " + faces[i].mouth);
+            }
+        }
+
+        // Status line
+        mMainHandler.post(new Runnable() {
+            @Override
+            public void run() {
+                mLabel1.setText(info);
+            }
+        });
+    }
+
+    // Latest performance metrics shown on label2. mDroppedFrameCount accumulates
+    // across calls; the other two hold the most recent non-null value.
+    Integer mTimeToFirstFrame = 0;
+    Integer mHalWaitTime = 0;
+    Float mDroppedFrameCount = 0f;
+    String mLastInfo;
+
+    // Camera callback: merge whichever metrics are non-null and refresh the
+    // performance status line on the UI thread.
+    @Override
+    public void performanceDataAvailable(Integer timeToFirstFrame, Integer halWaitTime, Float droppedFrameCount) {
+        if (timeToFirstFrame != null) {
+            mTimeToFirstFrame = timeToFirstFrame;
+        }
+        if (halWaitTime != null) {
+            mHalWaitTime = halWaitTime;
+        }
+        if (droppedFrameCount != null) {
+            mDroppedFrameCount += droppedFrameCount;
+        }
+        mMainHandler.post(new Runnable() {
+            @Override
+            public void run() {
+                mLabel2.setText(String.format("TTP %dms HAL %dms Framedrops:%.2f", mTimeToFirstFrame, mHalWaitTime, mDroppedFrameCount));
+            }
+        });
+    }
+
+    // Capture button pressed: request a still capture from the camera layer.
+    private void hitCaptureButton() {
+        Log.v(TAG, "hitCaptureButton");
+        mCamera.takePicture();
+    }
+
+ // Hit Photos button.
+ private void launchPhotosViewer() {
+ Intent intent = new Intent(android.content.Intent.ACTION_VIEW);
+ intent.setType("image/*");
+ intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
+ startActivity(intent);
+ }
+
+    /*********************************
+     * Gyro graphics overlay update. *
+     *********************************/
+    // Lazily created on first use; reused across start/stop cycles.
+    GyroOperations mGyroOperations;
+
+    // Start streaming integrated gyro angles into the preview overlay grid.
+    private void startGyroDisplay() {
+        // TODO: Get field of view angles from Camera API.
+        // TODO: Consider turning OIS off.
+        float fovLargeDegrees = 62.7533f; // Nexus 6
+        float fovSmallDegrees = 49.157f; // Nexus 6
+        mPreviewOverlay.setFieldOfView(fovLargeDegrees, fovSmallDegrees);
+
+        if (mGyroOperations == null) {
+            SensorManager sensorManager = (SensorManager) getSystemService(this.SENSOR_SERVICE);
+            mGyroOperations = new GyroOperations(sensorManager);
+        }
+        mGyroOperations.startListening(
+                new GyroListener() {
+                    @Override
+                    public void updateGyroAngles(float[] gyroAngles) {
+                        mPreviewOverlay.setGyroAngles(gyroAngles);
+                    }
+                }
+        );
+
+        mPreviewOverlay.showGyroGrid(true);
+    }
+
+ private void stopGyroDisplay() {
+ if (mGyroOperations != null) {
+ mGyroOperations.stopListening();
+ }
+ mPreviewOverlay.showGyroGrid(false);
+ }
+
+
+    /*******************************************
+     * SurfaceView callbacks just for logging. *
+     *******************************************/
+
+    // Preview start is driven by surfaceChanged(), not here.
+    @Override
+    public void surfaceCreated(SurfaceHolder holder) {
+        Log.v(TAG, "surfaceCreated");
+    }
+
+    // Teardown is handled in onPause(); this callback only logs.
+    @Override
+    public void surfaceDestroyed(SurfaceHolder holder) {
+        Log.v(TAG, "surfaceDestroyed");
+    }
+
+    /*********************
+     * UTILITY FUNCTIONS *
+     *********************/
+
+    // Human-readable AWB state: 1 = scanning, 2 = locked; anything else is
+    // reported numerically.
+    private static String awbModeToString(int mode) {
+        if (mode == 1) {
+            return "scan";
+        }
+        if (mode == 2) {
+            return "lock";
+        }
+        return Integer.toString(mode);
+    }
+
+    // Human-readable AE state: 1 = scanning, 2 = locked; anything else is
+    // reported numerically.
+    private static String aeModeToString(int mode) {
+        if (mode == 1) {
+            return "scan";
+        }
+        if (mode == 2) {
+            return "lock";
+        }
+        return Integer.toString(mode);
+    }
+
+    // Human-readable AF state. Currently reported numerically; known values
+    // (CONTROL_AF_STATE): 1 = passive scan, 2 = passive focused, 6 = not focused.
+    // Map to names here if/when the overlay wants symbolic AF labels.
+    private static String afModeToString(int mode) {
+        return Integer.toString(mode);
+    }
+
+}
diff --git a/src/com/google/snappy/MediaSaver.java b/src/com/google/snappy/MediaSaver.java
new file mode 100644
index 0000000..1cf04e4
--- /dev/null
+++ b/src/com/google/snappy/MediaSaver.java
@@ -0,0 +1,99 @@
+package com.google.snappy;
+
+import android.content.ContentResolver;
+import android.content.ContentValues;
+import android.content.Context;
+import android.content.SharedPreferences;
+import android.os.SystemClock;
+import android.provider.MediaStore;
+import android.util.Log;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+
+/**
+ * This class has methods required to save a JPEG to disk as well as update the
+ * MediaStore database.
+ */
+
+
+public class MediaSaver {
+    private static final String TAG = "Snappy_MediaSaver";
+    private static final String MY_PREFS_NAME = "SnappyPrefs";
+
+    // MediaStore updates can be slow; set false to skip them entirely.
+    private static final boolean UPDATE_MEDIA_STORE = true;
+
+    /**
+     * Returns the next value of a persistent counter (starting at 1), used to
+     * build unique capture file names across app restarts.
+     *
+     * @param context context used to open the app's SharedPreferences.
+     */
+    public static int getNextInt(Context context) {
+        SharedPreferences prefs = context.getSharedPreferences(MY_PREFS_NAME, Context.MODE_PRIVATE);
+        int i = prefs.getInt("counter", 1);
+        SharedPreferences.Editor editor = prefs.edit();
+        editor.putInt("counter", i + 1);
+        // apply() persists asynchronously; the in-memory value updates immediately,
+        // so uniqueness is preserved without a blocking disk write on this thread.
+        // (createNewFile() below also guards against any reused counter value.)
+        editor.apply();
+        return i;
+    }
+
+    /**
+     * Writes a JPEG to a uniquely named file in /sdcard/DCIM/Camera and, if
+     * enabled, registers it with the MediaStore.
+     *
+     * @param context  Application context.
+     * @param jpegData JPEG byte stream.
+     * @param resolver resolver used for the MediaStore insert.
+     * @return the file name written, or "" if the write failed.
+     */
+    public static String saveJpeg(Context context, byte[] jpegData, ContentResolver resolver) {
+        String filename = "";
+        try {
+            File file;
+            while (true) {
+                int i = getNextInt(context);
+                // TODO: derive the directory from Environment.getExternalStoragePublicDirectory()
+                // instead of hard-coding /sdcard.
+                filename = String.format("/sdcard/DCIM/Camera/SNAP_%05d.JPG", i);
+                file = new File(filename);
+                if (file.createNewFile()) {
+                    break; // createNewFile() is atomic, so this name is now ours.
+                }
+            }
+
+            long t0 = SystemClock.uptimeMillis();
+            // try-with-resources guarantees the stream is closed even if write() throws.
+            try (OutputStream os = new FileOutputStream(file)) {
+                os.write(jpegData);
+                os.flush();
+            }
+            long t1 = SystemClock.uptimeMillis();
+
+            // update MediaStore so photos apps can find photos right away.
+            if (UPDATE_MEDIA_STORE) {
+                // really slow for some reason: MediaStore.Images.Media.insertImage(resolver, file.getAbsolutePath(), file.getName(), file.getName());
+                insertImage(resolver, file);
+            }
+            long t2 = SystemClock.uptimeMillis();
+
+            Log.v(TAG, String.format("Wrote JPEG %d bytes as %s in %.3f seconds; mediastore update = %.3f secs",
+                    jpegData.length, file, (t1 - t0) * 0.001, (t2 - t1) * 0.001));
+        } catch (IOException e) {
+            Log.e(TAG, "Error creating new file: ", e);
+        }
+        return filename;
+    }
+
+    /**
+     * Inserts the image into the MediaStore with date metadata. We use this
+     * instead of MediaStore.Images.Media.insertImage() because we want to add
+     * the date columns so the image sorts to the front of the gallery.
+     */
+    public static void insertImage(ContentResolver cr, File file) {
+        ContentValues values = new ContentValues();
+        values.put(MediaStore.Images.Media.TITLE, file.getName());
+        values.put(MediaStore.Images.Media.DISPLAY_NAME, file.getName());
+        values.put(MediaStore.Images.Media.DESCRIPTION, file.getName());
+        values.put(MediaStore.Images.Media.MIME_TYPE, "image/jpeg");
+        values.put(MediaStore.Images.Media.DATA, file.getAbsolutePath());
+        // Add the date meta data to ensure the image is added at the front of the gallery
+        values.put(MediaStore.Images.Media.DATE_ADDED, System.currentTimeMillis());
+        values.put(MediaStore.Images.Media.DATE_TAKEN, System.currentTimeMillis());
+
+        try {
+            cr.insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, values);
+        } catch (Exception e) {
+            Log.w(TAG, "Error updating media store for " + file, e);
+        }
+    }
+}
diff --git a/src/com/google/snappy/MyApi2Camera.java b/src/com/google/snappy/MyApi2Camera.java
new file mode 100644
index 0000000..8f2f306
--- /dev/null
+++ b/src/com/google/snappy/MyApi2Camera.java
@@ -0,0 +1,793 @@
+package com.google.snappy;
+
+import android.content.Context;
+import android.graphics.ImageFormat;
+import android.graphics.SurfaceTexture;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.TotalCaptureResult;
+import android.hardware.camera2.params.Face;
+import android.hardware.camera2.params.InputConfiguration;
+import android.media.Image;
+import android.media.ImageReader;
+import android.media.ImageWriter;
+import android.media.MediaActionSound;
+import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.SystemClock;
+import android.util.Log;
+import android.util.Size;
+import android.view.Surface;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.LinkedList;
+import java.util.List;
+
+import javax.microedition.khronos.opengles.GL10;
+
+
+/**
+ * MyApi2Camera : a camera2 implementation
+ *
+ * The goal here is to make the simplest possible API2 camera,
+ * where individual streams and capture options (e.g. edge enhancement,
+ * noise reduction, face detection) can be toggled on and off.
+ *
+ */
+
+public class MyApi2Camera implements MyCameraInterface, SurfaceTexture.OnFrameAvailableListener {
+ private static final String TAG = "SNAPPY_API2";
+
+ // Nth frame to log; put 10^6 if you don't want logging.
+ private static int LOG_NTH_FRAME = 30;
+    // Log dropped frames. There are a lot on Angler MDA32.
+ private static boolean LOG_DROPPED_FRAMES = true;
+
+ // IMPORTANT: Only one of these can be true:
+ private static boolean SECOND_YUV_IMAGEREADER_STREAM = true;
+ private static boolean SECOND_SURFACE_TEXTURE_STREAM = false;
+
+ // Enable raw stream if available.
+ private static boolean RAW_STREAM_ENABLE = true;
+ // Use JPEG ImageReader and YUV ImageWriter if reprocessing is available
+ private static final boolean USE_REPROCESSING_IF_AVAIL = true;
+
+ // Whether we are continuously taking pictures, or not.
+ boolean mIsBursting = false;
+ // Last total capture result
+ TotalCaptureResult mLastTotalCaptureResult;
+
+ // ImageReader/Writer buffer sizes.
+ private static final int YUV1_IMAGEREADER_SIZE = 8;
+ private static final int YUV2_IMAGEREADER_SIZE = 8;
+ private static final int RAW_IMAGEREADER_SIZE = 8;
+ private static final int IMAGEWRITER_SIZE = 2;
+
+ private CameraInfoCache mCameraInfoCache;
+ private CameraManager mCameraManager;
+ private CameraCaptureSession mCurrentCaptureSession;
+ private MediaActionSound mMediaActionSound = new MediaActionSound();
+
+ MyCameraCallback mMyCameraCallback;
+
+ // Generally everything running on this thread & this module is *not thread safe*.
+ private HandlerThread mOpsThread;
+ private Handler mOpsHandler;
+ private HandlerThread mInitThread;
+ private Handler mInitHandler;
+ private HandlerThread mJpegListenerThread;
+ private Handler mJpegListenerHandler;
+
+ Context mContext;
+ boolean mCameraIsFront;
+ SurfaceTexture mSurfaceTexture;
+ Surface mSurfaceTextureSurface;
+
+ private boolean mFirstFrameArrived;
+ private ImageReader mYuv1ImageReader;
+ private int mYuv1ImageCounter;
+ // Handle to last received Image: allows ZSL to be implemented.
+ private Image mYuv1LastReceivedImage = null;
+ // Time at which reprocessing request went in (right now we are doing one at a time).
+ private long mReprocessingRequestNanoTime;
+
+ private ImageReader mJpegImageReader;
+ private ImageReader mYuv2ImageReader;
+ private int mYuv2ImageCounter;
+ private ImageReader mRawImageReader;
+ private int mRawImageCounter;
+
+ // Starting the preview requires each of these 3 to be true/non-null:
+ volatile private Surface mPreviewSurface;
+ volatile private CameraDevice mCameraDevice;
+ volatile boolean mAllThingsInitialized = false;
+
+    /**
+     * Constructor: caches camera characteristics, spins up the camera-ops and
+     * init handler threads, and kicks off slow ImageReader initialization in
+     * the background (see InitializeAllTheThings()).
+     *
+     * @param context        used to obtain the CameraManager.
+     * @param useFrontCamera true to select the front-facing camera.
+     */
+    public MyApi2Camera(Context context, boolean useFrontCamera) {
+        mContext = context;
+        mCameraIsFront = useFrontCamera;
+        mCameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
+        mCameraInfoCache = new CameraInfoCache(mCameraManager, useFrontCamera);
+
+        // Create thread and handler for camera operations.
+        mOpsThread = new HandlerThread("CameraOpsThread");
+        mOpsThread.start();
+        mOpsHandler = new Handler(mOpsThread.getLooper());
+
+        // Create thread and handler for slow initialization operations.
+        // Don't want to use camera operations thread because we want to time camera open carefully.
+        mInitThread = new HandlerThread("CameraInitThread");
+        mInitThread.start();
+        mInitHandler = new Handler(mInitThread.getLooper());
+        mInitHandler.post(new Runnable() {
+            @Override
+            public void run() {
+                InitializeAllTheThings();
+                mAllThingsInitialized = true;
+                Log.v(TAG, "STARTUP_REQUIREMENT ImageReader initialization done.");
+                // Session start needs camera open + init + preview surface; this may
+                // be the last of the three to complete.
+                tryToStartCaptureSession();
+            }
+        });
+
+        // Set initial Noise and Edge modes.
+        if (mCameraInfoCache.IS_BULLHEAD || mCameraInfoCache.IS_ANGLER) {
+            // YUV streams.
+            mCaptureNoiseIndex = 4 /*ZSL*/ % mCameraInfoCache.noiseModes.length;
+            mCaptureEdgeIndex = 3 /*ZSL*/ % mCameraInfoCache.edgeModes.length;
+            // Reprocessing.
+            mReprocessingNoiseIndex = 2 /*High Quality*/ % mCameraInfoCache.noiseModes.length;
+            mReprocessingEdgeIndex = 2 /*High Quality*/ % mCameraInfoCache.edgeModes.length;
+        }
+    }
+
+    // Slow, one-time setup run on the init thread: JPEG/YUV/RAW ImageReaders,
+    // the optional SurfaceTexture stream, and the shutter sound.
+    // (Ugh, why is this stuff so slow?)
+    private void InitializeAllTheThings() {
+
+        // Thread to handle returned JPEGs.
+        mJpegListenerThread = new HandlerThread("CameraJpegThread");
+        mJpegListenerThread.start();
+        mJpegListenerHandler = new Handler(mJpegListenerThread.getLooper());
+
+        // Create ImageReader to receive JPEG image buffers via reprocessing.
+        mJpegImageReader = ImageReader.newInstance(
+                mCameraInfoCache.getYuvStream1Size().getWidth(),
+                mCameraInfoCache.getYuvStream1Size().getHeight(),
+                ImageFormat.JPEG,
+                2);
+        mJpegImageReader.setOnImageAvailableListener(mJpegImageListener, mJpegListenerHandler);
+
+        // Create ImageReader to receive YUV image buffers.
+        mYuv1ImageReader = ImageReader.newInstance(
+                mCameraInfoCache.getYuvStream1Size().getWidth(),
+                mCameraInfoCache.getYuvStream1Size().getHeight(),
+                ImageFormat.YUV_420_888,
+                YUV1_IMAGEREADER_SIZE);
+        mYuv1ImageReader.setOnImageAvailableListener(mYuv1ImageListener, mOpsHandler);
+
+        if (SECOND_YUV_IMAGEREADER_STREAM) {
+            // Create ImageReader to receive YUV image buffers.
+            mYuv2ImageReader = ImageReader.newInstance(
+                    mCameraInfoCache.getYuvStream2Size().getWidth(),
+                    mCameraInfoCache.getYuvStream2Size().getHeight(),
+                    ImageFormat.YUV_420_888,
+                    YUV2_IMAGEREADER_SIZE);
+            mYuv2ImageReader.setOnImageAvailableListener(mYuv2ImageListener, mOpsHandler);
+        }
+
+        if (SECOND_SURFACE_TEXTURE_STREAM) {
+            int[] textures = new int[1];
+            // generate one texture pointer and bind it as an external texture.
+            GLES20.glGenTextures(1, textures, 0);
+            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]);
+            // No mip-mapping with camera source.
+            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
+                    GL10.GL_TEXTURE_MIN_FILTER,
+                    GL10.GL_LINEAR);
+            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
+                    GL10.GL_TEXTURE_MAG_FILTER, GL10.GL_LINEAR);
+            // Clamp to edge is only option.
+            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
+                    GL10.GL_TEXTURE_WRAP_S, GL10.GL_CLAMP_TO_EDGE);
+            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
+                    GL10.GL_TEXTURE_WRAP_T, GL10.GL_CLAMP_TO_EDGE);
+
+            int texture_id = textures[0];
+            mSurfaceTexture = new SurfaceTexture(texture_id);
+            mSurfaceTexture.setDefaultBufferSize(320, 240);
+            mSurfaceTexture.setOnFrameAvailableListener(this);
+            mSurfaceTextureSurface = new Surface(mSurfaceTexture);
+        }
+
+        if (RAW_STREAM_ENABLE && mCameraInfoCache.rawAvailable()) {
+            // Create ImageReader to receive RAW image buffers.
+            mRawImageReader = ImageReader.newInstance(
+                    mCameraInfoCache.getRawStreamSize().getWidth(),
+                    mCameraInfoCache.getRawStreamSize().getHeight(),
+                    mCameraInfoCache.getRawFormat(),
+                    RAW_IMAGEREADER_SIZE);
+            mRawImageReader.setOnImageAvailableListener(mRawImageListener, mOpsHandler);
+        }
+
+        // Load click sound.
+        mMediaActionSound.load(MediaActionSound.SHUTTER_CLICK);
+
+    }
+
+    // Register the UI-layer callback that receives frames, JPEGs and status text.
+    public void setCallback(MyCameraCallback callback) {
+        mMyCameraCallback = callback;
+    }
+
+    // Issue a one-shot autofocus trigger on the preview request.
+    public void triggerAFScan() {
+        Log.v(TAG, "AF trigger");
+        issuePreviewCaptureRequest(true);
+    }
+
+    // Restore continuous autofocus on the preview request.
+    public void setCAF() {
+        Log.v(TAG, "run CAF");
+        issuePreviewCaptureRequest(false);
+    }
+
+    // Play the shutter sound immediately (UI feedback), then post the actual
+    // capture work (runReprocessing()) to the camera-ops thread.
+    public void takePicture() {
+        mMediaActionSound.play(MediaActionSound.SHUTTER_CLICK);
+        mOpsHandler.post(new Runnable() {
+            @Override
+            public void run() {
+                runReprocessing();
+            }
+        });
+    }
+
+    // SurfaceTexture.OnFrameAvailableListener: only logs; frames on the optional
+    // SurfaceTexture stream are not consumed further.
+    @Override
+    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
+        Log.v(TAG, " onFrameAvailable(SurfaceTexture)");
+    }
+
+    // Enable/disable burst capture. The first shot is fired here on the
+    // false->true transition; NOTE(review): subsequent shots are presumably
+    // retriggered elsewhere while mIsBursting remains true — confirm in the
+    // capture-completion path.
+    public void setBurst(boolean go) {
+        // if false to true transition.
+        if (go && !mIsBursting) {
+            takePicture();
+        }
+        mIsBursting = go;
+    }
+
+    // True if the device advertises a RAW output stream.
+    public boolean isRawAvailable() {
+        return mCameraInfoCache.rawAvailable();
+    }
+
+    // True if the device supports reprocessing (used for the ZSL JPEG path).
+    public boolean isReprocessingAvailable() {
+        return mCameraInfoCache.reprocessingAvailable();
+    }
+
+    // Preview stream dimensions chosen by CameraInfoCache; used by the UI to
+    // size the preview frame.
+    @Override
+    public Size getPreviewSize() {
+        return mCameraInfoCache.getPreviewSize();
+    }
+
+    /**
+     * Open the selected camera on the camera-ops thread. If the device does not
+     * report HARDWARE_LEVEL_FULL the UI is warned and (except on Nexus 6, which
+     * is known to work anyway) the open is aborted.
+     */
+    @Override
+    public void openCamera() {
+        // If API2 FULL mode is not available, display toast, do nothing.
+        if (!mCameraInfoCache.isCamera2FullModeAvailable()) {
+            mMyCameraCallback.noCamera2Full();
+            if (!mCameraInfoCache.IS_NEXUS_6) {
+                return;
+            }
+        }
+
+        Log.v(TAG, "Opening camera " + mCameraInfoCache.getCameraId());
+        mOpsHandler.post(new Runnable() {
+            @Override
+            public void run() {
+                MyTimer.t_open_start = SystemClock.elapsedRealtime();
+                try {
+                    mCameraManager.openCamera(mCameraInfoCache.getCameraId(), mCameraStateCallback, null);
+                } catch (CameraAccessException e) {
+                    // Log the cause instead of swallowing it.
+                    Log.e(TAG, "Unable to openCamera().", e);
+                }
+            }
+        });
+    }
+
+    /**
+     * Abort in-flight captures and close the camera device.
+     * TODO: We are stalling main thread now which is bad.
+     */
+    @Override
+    public void closeCamera() {
+        Log.v(TAG, "Closing camera " + mCameraInfoCache.getCameraId());
+        if (mCameraDevice != null) {
+            // The session can lag the device: close() may race startup, where the
+            // device has opened but the capture session is not configured yet.
+            // Guard against an NPE in that window.
+            if (mCurrentCaptureSession != null) {
+                try {
+                    mCurrentCaptureSession.abortCaptures();
+                } catch (CameraAccessException e) {
+                    // Log the cause instead of swallowing it.
+                    Log.e(TAG, "Could not abortCaptures().", e);
+                }
+            }
+            mCameraDevice.close();
+        }
+        mCurrentCaptureSession = null;
+        Log.v(TAG, "Done closing camera " + mCameraInfoCache.getCameraId());
+    }
+
+    // Record the preview Surface (one of three startup prerequisites) and try
+    // to start the capture session if the other two are already satisfied.
+    public void startPreview(final Surface surface) {
+        Log.v(TAG, "STARTUP_REQUIREMENT preview Surface ready.");
+        mPreviewSurface = surface;
+        tryToStartCaptureSession();
+    }
+
+    // Device state callback: records the opened CameraDevice (second startup
+    // prerequisite), logs HAL open time, and attempts session start.
+    private CameraDevice.StateCallback mCameraStateCallback = new MyLoggingCallbacks.DeviceStateCallback() {
+        @Override
+        public void onOpened(CameraDevice camera) {
+            MyTimer.t_open_end = SystemClock.elapsedRealtime();
+            mCameraDevice = camera;
+            Log.v(TAG, "STARTUP_REQUIREMENT Done opening camera " + mCameraInfoCache.getCameraId() +
+                    ". HAL open took: (" + (MyTimer.t_open_end - MyTimer.t_open_start) + " ms)");
+
+            super.onOpened(camera);
+            tryToStartCaptureSession();
+        }
+    };
+
+    /**
+     * Configures the capture session once all three prerequisites are in
+     * place: an opened device, initialized readers, and a preview Surface.
+     * Safe to call from any of the three completion paths.
+     */
+    private void tryToStartCaptureSession() {
+        if (mCameraDevice == null || !mAllThingsInitialized || mPreviewSurface == null) {
+            return;
+        }
+        mOpsHandler.post(new Runnable() {
+            @Override
+            public void run() {
+                // It used to be: this needed to be posted on a Handler.
+                startCaptureSession();
+            }
+        });
+    }
+
+    // Create CameraCaptureSession. Callback will start repeating request with current parameters.
+    // All possible output surfaces must be registered up front, even though a
+    // given capture request only targets a subset of them.
+    private void startCaptureSession() {
+        MyTimer.t_session_go = SystemClock.elapsedRealtime();
+
+        Log.v(TAG, "Configuring session..");
+        List<Surface> outputSurfaces = new ArrayList<Surface>(3);
+
+        outputSurfaces.add(mPreviewSurface);
+        Log.v(TAG, " .. added SurfaceView " + mCameraInfoCache.getPreviewSize().getWidth() +
+                " x " + mCameraInfoCache.getPreviewSize().getHeight());
+
+        outputSurfaces.add(mYuv1ImageReader.getSurface());
+        Log.v(TAG, " .. added YUV ImageReader " + mCameraInfoCache.getYuvStream1Size().getWidth() +
+                " x " + mCameraInfoCache.getYuvStream1Size().getHeight());
+
+        if (SECOND_YUV_IMAGEREADER_STREAM) {
+            outputSurfaces.add(mYuv2ImageReader.getSurface());
+            Log.v(TAG, " .. added YUV ImageReader " + mCameraInfoCache.getYuvStream2Size().getWidth() +
+                    " x " + mCameraInfoCache.getYuvStream2Size().getHeight());
+        }
+
+        if (SECOND_SURFACE_TEXTURE_STREAM) {
+            outputSurfaces.add(mSurfaceTextureSurface);
+            Log.v(TAG, " .. added SurfaceTexture");
+        }
+
+        if (RAW_STREAM_ENABLE && mCameraInfoCache.rawAvailable()) {
+            outputSurfaces.add(mRawImageReader.getSurface());
+            Log.v(TAG, " .. added Raw ImageReader " + mCameraInfoCache.getRawStreamSize().getWidth() +
+                    " x " + mCameraInfoCache.getRawStreamSize().getHeight());
+        }
+
+        if (USE_REPROCESSING_IF_AVAIL && mCameraInfoCache.reprocessingAvailable()) {
+            outputSurfaces.add(mJpegImageReader.getSurface());
+            Log.v(TAG, " .. added JPEG ImageReader " + mCameraInfoCache.getJpegStreamSize().getWidth() +
+                    " x " + mCameraInfoCache.getJpegStreamSize().getHeight());
+        }
+
+        try {
+            if (USE_REPROCESSING_IF_AVAIL && mCameraInfoCache.reprocessingAvailable()) {
+                // Reprocessing input stream matches the YUV1 output stream.
+                InputConfiguration inputConfig = new InputConfiguration(mCameraInfoCache.getYuvStream1Size().getWidth(),
+                        mCameraInfoCache.getYuvStream1Size().getHeight(), ImageFormat.YUV_420_888);
+                mCameraDevice.createReprocessableCaptureSession(inputConfig, outputSurfaces,
+                        mSessionStateCallback, null);
+                Log.v(TAG, " Call to createReprocessableCaptureSession complete.");
+            } else {
+                mCameraDevice.createCaptureSession(outputSurfaces, mSessionStateCallback, null);
+                Log.v(TAG, " Call to createCaptureSession complete.");
+            }
+
+        } catch (CameraAccessException e) {
+            Log.e(TAG, "Error configuring ISP.");
+        }
+    }
+
+    // Feeds YUV frames back to the camera for reprocessing; created in
+    // onReady() below once the session reports it is reprocessable.
+    ImageWriter mImageWriter;
+
+    // Session state callback: stores the session, starts the repeating preview
+    // request, and sets up the ImageWriter for the reprocessing path.
+    private CameraCaptureSession.StateCallback mSessionStateCallback = new MyLoggingCallbacks.SessionStateCallback() {
+        @Override
+        public void onReady(CameraCaptureSession session) {
+            Log.v(TAG, "capture session onReady(). HAL capture session took: (" + (SystemClock.elapsedRealtime() - MyTimer.t_session_go) + " ms)");
+            mCurrentCaptureSession = session;
+            issuePreviewCaptureRequest(false);
+
+            if (session.isReprocessable()) {
+                mImageWriter = ImageWriter.newInstance(session.getInputSurface(), IMAGEWRITER_SIZE);
+                mImageWriter.setOnImageReleasedListener(
+                        new ImageWriter.OnImageReleasedListener() {
+                            @Override
+                            public void onImageReleased(ImageWriter writer) {
+                                Log.v(TAG, "ImageWriter.OnImageReleasedListener onImageReleased()");
+                            }
+                        }, null);
+                Log.v(TAG, "Created ImageWriter.");
+            }
+            super.onReady(session);
+        }
+    };
+
+    // Variables to hold capture flow state.
+    private boolean mCaptureYuv1 = false;
+    private boolean mCaptureYuv2 = false;
+    private boolean mCaptureRaw = false;
+    // Indices into CameraInfoCache.noiseModes / edgeModes for the preview request.
+    // NOTE(review): initialized with the *_MODE_OFF constants but used as array
+    // indices — works only if those constants are 0; confirm.
+    private int mCaptureNoiseIndex = CaptureRequest.NOISE_REDUCTION_MODE_OFF;
+    private int mCaptureEdgeIndex = CaptureRequest.EDGE_MODE_OFF;
+    private boolean mCaptureFace = false;
+    // Variables to hold reprocessing state.
+    private int mReprocessingNoiseIndex = CaptureRequest.NOISE_REDUCTION_MODE_OFF;
+    private int mReprocessingEdgeIndex = CaptureRequest.EDGE_MODE_OFF;
+
+
+    /**
+     * Updates which output streams the repeating preview request targets and
+     * cycles the NR/edge modes. A null Boolean leaves that setting unchanged.
+     *
+     * @param yuv1  target the YUV1 ImageReader, or null to keep current.
+     * @param yuv2  target the YUV2/SurfaceTexture streams, or null to keep current.
+     * @param raw10 target the RAW stream (only if RAW_STREAM_ENABLE), or null.
+     * @param nr    true to advance to the next noise reduction mode.
+     * @param edge  true to advance to the next edge enhancement mode.
+     * @param face  enable face detection, or null to keep current.
+     */
+    public void setCaptureFlow(Boolean yuv1, Boolean yuv2, Boolean raw10, Boolean nr, Boolean edge, Boolean face) {
+        if (yuv1 != null) mCaptureYuv1 = yuv1;
+        if (yuv2 != null) mCaptureYuv2 = yuv2;
+        if (raw10 != null) mCaptureRaw = raw10 && RAW_STREAM_ENABLE;
+        // Null-safe like the other Boolean parameters: previously a null nr or
+        // edge threw NullPointerException on auto-unboxing.
+        if (nr != null && nr) {
+            mCaptureNoiseIndex = (mCaptureNoiseIndex + 1) % mCameraInfoCache.noiseModes.length;
+        }
+        if (edge != null && edge) {
+            mCaptureEdgeIndex = (mCaptureEdgeIndex + 1) % mCameraInfoCache.edgeModes.length;
+        }
+        if (face != null) mCaptureFace = face;
+        mMyCameraCallback.setNoiseEdgeText(
+                "NR " + noiseModeToString(mCameraInfoCache.noiseModes[mCaptureNoiseIndex]),
+                "Edge " + edgeModeToString(mCameraInfoCache.edgeModes[mCaptureEdgeIndex])
+        );
+
+        // Re-issue the repeating request only once a session exists.
+        if (mCurrentCaptureSession != null) {
+            issuePreviewCaptureRequest(false);
+        }
+    }
+
+    /**
+     * Cycles the NR/edge modes used for the YUV->JPEG reprocessing request.
+     * A null parameter leaves that mode unchanged.
+     *
+     * @param nr   true to advance to the next noise reduction mode.
+     * @param edge true to advance to the next edge enhancement mode.
+     */
+    public void setReprocessingFlow(Boolean nr, Boolean edge) {
+        // Null-safe: previously a null nr or edge threw NullPointerException
+        // on auto-unboxing.
+        if (nr != null && nr) {
+            mReprocessingNoiseIndex = (mReprocessingNoiseIndex + 1) % mCameraInfoCache.noiseModes.length;
+        }
+        if (edge != null && edge) {
+            mReprocessingEdgeIndex = (mReprocessingEdgeIndex + 1) % mCameraInfoCache.edgeModes.length;
+        }
+        mMyCameraCallback.setNoiseEdgeTextForReprocessing(
+                "NR " + noiseModeToString(mCameraInfoCache.noiseModes[mReprocessingNoiseIndex]),
+                "Edge " + edgeModeToString(mCameraInfoCache.edgeModes[mReprocessingEdgeIndex])
+        );
+    }
+
+    /**
+     * Builds and installs the repeating preview request reflecting the current
+     * capture-flow state (YUV1/YUV2/RAW targets, NR/edge/face settings).
+     *
+     * @param AFtrigger when true, fires a one-shot AF-trigger capture in AUTO
+     *                  mode before (re)installing the repeating request.
+     */
+    public void issuePreviewCaptureRequest(boolean AFtrigger) {
+        MyTimer.t_burst = SystemClock.elapsedRealtime();
+        Log.v(TAG, "issuePreviewCaptureRequest...");
+        try {
+            CaptureRequest.Builder b1 = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
+            b1.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_USE_SCENE_MODE);
+            b1.set(CaptureRequest.CONTROL_SCENE_MODE, CameraMetadata.CONTROL_SCENE_MODE_FACE_PRIORITY);
+            if (AFtrigger) {
+                b1.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_AUTO);
+            } else {
+                b1.set(CaptureRequest.CONTROL_AF_MODE, CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
+            }
+
+            b1.set(CaptureRequest.NOISE_REDUCTION_MODE, mCameraInfoCache.noiseModes[mCaptureNoiseIndex]);
+            b1.set(CaptureRequest.EDGE_MODE, mCameraInfoCache.edgeModes[mCaptureEdgeIndex]);
+            b1.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, mCaptureFace ? mCameraInfoCache.bestFaceDetectionMode() : CaptureRequest.STATISTICS_FACE_DETECT_MODE_OFF);
+
+            Log.v(TAG, " .. NR=" + mCaptureNoiseIndex + " Edge=" + mCaptureEdgeIndex + " Face=" + mCaptureFace);
+
+            if (mCaptureYuv1) {
+                b1.addTarget(mYuv1ImageReader.getSurface());
+                Log.v(TAG, " .. YUV1 on");
+            }
+
+            if (mCaptureRaw) {
+                b1.addTarget(mRawImageReader.getSurface());
+            }
+
+            // Preview surface is always targeted.
+            b1.addTarget(mPreviewSurface);
+
+            if (mCaptureYuv2) {
+                if (SECOND_SURFACE_TEXTURE_STREAM) {
+                    b1.addTarget(mSurfaceTextureSurface);
+                }
+                if (SECOND_YUV_IMAGEREADER_STREAM) {
+                    b1.addTarget(mYuv2ImageReader.getSurface());
+                }
+                Log.v(TAG, " .. YUV2 on");
+            }
+
+            if (AFtrigger) {
+                // The one-shot capture carries the AF trigger; the repeating
+                // request below is issued with the trigger reset to idle.
+                b1.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_START);
+                mCurrentCaptureSession.capture(b1.build(), mCaptureCallback, mOpsHandler);
+                b1.set(CaptureRequest.CONTROL_AF_TRIGGER, CameraMetadata.CONTROL_AF_TRIGGER_IDLE);
+            }
+            mCurrentCaptureSession.setRepeatingRequest(b1.build(), mCaptureCallback, mOpsHandler);
+        } catch (CameraAccessException e) {
+            Log.e(TAG, "Could not access camera for issuePreviewCaptureRequest.");
+        }
+    }
+
+    /**
+     * Submits the most recently received YUV1 frame for YUV->JPEG
+     * reprocessing, reusing the capture result metadata of that frame.
+     * No-op (with an error log) if no YUV1 frame has been captured yet.
+     */
+    void runReprocessing() {
+        if (mYuv1LastReceivedImage == null) {
+            Log.e(TAG, "No YUV Image available.");
+            return;
+        }
+        // queueInputImage() takes ownership and closes the Image for us.
+        mImageWriter.queueInputImage(mYuv1LastReceivedImage);
+        Log.v(TAG, " Sent YUV1 image to ImageWriter.queueInputImage()");
+        try {
+            CaptureRequest.Builder b1 = mCameraDevice.createReprocessCaptureRequest(mLastTotalCaptureResult);
+            // Portrait.
+            b1.set(CaptureRequest.JPEG_ORIENTATION, 90);
+            b1.set(CaptureRequest.JPEG_QUALITY, (byte) 95);
+            b1.set(CaptureRequest.NOISE_REDUCTION_MODE, mCameraInfoCache.noiseModes[mReprocessingNoiseIndex]);
+            b1.set(CaptureRequest.EDGE_MODE, mCameraInfoCache.edgeModes[mReprocessingEdgeIndex]);
+            b1.addTarget(mJpegImageReader.getSurface());
+            mCurrentCaptureSession.capture(b1.build(), mReprocessingCaptureCallback, mOpsHandler);
+            mReprocessingRequestNanoTime = System.nanoTime();
+        } catch (CameraAccessException e) {
+            // Fixed copy-pasted message: this is the reprocessing path.
+            Log.e(TAG, "Could not access camera for runReprocessing().");
+        }
+        mYuv1LastReceivedImage = null;
+        Log.v(TAG, " Reprocessing request submitted.");
+    }
+
+
+ /*********************************
+ * onImageAvailable() processing *
+ *********************************/
+
+    // Keeps only the newest YUV1 frame (closing the previous one) so it can be
+    // handed to the reprocessing path; ownership passes to runReprocessing().
+    ImageReader.OnImageAvailableListener mYuv1ImageListener =
+            new ImageReader.OnImageAvailableListener() {
+        @Override
+        public void onImageAvailable(ImageReader reader) {
+            Image img = reader.acquireLatestImage();
+            if (img == null) {
+                Log.e(TAG, "Null image returned YUV1");
+                return;
+            }
+            if (mYuv1LastReceivedImage != null) {
+                // Release the previously held frame so the reader never stalls.
+                mYuv1LastReceivedImage.close();
+            }
+            mYuv1LastReceivedImage = img;
+            if (++mYuv1ImageCounter % LOG_NTH_FRAME == 0) {
+                Log.v(TAG, "YUV1 buffer available, Frame #=" + mYuv1ImageCounter + " w=" + img.getWidth() + " h=" + img.getHeight() + " time=" + img.getTimestamp());
+            }
+
+        }
+    };
+
+
+    // Delivers finished JPEGs (from the reprocessing path) to the UI callback
+    // and, while bursting, immediately requests the next picture.
+    ImageReader.OnImageAvailableListener mJpegImageListener =
+            new ImageReader.OnImageAvailableListener() {
+        @Override
+        public void onImageAvailable(ImageReader reader) {
+            Image img = reader.acquireLatestImage();
+            if (img == null) {
+                Log.e(TAG, "Null image returned JPEG");
+                return;
+            }
+            Image.Plane plane0 = img.getPlanes()[0];
+            final ByteBuffer buffer = plane0.getBuffer();
+            long dt = System.nanoTime() - mReprocessingRequestNanoTime;
+            Log.v(TAG, String.format("JPEG buffer available, w=%d h=%d time=%d size=%d dt=%.1f ms ISO=%d",
+                    img.getWidth(), img.getHeight(), img.getTimestamp(), buffer.capacity(), 0.000001 * dt, mLastIso));
+            // Save JPEG on the utility thread,
+            final byte[] jpegBuf;
+            if (buffer.hasArray()) {
+                // NOTE(review): array() ignores arrayOffset(), and the backing
+                // array is handed out after img.close() below; presumably camera
+                // JPEG buffers are always direct so this branch never runs — confirm.
+                jpegBuf = buffer.array();
+            } else {
+                jpegBuf = new byte[buffer.capacity()];
+                buffer.get(jpegBuf);
+            }
+            mMyCameraCallback.jpegAvailable(jpegBuf, img.getWidth(), img.getHeight());
+            img.close();
+
+            // take (reprocess) another picture right away if bursting.
+            if (mIsBursting) {
+                takePicture();
+            }
+        }
+    };
+
+
+    // YUV2 frames are only counted and periodically logged, then recycled.
+    ImageReader.OnImageAvailableListener mYuv2ImageListener =
+            new ImageReader.OnImageAvailableListener() {
+        @Override
+        public void onImageAvailable(ImageReader reader) {
+            Image img = reader.acquireLatestImage();
+            if (img == null) {
+                Log.e(TAG, "Null image returned YUV2");
+                return;
+            }
+            if (++mYuv2ImageCounter % LOG_NTH_FRAME == 0) {
+                Log.v(TAG, "YUV2 buffer available, Frame #=" + mYuv2ImageCounter + " w=" + img.getWidth() + " h=" + img.getHeight() + " time=" + img.getTimestamp());
+            }
+            img.close();
+        }
+    };
+
+
+    // RAW frames are only counted and periodically logged, then recycled.
+    ImageReader.OnImageAvailableListener mRawImageListener =
+            new ImageReader.OnImageAvailableListener() {
+        @Override
+        public void onImageAvailable(ImageReader reader) {
+            final Image img = reader.acquireLatestImage();
+            if (img == null) {
+                Log.e(TAG, "Null image returned RAW");
+            } else {
+                if (++mRawImageCounter % LOG_NTH_FRAME == 0) {
+                    Image.Plane plane0 = img.getPlanes()[0];
+                    final ByteBuffer buffer = plane0.getBuffer();
+                    // Fixed missing space before "w=": the width previously ran
+                    // into the frame counter in the log line.
+                    Log.v(TAG, "Raw buffer available, Frame #=" + mRawImageCounter + " w=" + img.getWidth()
+                            + " h=" + img.getHeight()
+                            + " format=" + MyDeviceReport.getFormatName(img.getFormat())
+                            + " time=" + img.getTimestamp()
+                            + " size=" + buffer.capacity()
+                            + " getRowStride()=" + plane0.getRowStride());
+                }
+                img.close();
+            }
+        }
+    };
+
+ /*************************************
+ * CaptureResult metadata processing *
+ *************************************/
+
+    // Repeating-request callback: records first-frame timing once, publishes
+    // per-frame metadata, and stashes the result for the reprocessing path.
+    private CameraCaptureSession.CaptureCallback mCaptureCallback = new MyLoggingCallbacks.SessionCaptureCallback() {
+        @Override
+        public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
+            if (!mFirstFrameArrived) {
+                mFirstFrameArrived = true;
+                long now = SystemClock.elapsedRealtime();
+                long dt = now - MyTimer.t0;
+                // Total HAL time = session configuration wait plus device open.
+                long camera_dt = now - MyTimer.t_session_go + MyTimer.t_open_end - MyTimer.t_open_start;
+                long repeating_req_dt = now - MyTimer.t_burst;
+                Log.v(TAG, "App control to first frame: (" + dt + " ms)");
+                Log.v(TAG, "HAL request to first frame: (" + repeating_req_dt + " ms) " + " Total HAL wait: (" + camera_dt + " ms)");
+                mMyCameraCallback.receivedFirstFrame();
+                mMyCameraCallback.performanceDataAvailable((int) dt, (int) camera_dt, null);
+            }
+            publishFrameData(result);
+            // Used for reprocessing.
+            mLastTotalCaptureResult = result;
+            super.onCaptureCompleted(session, request, result);
+        }
+    };
+
+    // Reprocessing capture completed.
+    private CameraCaptureSession.CaptureCallback mReprocessingCaptureCallback = new MyLoggingCallbacks.SessionCaptureCallback() {
+        @Override
+        public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
+            Log.v(TAG, "Reprocessing onCaptureCompleted()");
+            // NOTE(review): unlike mCaptureCallback, this does not call
+            // super.onCaptureCompleted() — confirm that is intentional.
+        }
+    };
+
+    // log10 of the exposure-time bounds (in ns) used to normalize exposure to [0, 1].
+    private static double SHORT_LOG_EXPOSURE = Math.log10(1000000000 / 10000); // 1/10000 second
+    private static double LONG_LOG_EXPOSURE = Math.log10(1000000000 / 10); // 1/10 second
+    // Number of recent frame timestamps retained for the FPS estimate.
+    public int FPS_CALC_LOOKBACK = 15;
+    // SENSOR_TIMESTAMP values (ns) of the most recent frames, oldest first.
+    private LinkedList<Long> mFrameTimes = new LinkedList<Long>();
+
+    /**
+     * Extracts per-frame metadata from a TotalCaptureResult — normalized face
+     * rectangles, exposure, lens position, ISO, FPS estimate, and dropped-frame
+     * detection — and forwards it to the UI via mMyCameraCallback.
+     */
+    private void publishFrameData(TotalCaptureResult result) {
+        // Faces.
+        // NOTE(review): assumes STATISTICS_FACES (and the other keys read
+        // below) are non-null in every result — confirm across devices,
+        // otherwise faces.length / auto-unboxing will NPE.
+        final Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
+        NormalizedFace[] newFaces = new NormalizedFace[faces.length];
+        if (faces.length > 0) {
+            int offX = mCameraInfoCache.faceOffsetX();
+            int offY = mCameraInfoCache.faceOffsetY();
+            int dX = mCameraInfoCache.activeAreaWidth() - 2 * offX;
+            int dY = mCameraInfoCache.activeAreaHeight() - 2 * offY;
+            if (mCameraInfoCache.IS_NEXUS_6 && mCameraIsFront) {
+                // Front camera on Nexus 6 is currently 16 x 9 cropped to 4 x 3.
+                // TODO: Generalize this.
+                int cropOffset = dX / 8;
+                dX -= 2 * cropOffset;
+                offX += cropOffset;
+            }
+            int orientation = mCameraInfoCache.sensorOrientation();
+            for (int i = 0; i < faces.length; ++i) {
+                newFaces[i] = new NormalizedFace(faces[i], dX, dY, offX, offY);
+                // Mirror normalized coordinates so faces line up with the
+                // on-screen preview for each sensor orientation.
+                if (mCameraIsFront && orientation == 90) {
+                    newFaces[i].mirrorInY();
+                }
+                if (mCameraIsFront && orientation == 270) {
+                    newFaces[i].mirrorInX();
+                }
+                if (!mCameraIsFront && orientation == 270) {
+                    newFaces[i].mirrorInX();
+                    newFaces[i].mirrorInY();
+                }
+            }
+        }
+
+        // Normalized lens and exposure coordinates.
+        double rm = Math.log10(result.get(CaptureResult.SENSOR_EXPOSURE_TIME));
+        float normExposure = (float) ((rm - SHORT_LOG_EXPOSURE) / (LONG_LOG_EXPOSURE - SHORT_LOG_EXPOSURE));
+        float normLensPos = (mCameraInfoCache.getDiopterHi() - result.get(CaptureResult.LENS_FOCUS_DISTANCE)) / (mCameraInfoCache.getDiopterHi() - mCameraInfoCache.getDiopterLow());
+        mLastIso = result.get(CaptureResult.SENSOR_SENSITIVITY);
+
+        // Update frame arrival history.
+        mFrameTimes.add(result.get(CaptureResult.SENSOR_TIMESTAMP));
+        if (mFrameTimes.size() > FPS_CALC_LOOKBACK) {
+            mFrameTimes.removeFirst();
+        }
+
+        // Frame drop detector
+        {
+            float frameDuration = result.get(CaptureResult.SENSOR_FRAME_DURATION);
+            if (mFrameTimes.size() > 1) {
+                // Gap between this frame and the previous one; > 1.5x the
+                // nominal frame duration counts as dropped frames.
+                long dt = result.get(CaptureResult.SENSOR_TIMESTAMP) - mFrameTimes.get(mFrameTimes.size()-2);
+                if (dt > 3 * frameDuration / 2 && LOG_DROPPED_FRAMES) {
+                    float drops = (dt * 1f / frameDuration) - 1f;
+                    Log.e(TAG, String.format("dropped %.2f frames", drops));
+                    mMyCameraCallback.performanceDataAvailable(null, null, drops);
+                }
+            }
+        }
+
+        // FPS calc.
+        float fps = 0;
+        if (mFrameTimes.size() > 1) {
+            long dt = mFrameTimes.getLast() - mFrameTimes.getFirst();
+            fps = (mFrameTimes.size() - 1) * 1000000000f / dt;
+            fps = (float) Math.floor(fps + 0.1); // round to nearest whole number, ish.
+        }
+
+        // Do callback.
+        if (mMyCameraCallback != null) {
+            mMyCameraCallback.frameDataAvailable(newFaces, normExposure, normLensPos, fps,
+                    (int) mLastIso, result.get(CaptureResult.CONTROL_AF_STATE), result.get(CaptureResult.CONTROL_AE_STATE), result.get(CaptureResult.CONTROL_AWB_STATE));
+        } else {
+            Log.v(TAG, "mMyCameraCallbacks is null!!.");
+        }
+    }
+
+ long mLastIso = 0;
+
+ /*********************
+ * UTILITY FUNCTIONS *
+ *********************/
+
+    /**
+     * Human-readable label for a CaptureRequest.EDGE_MODE value; unknown
+     * values are rendered as their decimal form.
+     */
+    private static String edgeModeToString(int mode) {
+        if (mode == CaptureRequest.EDGE_MODE_OFF) {
+            return "OFF";
+        }
+        if (mode == CaptureRequest.EDGE_MODE_FAST) {
+            return "FAST";
+        }
+        if (mode == CaptureRequest.EDGE_MODE_HIGH_QUALITY) {
+            return "HiQ";
+        }
+        if (mode == 3) {
+            // presumably EDGE_MODE_ZERO_SHUTTER_LAG — not named in this SDK level.
+            return "ZSL";
+        }
+        return Integer.toString(mode);
+    }
+
+
+    /**
+     * Human-readable label for a CaptureRequest.NOISE_REDUCTION_MODE value;
+     * unknown values are rendered as their decimal form.
+     */
+    private static String noiseModeToString(int mode) {
+        if (mode == CaptureRequest.NOISE_REDUCTION_MODE_OFF) {
+            return "OFF";
+        }
+        if (mode == CaptureRequest.NOISE_REDUCTION_MODE_FAST) {
+            return "FAST";
+        }
+        if (mode == CaptureRequest.NOISE_REDUCTION_MODE_HIGH_QUALITY) {
+            return "HiQ";
+        }
+        if (mode == 3) {
+            // presumably NOISE_REDUCTION_MODE_MINIMAL — not named in this SDK level.
+            return "MIN";
+        }
+        if (mode == 4) {
+            // presumably NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG.
+            return "ZSL";
+        }
+        return Integer.toString(mode);
+    }
+} \ No newline at end of file
diff --git a/src/com/google/snappy/MyCameraInterface.java b/src/com/google/snappy/MyCameraInterface.java
new file mode 100644
index 0000000..d6a1be7
--- /dev/null
+++ b/src/com/google/snappy/MyCameraInterface.java
@@ -0,0 +1,128 @@
+package com.google.snappy;
+
+import android.util.Size;
+import android.view.Surface;
+
+/**
+ * This is a simple camera interface not specific to API1 or API2.
+ */
+public interface MyCameraInterface {
+    /**
+     * Returns the preview size to use, passed through from the camera API.
+     */
+    Size getPreviewSize();
+
+    /**
+     * Open the camera. Call startPreview() to actually see something.
+     */
+    void openCamera();
+
+    /**
+     * Start preview to a surface. Also need to call openCamera().
+     * @param surface destination for preview frames.
+     */
+    void startPreview(Surface surface);
+
+    /**
+     * Close the camera.
+     */
+    void closeCamera();
+
+    /**
+     * Take a picture and return data with provided callback.
+     * Preview must be started.
+     */
+    void takePicture();
+
+    /**
+     * Set whether we are continuously taking pictures, or not.
+     */
+    void setBurst(boolean go);
+
+    /**
+     * Register the callback used to deliver picture data and status updates.
+     */
+    void setCallback(MyCameraCallback callback);
+
+    /**
+     * Is a raw stream available.
+     */
+    boolean isRawAvailable();
+
+    /**
+     * Is reprocessing available.
+     */
+    boolean isReprocessingAvailable();
+
+    /**
+     * Triggers an AF scan. Leaves camera in AUTO.
+     */
+    void triggerAFScan();
+
+    /**
+     * Runs CAF (continuous picture).
+     */
+    void setCAF();
+
+    /**
+     * Camera picture callbacks.
+     */
+    interface MyCameraCallback {
+        /**
+         * What text to display on the Edge and NR mode buttons.
+         */
+        void setNoiseEdgeText(String s1, String s2);
+
+        /**
+         * What text to display on the Edge and NR mode buttons (reprocessing flow).
+         */
+        void setNoiseEdgeTextForReprocessing(String s1, String s2);
+
+        /**
+         * Full size JPEG is available.
+         * @param jpegData encoded JPEG bytes.
+         * @param x width in pixels.
+         * @param y height in pixels.
+         */
+        void jpegAvailable(byte[] jpegData, int x, int y);
+
+        /**
+         * Metadata from an image frame.
+         *
+         * @param faces Face coordinates.
+         * @param normExposure Exposure value normalized from 0 to 1.
+         * @param normLensPos Lens position value normalized from 0 to 1.
+         * @param fps frames-per-second estimate.
+         * @param iso sensor sensitivity of the frame.
+         * @param afState auto-focus state.
+         * @param aeState auto-exposure state.
+         * @param awbState auto-white-balance state.
+         *
+         */
+        void frameDataAvailable(NormalizedFace[] faces, float normExposure, float normLensPos, float fps, int iso, int afState, int aeState, int awbState);
+
+        /**
+         * Misc performance data.
+         */
+        void performanceDataAvailable(Integer timeToFirstFrame, Integer halWaitTime, Float droppedFrameCount);
+
+        /**
+         * Called when camera2 FULL not available.
+         */
+        void noCamera2Full();
+
+        /**
+         * Used to set the preview SurfaceView background color from black to transparent.
+         */
+        void receivedFirstFrame();
+    }
+
+    void setCaptureFlow(Boolean yuv1, Boolean yuv2, Boolean raw10, Boolean nr, Boolean edge, Boolean face);
+
+    void setReprocessingFlow(Boolean nr, Boolean edge);
+
+}
diff --git a/src/com/google/snappy/MyDeviceReport.java b/src/com/google/snappy/MyDeviceReport.java
new file mode 100644
index 0000000..2505d30
--- /dev/null
+++ b/src/com/google/snappy/MyDeviceReport.java
@@ -0,0 +1,385 @@
+package com.google.snappy;
+
+import android.app.Activity;
+import android.content.Context;
+import android.graphics.ImageFormat;
+import android.graphics.Rect;
+import android.graphics.SurfaceTexture;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.params.StreamConfigurationMap;
+import android.os.Build;
+import android.util.DisplayMetrics;
+import android.util.Log;
+import android.util.Range;
+import android.util.Size;
+import android.util.SizeF;
+import android.view.SurfaceHolder;
+import android.view.WindowManager;
+
+/**
+ * Created by andyhuibers on 6/24/14.
+ */
+
+public class MyDeviceReport {
+    private static final String TAG = "SNAPPY_INFO";
+
+    // Note: we actually need the activity to get window information
+    public static void printReport(Activity activity, boolean firstCameraOnly) {
+        printDisplayInfo(activity);
+        printCameraSystemInfo(activity, firstCameraOnly);
+    }
+
+    /**
+     * Print out information about all cameras.
+     */
+    private static void printCameraSystemInfo(Activity activity, boolean firstCameraOnly) {
+        CameraManager cameraMgr = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE); // "camera"
+        String[] cameralist;
+        try {
+            cameralist = cameraMgr.getCameraIdList();
+            Log.v(TAG, "Number of cameras:" + cameralist.length);
+        } catch (Exception e) {
+            Log.e(TAG, "Could not get camera ID list: "+e);
+            return;
+        }
+        for (String cameraId : cameralist) {
+            printCameraInfo(cameraMgr, cameraId);
+            if (firstCameraOnly) {
+                break;
+            }
+        }
+    }
+
+    /**
+     * Print out information about a specific camera.
+     */
+    private static void printCameraInfo(CameraManager manager, String id) {
+        Log.v(TAG, "============= CAMERA " + id + " INFO =============");
+
+        CameraCharacteristics p;
+        try {
+            p = manager.getCameraCharacteristics(id);
+        } catch (Exception e) {
+            Log.e(TAG, "Could not get getCameraCharacteristics");
+            return;
+        }
+        // dumpsys media.camera
+
+        // Print out various CameraCharacteristics.
+        Rect size = p.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
+        if (size != null) {
+            Log.v(TAG, "SENSOR_INFO_ACTIVE_ARRAY_SIZE: "
+                    + size.width() + "x" + size.height());
+        } else {
+            Log.v(TAG, "SENSOR_INFO_ACTIVE_ARRAY_SIZE: null");
+        }
+
+        // NOTE(review): unlike ACTIVE_ARRAY_SIZE above, the gets below are not
+        // null-checked; presumably these keys are always present — confirm.
+        Size size2 = p.get(CameraCharacteristics.SENSOR_INFO_PIXEL_ARRAY_SIZE);
+        Log.v(TAG, "SENSOR_INFO_PIXEL_ARRAY_SIZE: " + size2.getWidth() + "x" + size2.getHeight());
+
+        SizeF size3 = p.get(CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE);
+        Log.v(TAG, "SENSOR_INFO_PHYSICAL_SIZE: " + size3.getWidth() + "x" + size3.getHeight());
+
+
+        int sensorOrientation = p.get(CameraCharacteristics.SENSOR_ORIENTATION);
+        Log.v(TAG, "SENSOR_ORIENTATION: " + sensorOrientation);
+
+        Log.v(TAG, "SENSOR_INFO_TIMESTAMP_SOURCE: " +
+                getTimestampSourceName(p.get(CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE)));
+
+        Log.v(TAG, "LENS_INFO_FOCUS_DISTANCE_CALIBRATION: " +
+                getFocusDistanceCalibrationName(p.get(CameraCharacteristics.LENS_INFO_FOCUS_DISTANCE_CALIBRATION)));
+
+        int[] faceModes = p.get(CameraCharacteristics.STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES);
+        Log.v(TAG, "STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES: ");
+        for (int i = 0; i < faceModes.length; i++) {
+            switch (faceModes[i]) {
+                case CameraCharacteristics.STATISTICS_FACE_DETECT_MODE_OFF:
+                    Log.v(TAG, " STATISTICS_FACE_DETECT_MODE_OFF");
+                    break;
+                case CameraCharacteristics.STATISTICS_FACE_DETECT_MODE_SIMPLE:
+                    Log.v(TAG, " STATISTICS_FACE_DETECT_MODE_SIMPLE");
+                    break;
+                case CameraCharacteristics.STATISTICS_FACE_DETECT_MODE_FULL:
+                    Log.v(TAG, " STATISTICS_FACE_DETECT_MODE_FULL");
+                    break;
+                default:
+                    Log.v(TAG, " STATISTICS_FACE_DETECT_MODE_? (unknown)");
+            }
+        }
+
+        Log.v(TAG, "STATISTICS_INFO_MAX_FACE_COUNT: " + p.get(CameraCharacteristics.STATISTICS_INFO_MAX_FACE_COUNT));
+
+        Log.v(TAG, "REQUEST_PIPELINE_MAX_DEPTH: "
+                + p.get(CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH));
+
+        Log.v(TAG, "REQUEST_MAX_NUM_OUTPUT_RAW: "
+                + p.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW));
+        Log.v(TAG, "REQUEST_MAX_NUM_OUTPUT_PROC: "
+                + p.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC));
+        Log.v(TAG, "REQUEST_MAX_NUM_OUTPUT_PROC_STALLING: "
+                + p.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING));
+
+        Log.v(TAG, "EDGE_AVAILABLE_EDGE_MODES: "
+                + intsToString(p.get(CameraCharacteristics.EDGE_AVAILABLE_EDGE_MODES)));
+
+        Log.v(TAG, "NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES: "
+                + intsToString(p.get(CameraCharacteristics.NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES)));
+
+        // NOTE(review): REQUEST_MAX_NUM_OUTPUT_PROC_STALLING was already logged
+        // above; this second print looks like a copy-paste duplicate.
+        Log.v(TAG, "REQUEST_MAX_NUM_OUTPUT_PROC_STALLING: "
+                + p.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING));
+
+
+        // REQUEST_AVAILABLE_CAPABILITIES
+        boolean mHasReprocessing = false;
+        {
+            Log.v(TAG, "CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES:");
+            for (int item : p.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES)) {
+                Log.v(TAG, " " + item + " = " + getCapabilityName(item));
+                // 4 = OPAQUE_REPROCESSING, 7 = YUV_REPROCESSING; numeric because
+                // the named constants are not available here (see getCapabilityName()).
+                if (item == 4 || item == 7) {
+                    mHasReprocessing = true;
+                }
+            }
+        }
+
+        StreamConfigurationMap map = p.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+        {
+            int[] formats = map.getOutputFormats();
+            Log.v(TAG, "number of output formats: " + formats.length);
+            for (int i = 0; i < formats.length; i++) {
+                Log.v(TAG, "output sizes for format " + formats[i] +
+                        " = ImageFormat." + getFormatName(formats[i]) + " = " +
+                        ImageFormat.getBitsPerPixel(formats[i]) + " bits per pixel.");
+                Size[] sizes = map.getOutputSizes(formats[i]);
+                if (sizes != null) {
+                    Log.v(TAG, " Size Stall duration Min frame duration");
+                    for (int j = 0; j < sizes.length; j++) {
+                        Log.v(TAG, String.format(" %10s %7d ms %7d ms \n",
+                                sizes[j].toString(),
+                                map.getOutputStallDuration(formats[i], sizes[j]) / 1000000,
+                                map.getOutputMinFrameDuration(formats[i], sizes[j]) / 1000000
+                        ));
+                    }
+                }
+            }
+        }
+
+        if (mHasReprocessing) {
+            int[] formats = map.getInputFormats();
+            Log.v(TAG, "number of input formats: " + formats.length);
+            for (int i = 0; i < formats.length; i++) {
+                Size[] sizes = map.getInputSizes(formats[i]);
+                Log.v(TAG, "input sizes for format " + formats[i] + " = ImageFormat."
+                        + getFormatName(formats[i]) + " are: " + sizesToString(sizes));
+            }
+        }
+
+        {
+            Size[] sizes = map.getOutputSizes(SurfaceHolder.class);
+            Log.v(TAG, "output sizes for SurfaceHolder.class are: " + sizesToString(sizes));
+        }
+
+        {
+            Size[] sizes = map.getOutputSizes(SurfaceTexture.class);
+            Log.v(TAG, "output sizes for SurfaceTexture.class are: " + sizesToString(sizes));
+        }
+
+        // JPEG thumbnail sizes
+        {
+            Size[] sizes = p.get(CameraCharacteristics.JPEG_AVAILABLE_THUMBNAIL_SIZES);
+            Log.v(TAG, "JPEG thumbnail sizes: " + sizesToString(sizes));
+        }
+
+        // REQUEST HARDWARE LEVEL
+        {
+            int level = p.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
+            Log.v(TAG, "CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL: " + getHardwareLevelName(level));
+        }
+
+
+        // REQUEST CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES
+        {
+            Log.v(TAG, "CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES:");
+            for (Range<Integer> item : p.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES)) {
+                Log.v(TAG, " " + item);
+            }
+        }
+        // SENSOR_INFO_EXPOSURE_TIME_RANGE
+        {
+            Range<Long> rr = p.get(CameraCharacteristics.SENSOR_INFO_EXPOSURE_TIME_RANGE);
+            Log.v(TAG, "CameraCharacteristics.SENSOR_INFO_EXPOSURE_TIME_RANGE: " + rr);
+        }
+
+
+        // CAPTURE REQUEST KEYS
+        {
+            String keys = "";
+            for (CaptureRequest.Key key : p.getAvailableCaptureRequestKeys()) {
+                keys += key.getName() + " ";
+            }
+            Log.v(TAG, "CameraCharacteristics.getAvailableCaptureRequestKeys() = " + keys);
+        }
+
+        // CAPTURE RESULT KEYS
+        {
+            String keys = "";
+            for (CaptureResult.Key key : p.getAvailableCaptureResultKeys()) {
+                keys += key.getName() + " ";
+            }
+            Log.v(TAG, "CameraCharacteristics.getAvailableCaptureResultKeys() = " + keys);
+        }
+
+    }
+
+    /** Space-separated rendering of a Size[]; empty string for null. */
+    public static String sizesToString(Size[] sizes) {
+        String result = "";
+        if (sizes != null) {
+            for (int j = 0; j < sizes.length; j++) {
+                result += sizes[j].toString() + " ";
+            }
+        }
+        return result;
+    }
+
+    /** Space-separated rendering of an int[]; empty string for null. */
+    public static String intsToString(int[] modes) {
+        String result = "";
+        if (modes != null) {
+            for (int j = 0; j < modes.length; j++) {
+                result += modes[j] + " ";
+            }
+        }
+        return result;
+    }
+
+    /** Readable name for a SENSOR_INFO_TIMESTAMP_SOURCE value (null-safe). */
+    public static String getTimestampSourceName(Integer level) {
+        if (level == null) return "null";
+        switch (level) {
+            case CameraMetadata.SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME:
+                return "SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME";
+            case CameraMetadata.SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN:
+                return "SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN";
+        }
+        return "Unknown";
+    }
+
+    /** Readable name for a LENS_INFO_FOCUS_DISTANCE_CALIBRATION value (null-safe). */
+    public static String getFocusDistanceCalibrationName(Integer level) {
+        if (level == null) return "null";
+        switch (level) {
+            case CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE:
+                return "LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE";
+            case CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED:
+                return "LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED";
+            case CameraMetadata.LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED:
+                return "LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED";
+        }
+        return "Unknown";
+    }
+
+    // Readable name for a REQUEST_AVAILABLE_CAPABILITIES entry. Values 4..8 are
+    // numeric literals — presumably the named constants were not yet available
+    // at this compile SDK level; confirm.
+    public static String getCapabilityName(int format) {
+        switch (format) {
+            case CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE:
+                return "REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE";
+            case CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR:
+                return "REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR";
+            case CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING:
+                return "REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING";
+            case CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW:
+                return "REQUEST_AVAILABLE_CAPABILITIES_RAW";
+            case 4:
+                return "REQUEST_AVAILABLE_CAPABILITIES_OPAQUE_REPROCESSING";
+            case 5:
+                return "REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS";
+            case 6:
+                return "REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE";
+            case 7:
+                return "REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING";
+            case 8:
+                return "REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT";
+        }
+        return "Unknown";
+    }
+
+    /** Readable name for an INFO_SUPPORTED_HARDWARE_LEVEL value. */
+    public static String getHardwareLevelName(int level) {
+        switch (level) {
+            case CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_FULL:
+                return "INFO_SUPPORTED_HARDWARE_LEVEL_FULL";
+            case CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED:
+                return "INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED";
+            case CameraMetadata.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY:
+                return "INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY";
+        }
+        return "Unknown";
+    }
+
+
+    /** Readable name for an ImageFormat constant. */
+    public static String getFormatName(int format) {
+        switch (format) {
+            // Android M
+            //case ImageFormat.PRIVATE:
+            //    return "PRIVATE";
+            // Android L
+            case ImageFormat.JPEG:
+                return "JPEG";
+            case ImageFormat.RGB_565:
+                return "RGB_565";
+            case ImageFormat.NV16:
+                return "NV16";
+            case ImageFormat.YUY2:
+                return "YUY2";
+            case ImageFormat.YV12:
+                return "YV12";
+            case ImageFormat.NV21:
+                return "NV21";
+            case ImageFormat.YUV_420_888:
+                return "YUV_420_888";
+            case ImageFormat.RAW_SENSOR:
+                return "RAW_SENSOR";
+            case ImageFormat.RAW10:
+                return "RAW10";
+        }
+        return "Unknown";
+    }
+
+
+    /**
+     * Print out various information about the device display.
+     */
+    private static void printDisplayInfo(Activity activity) {
+        Log.v(TAG, "============= DEVICE INFO =============");
+        Log.v(TAG, "Build.DEVICE = " + Build.DEVICE);
+        Log.v(TAG, "Build.FINGERPRINT = " + Build.FINGERPRINT);
+        Log.v(TAG, "Build.BRAND = " + Build.BRAND);
+        Log.v(TAG, "Build.MODEL = " + Build.MODEL);
+        Log.v(TAG, "Build.PRODUCT = " + Build.PRODUCT);
+        Log.v(TAG, "Build.MANUFACTURER = " + Build.MANUFACTURER);
+        Log.v(TAG, "Build.VERSION.CODENAME = " + Build.VERSION.CODENAME);
+        Log.v(TAG, "Build.VERSION.SDK_INT = " + Build.VERSION.SDK_INT);
+
+        Log.v(TAG, "============= DEVICE DISPLAY INFO =============");
+        WindowManager windowMgr = activity.getWindowManager();
+
+        // Nexus 5 is 360dp * 567dp
+        // Each dp is 3 hardware pixels
+        Log.v(TAG, "screen width dp = " + activity.getResources().getConfiguration().screenWidthDp);
+        Log.v(TAG, "screen height dp = " + activity.getResources().getConfiguration().screenHeightDp);
+
+        DisplayMetrics metrics = new DisplayMetrics();
+        // With chrome subtracted.
+        windowMgr.getDefaultDisplay().getMetrics(metrics);
+        Log.v(TAG, "screen width pixels = " + metrics.widthPixels);
+        Log.v(TAG, "screen height pixels = " + metrics.heightPixels);
+        // Native.
+        windowMgr.getDefaultDisplay().getRealMetrics(metrics);
+        Log.v(TAG, "real screen width pixels = " + metrics.widthPixels);
+        Log.v(TAG, "real screen height pixels = " + metrics.heightPixels);
+
+        Log.v(TAG, "refresh rate = " + windowMgr.getDefaultDisplay().getRefreshRate() + " Hz");
+    }
+
+
+
+}
diff --git a/src/com/google/snappy/MyLoggingCallbacks.java b/src/com/google/snappy/MyLoggingCallbacks.java
new file mode 100644
index 0000000..d95e635
--- /dev/null
+++ b/src/com/google/snappy/MyLoggingCallbacks.java
@@ -0,0 +1,123 @@
+package com.google.snappy;
+
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CaptureFailure;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.TotalCaptureResult;
+import android.util.Log;
+
+/**
+ * Static utility class that logs various camera2 callbacks.
+ *
+ * The only reason this exists as a separate class is to avoid cluttering up MyApi2Camera.
+ */
+
+public class MyLoggingCallbacks {
+    private static final String TAG = "SNAPPY_LOG2";
+    // Success-path and per-frame logging is noisy; flip these on only while debugging.
+    private static final boolean LOG_EVERY_FRAME = false;
+    private static final boolean LOG_NON_ERRORS = false;
+
+    /** Logs CameraDevice lifecycle transitions; disconnects and errors always log. */
+    public static class DeviceStateCallback extends CameraDevice.StateCallback {
+        @Override
+        public void onOpened(CameraDevice camera) {
+            if (LOG_NON_ERRORS) {
+                Log.v(TAG, "Camera opened.");
+            }
+        }
+
+        @Override
+        public void onClosed(CameraDevice camera) {
+            if (LOG_NON_ERRORS) {
+                Log.v(TAG, "Camera closed.");
+            }
+        }
+
+        @Override
+        public void onDisconnected(CameraDevice camera) {
+            Log.v(TAG, "Camera disconnected.");
+        }
+
+        @Override
+        public void onError(CameraDevice camera, int error) {
+            Log.v(TAG, "Camera error: " + error);
+        }
+    }
+
+    /** Logs capture-session state transitions; configuration failures always log. */
+    public static class SessionStateCallback extends CameraCaptureSession.StateCallback {
+        @Override
+        public void onConfigured(CameraCaptureSession session) {
+            if (LOG_NON_ERRORS) {
+                Log.v(TAG, "Capture session callback onConfigured("+session+")");
+            }
+        }
+
+        @Override
+        public void onConfigureFailed(CameraCaptureSession session) {
+            Log.v(TAG, "Capture session callback onConfigureFailed("+session+")");
+            // BUG FIX: this previously called super.onReady(session) — a copy-paste
+            // slip from onReady(). onConfigureFailed is abstract in StateCallback so
+            // there is no base behavior to delegate to, and invoking the "ready"
+            // hook on a failed configuration is wrong. Just log.
+        }
+
+        @Override
+        public void onReady(CameraCaptureSession session) {
+            if (LOG_NON_ERRORS) {
+                Log.v(TAG, "Capture session callback onReady("+session+")");
+            }
+            super.onReady(session);
+        }
+
+        @Override
+        public void onActive(CameraCaptureSession session) {
+            if (LOG_NON_ERRORS) {
+                Log.v(TAG, "Capture session callback onActive("+session+")");
+            }
+            super.onActive(session);
+        }
+
+        @Override
+        public void onClosed(CameraCaptureSession session) {
+            if (LOG_NON_ERRORS) {
+                Log.v(TAG, "Capture session callback onClosed("+session+")");
+            }
+            super.onClosed(session);
+        }
+    }
+
+    /** Logs per-frame capture milestones when LOG_EVERY_FRAME is enabled. */
+    public static class SessionCaptureCallback extends CameraCaptureSession.CaptureCallback {
+        @Override
+        public void onCaptureStarted(CameraCaptureSession session, CaptureRequest request, long timestamp, long frameNumber) {
+            if (LOG_EVERY_FRAME) {
+                Log.v(TAG, "Capture started.");
+            }
+            super.onCaptureStarted(session, request, timestamp, frameNumber);
+        }
+
+        @Override
+        public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request, CaptureResult partialResult) {
+            if (LOG_EVERY_FRAME) {
+                Log.v(TAG, "Capture progressed.");
+            }
+            super.onCaptureProgressed(session, request, partialResult);
+        }
+
+        @Override
+        public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
+            if (LOG_EVERY_FRAME) {
+                Log.v(TAG, "Capture completed.");
+            }
+            super.onCaptureCompleted(session, request, result);
+        }
+
+        @Override
+        public void onCaptureFailed(CameraCaptureSession session, CaptureRequest request, CaptureFailure failure) {
+            // NOTE(review): failures are currently not logged, which is surprising
+            // for a logging class — consider adding a Log.v here. Behavior preserved.
+            super.onCaptureFailed(session, request, failure);
+        }
+
+        @Override
+        public void onCaptureSequenceCompleted(CameraCaptureSession session, int sequenceId, long frameNumber) {
+            super.onCaptureSequenceCompleted(session, sequenceId, frameNumber);
+        }
+    }
+}
diff --git a/src/com/google/snappy/MyTimer.java b/src/com/google/snappy/MyTimer.java
new file mode 100644
index 0000000..1460037
--- /dev/null
+++ b/src/com/google/snappy/MyTimer.java
@@ -0,0 +1,18 @@
+package com.google.snappy;
+
+/**
+ * A global spot to store some times.
+ */
+public class MyTimer {
+ // Got control in onCreate()
+ public static long t0;
+ // Sent open() to camera.
+ public static long t_open_start;
+ // Open from camera done.
+ public static long t_open_end;
+ // Told camera to configure capture session.
+ public static long t_session_go;
+ // Told session to do repeating request.
+ public static long t_burst;
+
+}
diff --git a/src/com/google/snappy/NormalizedFace.java b/src/com/google/snappy/NormalizedFace.java
new file mode 100644
index 0000000..42d890c
--- /dev/null
+++ b/src/com/google/snappy/NormalizedFace.java
@@ -0,0 +1,77 @@
+package com.google.snappy;
+
+import android.graphics.PointF;
+import android.graphics.RectF;
+import android.hardware.camera2.params.Face;
+
+/**
+ * Created by andyhuibers on 7/7/15.
+ *
+ * Face coordinates. Normalized 0 to 1, and in native sensor orientation, which so far seems to be
+ * landscape.
+ *
+ */
+public class NormalizedFace {
+    // All coordinates are normalized to [0, 1] in native sensor (landscape)
+    // orientation. Any field may be null when the camera did not report it.
+    public RectF bounds;
+    public PointF leftEye;
+    public PointF rightEye;
+    public PointF mouth;
+
+    /**
+     * Normalizes a camera2 {@link Face} from sensor pixel coordinates into
+     * unit coordinates.
+     *
+     * @param face the detected face, landmarks in sensor pixels (may have null landmarks)
+     * @param dX   horizontal normalization divisor — presumably the active-array
+     *             width in pixels; confirm against the caller
+     * @param dY   vertical normalization divisor
+     * @param offX horizontal offset subtracted before normalizing
+     * @param offY vertical offset subtracted before normalizing
+     */
+    public NormalizedFace(Face face, int dX, int dY, int offX, int offY) {
+        if (face.getLeftEyePosition() != null) {
+            leftEye = new PointF();
+            leftEye.x = (float) (face.getLeftEyePosition().x - offX) / dX;
+            leftEye.y = (float) (face.getLeftEyePosition().y - offY) / dY;
+        }
+        if (face.getRightEyePosition() != null) {
+            rightEye = new PointF();
+            rightEye.x = (float) (face.getRightEyePosition().x - offX) / dX;
+            rightEye.y = (float) (face.getRightEyePosition().y - offY) / dY;
+        }
+        if (face.getMouthPosition() != null) {
+            mouth = new PointF();
+            mouth.x = (float) (face.getMouthPosition().x - offX) / dX;
+            mouth.y = (float) (face.getMouthPosition().y - offY) / dY;
+        }
+        if (face.getBounds() != null) {
+            bounds = new RectF();
+            bounds.left = (float) (face.getBounds().left - offX) / dX;
+            bounds.top = (float) (face.getBounds().top - offY) / dY;
+            bounds.right = (float) (face.getBounds().right - offX) / dX;
+            bounds.bottom = (float) (face.getBounds().bottom - offY) / dY;
+        }
+    }
+
+    /**
+     * Mirrors all coordinates about the vertical center line (x -> 1 - x).
+     */
+    public void mirrorInX() {
+        if (leftEye != null) {
+            leftEye.x = 1f - leftEye.x;
+        }
+        if (rightEye != null) {
+            rightEye.x = 1f - rightEye.x;
+        }
+        if (mouth != null) {
+            mouth.x = 1f - mouth.x;
+        }
+        // BUG FIX: bounds can legitimately be null (the constructor only sets it
+        // when face.getBounds() is non-null) — the landmark fields were guarded
+        // but bounds was not, so this used to throw NullPointerException.
+        if (bounds != null) {
+            float oldLeft = bounds.left;
+            bounds.left = 1f - bounds.right;
+            bounds.right = 1f - oldLeft;
+        }
+    }
+
+    /**
+     * Mirrors all coordinates about the horizontal center line (y -> 1 - y).
+     * Typically required for the front camera.
+     */
+    public void mirrorInY() {
+        if (leftEye != null) {
+            leftEye.y = 1f - leftEye.y;
+        }
+        if (rightEye != null) {
+            rightEye.y = 1f - rightEye.y;
+        }
+        if (mouth != null) {
+            mouth.y = 1f - mouth.y;
+        }
+        // Same null guard as mirrorInX().
+        if (bounds != null) {
+            float oldTop = bounds.top;
+            bounds.top = 1f - bounds.bottom;
+            bounds.bottom = 1f - oldTop;
+        }
+    }
+}
diff --git a/src/com/google/snappy/PreviewOverlay.java b/src/com/google/snappy/PreviewOverlay.java
new file mode 100644
index 0000000..803d824
--- /dev/null
+++ b/src/com/google/snappy/PreviewOverlay.java
@@ -0,0 +1,216 @@
+package com.google.snappy;
+
+import android.content.Context;
+import android.content.res.Resources;
+import android.graphics.Canvas;
+import android.graphics.Paint;
+import android.graphics.PointF;
+import android.graphics.RectF;
+import android.util.AttributeSet;
+import android.view.View;
+
+/**
+ * Created by andyhuibers on 9/18/14.
+ */
+
+public class PreviewOverlay extends View {
+    private static final String TAG = "SNAPPY_FACE";
+
+    private boolean mShow3AInfo;
+    private boolean mShowGyroGrid;
+    private int mColor;
+    private int mColor2;
+    private Paint mPaint;   // face boxes, landmarks, and HUD text
+    private Paint mPaint2;  // 3A HUD shapes
+
+    // Rendered data, supplied by the capture pipeline:
+    private NormalizedFace[] mFaces;
+    private float mExposure;   // normalized exposure; stored but not drawn yet
+    private float mLens;       // normalized lens/focus position [0, 1]
+    private int mAfState;      // a CONTROL_AF_STATE_* value (see table below)
+    private float mFovLargeDegrees;
+    private float mFovSmallDegrees;
+    // Integrated gyro angles in radians; [0] phases the horizontal grid lines,
+    // [1] the vertical ones.
+    float[] mAngles = new float[2];
+
+    public PreviewOverlay(Context context, AttributeSet attrs) {
+        super(context, attrs);
+        Resources res = getResources();
+        mColor = res.getColor(R.color.face_color);
+        mPaint = new Paint();
+        mPaint.setColor(mColor);
+        mPaint.setAntiAlias(true);
+        mPaint.setStyle(Paint.Style.STROKE);
+        mPaint.setStrokeWidth(res.getDimension(R.dimen.face_circle_stroke));
+
+        mColor2 = res.getColor(R.color.hud_color);
+        mPaint2 = new Paint();
+        mPaint2.setAntiAlias(true);
+        mPaint2.setStyle(Paint.Style.STROKE);
+        mPaint2.setStrokeWidth(res.getDimension(R.dimen.hud_stroke));
+    }
+
+    /** Supplies per-frame face and 3A data, then schedules a redraw. */
+    public void setFrameData(NormalizedFace[] faces, float normExposure, float normLens, int afState) {
+        mFaces = faces;
+        mExposure = normExposure;
+        mLens = normLens;
+        mAfState = afState;
+        this.setVisibility(VISIBLE);
+        invalidate();
+    }
+
+    /** Toggles the AF/3A HUD overlay. */
+    public void show3AInfo(boolean show) {
+        mShow3AInfo = show;
+        this.setVisibility(VISIBLE);
+        invalidate();
+    }
+
+    public void setGyroAngles(float[] angles) {
+        mAngles = angles;
+    }
+
+    public void setFieldOfView(float fovLargeDegrees, float fovSmallDegrees) {
+        mFovLargeDegrees = fovLargeDegrees;
+        mFovSmallDegrees = fovSmallDegrees;
+    }
+
+    /** Toggles the gyro-stabilized 10-degree grid overlay. */
+    public void showGyroGrid(boolean show) {
+        mShowGyroGrid = show;
+        this.setVisibility(VISIBLE);
+        invalidate();
+    }
+
+    // Endpoints of the log-exposure scale, in log10(nanoseconds).
+    private static final double SHORT_LOG_EXPOSURE = Math.log10(1000000000 / 10000); // 1/10000 second
+    private static final double LONG_LOG_EXPOSURE = Math.log10(1000000000 / 10); // 1/10 second
+    // Normalized positions of 1/30s, 1/100s and 1/1000s on that scale.
+    // NOTE(review): not referenced within this class; kept in case a package
+    // sibling reads it (default access) — confirm before removing.
+    float[] yGridValues = new float[] {
+            (float) ((Math.log10(1000000000 / 30) - SHORT_LOG_EXPOSURE) / (LONG_LOG_EXPOSURE - SHORT_LOG_EXPOSURE)),
+            (float) ((Math.log10(1000000000 / 100) - SHORT_LOG_EXPOSURE) / (LONG_LOG_EXPOSURE - SHORT_LOG_EXPOSURE)),
+            (float) ((Math.log10(1000000000 / 1000) - SHORT_LOG_EXPOSURE) / (LONG_LOG_EXPOSURE - SHORT_LOG_EXPOSURE))};
+
+    /** Focus states
+     CONTROL_AF_STATE_INACTIVE 0
+     CONTROL_AF_STATE_PASSIVE_SCAN 1
+     CONTROL_AF_STATE_PASSIVE_FOCUSED 2
+     CONTROL_AF_STATE_ACTIVE_SCAN 3
+     CONTROL_AF_STATE_FOCUSED_LOCKED 4
+     CONTROL_AF_STATE_NOT_FOCUSED_LOCKED 5
+     CONTROL_AF_STATE_PASSIVE_UNFOCUSED 6
+     */
+
+    @Override
+    protected void onDraw(Canvas canvas) {
+        if (mFaces == null) {
+            // NOTE(review): bailing out here also suppresses the 3A HUD and the
+            // gyro grid until the first frame data arrives — confirm intended.
+            return;
+        }
+        float previewW = this.getWidth();
+        float previewH = this.getHeight();
+
+        // 3A visualizations
+        if (mShow3AInfo) {
+            // (A permanently-disabled "3A ball on a rail" visualization — dead
+            // `if (false)` code — was removed here.)
+
+            // Draw the AF center circle, sized by the current lens position,
+            // colored and labeled by the continuous-AF state.
+            mPaint2.setStyle(Paint.Style.FILL_AND_STROKE);
+            float x2 = 0.5f * previewW;
+            float y2 = 0.5f * previewH;
+            mPaint2.setColor(0x990000FF);
+            String text = "NOT IN CAF";
+            if (mAfState == 1) { // passive scan RED
+                mPaint2.setColor(0x99FF0000);
+                text = "CAF SCAN";
+            }
+            if (mAfState == 2) { // passive good
+                mPaint2.setColor(0x9999FF99);
+                text = "CAF FOCUSED";
+            }
+            if (mAfState == 6) { // passive bad
+                mPaint2.setColor(0x99FFFFFF);
+                text = "CAF UNFOCUSED";
+            }
+            canvas.drawCircle(x2, y2, mLens * 0.25f * previewW, mPaint2);
+            mPaint.setColor(0xFFFFFFFF);
+            mPaint.setTextSize(36f);
+            canvas.drawText(text, x2, y2 - mLens * 0.25f * previewW - 7f, mPaint);
+        }
+
+        // Draw faces. Face data is in native sensor (landscape) orientation, so
+        // axes are swapped and flipped into view coordinates here.
+        for (NormalizedFace face : mFaces) {
+            RectF r1 = face.bounds;
+            float newY = r1.centerX() * previewH;
+            float newX = (1 - r1.centerY()) * previewW;
+            float dY = r1.width() * previewH;
+            float dX = r1.height() * previewW;
+            float dP = (dX + dY) * 0.045f; // corner radius, also landmark circle size
+            RectF newR1 = new RectF(newX - dX * 0.5f, newY - dY * 0.5f, newX + dX * 0.5f, newY + dY * 0.5f);
+            canvas.drawRoundRect(newR1, dP, dP, mPaint);
+
+            PointF[] p = new PointF[3];
+            p[0] = face.leftEye;
+            p[1] = face.rightEye;
+            p[2] = face.mouth;
+
+            for (int j = 0; j < 3; j++) {
+                if (p[j] == null) {
+                    continue;
+                }
+                newY = p[j].x * previewH;
+                newX = (1 - p[j].y) * previewW;
+                canvas.drawCircle(newX, newY, dP, mPaint);
+            }
+        }
+
+        // Draw gyro grid: 10-degree increments projected through a pinhole model.
+        if (mShowGyroGrid) {
+            float x1, x2, y1, y2;
+
+            //
+            // screen/sensor
+            // |
+            // screen/2 = FL tan(FOV/2) |
+            // | lens
+            // |<––––––––––––– FL –––––––––––>()–––––––––> scene @ infinity
+            // |
+            // |
+            // |
+            //
+
+            float focalLengthH = 0.5f * previewH / (float) Math.tan(Math.toRadians(mFovLargeDegrees) * 0.5);
+            float focalLengthW = 0.5f * previewW / (float) Math.tan(Math.toRadians(mFovSmallDegrees) * 0.5);
+            // 10-degree spacing; the original cast this through float for no reason,
+            // needlessly truncating precision.
+            final double ANGLE_STEP = Math.toRadians(10.0);
+
+            // Horizontal lines, phase-locked to the gyro angle.
+            double phase1 = mAngles[0] % ANGLE_STEP;
+            for (double i = -5 * ANGLE_STEP + phase1; i < 5 * ANGLE_STEP; i += ANGLE_STEP) {
+                x1 = 0;
+                x2 = previewW;
+                y1 = y2 = previewH / 2 + focalLengthH * (float) Math.tan(i);
+                canvas.drawLine(x1, y1, x2, y2, mPaint);
+            }
+            // Vertical lines, phase-locked to the other gyro axis.
+            double phase2 = mAngles[1] % ANGLE_STEP;
+            for (double i = -5 * ANGLE_STEP + phase2; i < 5 * ANGLE_STEP; i += ANGLE_STEP) {
+                x1 = x2 = previewW / 2 + focalLengthW * (float) Math.tan(i);
+                y1 = 0;
+                y2 = previewH;
+                canvas.drawLine(x1, y1, x2, y2, mPaint);
+            }
+        }
+
+        super.onDraw(canvas);
+    }
+}
+
+