summaryrefslogtreecommitdiff
path: root/devices
diff options
context:
space:
mode:
authorJayant Chowdhary <jchowdhary@google.com>2021-03-24 21:37:03 +0000
committerAndroid (Google) Code Review <android-gerrit@google.com>2021-03-24 21:37:03 +0000
commit9c4061fbaf64e7e54ab82fd59df0a6f9a16a4459 (patch)
tree77a7daefcab6443e129fff7a0d90c5d4ddb84a56 /devices
parent4e3f7a629b3a3b4960a2998dbc2d1ff0c6610edb (diff)
parent5b01dbff7d31d7e6d5e400276545ce0c887a84db (diff)
downloadcamera-9c4061fbaf64e7e54ab82fd59df0a6f9a16a4459.tar.gz
Merge "EmulatedCamera: Implement pixel binning." into sc-dev
Diffstat (limited to 'devices')
-rw-r--r--devices/EmulatedCamera/hwl/EmulatedCameraDeviceHWLImpl.cpp10
-rw-r--r--devices/EmulatedCamera/hwl/EmulatedCameraDeviceHWLImpl.h2
-rw-r--r--devices/EmulatedCamera/hwl/EmulatedCameraDeviceSessionHWLImpl.cpp57
-rw-r--r--devices/EmulatedCamera/hwl/EmulatedCameraDeviceSessionHWLImpl.h8
-rw-r--r--devices/EmulatedCamera/hwl/EmulatedCameraProviderHWLImpl.cpp14
-rw-r--r--devices/EmulatedCamera/hwl/EmulatedRequestState.cpp31
-rw-r--r--devices/EmulatedCamera/hwl/EmulatedRequestState.h3
-rw-r--r--devices/EmulatedCamera/hwl/EmulatedSensor.cpp349
-rw-r--r--devices/EmulatedCamera/hwl/EmulatedSensor.h47
-rw-r--r--devices/EmulatedCamera/hwl/configs/emu_camera_front.json241
-rw-r--r--devices/EmulatedCamera/hwl/utils/ExifUtils.cpp3
-rw-r--r--devices/EmulatedCamera/hwl/utils/HWLUtils.cpp10
-rw-r--r--devices/EmulatedCamera/hwl/utils/StreamConfigurationMap.cpp83
-rw-r--r--devices/EmulatedCamera/hwl/utils/StreamConfigurationMap.h3
14 files changed, 755 insertions, 106 deletions
diff --git a/devices/EmulatedCamera/hwl/EmulatedCameraDeviceHWLImpl.cpp b/devices/EmulatedCamera/hwl/EmulatedCameraDeviceHWLImpl.cpp
index a95e5c3..f6ffaee 100644
--- a/devices/EmulatedCamera/hwl/EmulatedCameraDeviceHWLImpl.cpp
+++ b/devices/EmulatedCamera/hwl/EmulatedCameraDeviceHWLImpl.cpp
@@ -77,6 +77,9 @@ status_t EmulatedCameraDeviceHwlImpl::Initialize() {
stream_configuration_map_ =
std::make_unique<StreamConfigurationMap>(*static_metadata_);
+ stream_configuration_map_max_resolution_ =
+ std::make_unique<StreamConfigurationMap>(*static_metadata_,
+ /*maxResolution*/ true);
for (const auto& it : *physical_device_map_) {
uint32_t physical_id = it.first;
@@ -84,6 +87,9 @@ status_t EmulatedCameraDeviceHwlImpl::Initialize() {
physical_stream_configuration_map_.emplace(
physical_id,
std::make_unique<StreamConfigurationMap>(*physical_hal_metadata));
+ physical_stream_configuration_map_max_resolution_.emplace(
+ physical_id, std::make_unique<StreamConfigurationMap>(
+ *physical_hal_metadata, /*maxResolution*/ true));
ret = GetSensorCharacteristics(physical_hal_metadata,
&sensor_chars_[physical_id]);
@@ -181,7 +187,9 @@ bool EmulatedCameraDeviceHwlImpl::IsStreamCombinationSupported(
const StreamConfiguration& stream_config) {
return EmulatedSensor::IsStreamCombinationSupported(
camera_id_, stream_config, *stream_configuration_map_,
- physical_stream_configuration_map_, sensor_chars_);
+ *stream_configuration_map_max_resolution_,
+ physical_stream_configuration_map_,
+ physical_stream_configuration_map_max_resolution_, sensor_chars_);
}
} // namespace android
diff --git a/devices/EmulatedCamera/hwl/EmulatedCameraDeviceHWLImpl.h b/devices/EmulatedCamera/hwl/EmulatedCameraDeviceHWLImpl.h
index 8081171..c88b804 100644
--- a/devices/EmulatedCamera/hwl/EmulatedCameraDeviceHWLImpl.h
+++ b/devices/EmulatedCamera/hwl/EmulatedCameraDeviceHWLImpl.h
@@ -81,7 +81,9 @@ class EmulatedCameraDeviceHwlImpl : public CameraDeviceHwl {
std::unique_ptr<HalCameraMetadata> static_metadata_;
std::unique_ptr<StreamConfigurationMap> stream_configuration_map_;
+ std::unique_ptr<StreamConfigurationMap> stream_configuration_map_max_resolution_;
PhysicalStreamConfigurationMap physical_stream_configuration_map_;
+ PhysicalStreamConfigurationMap physical_stream_configuration_map_max_resolution_;
PhysicalDeviceMapPtr physical_device_map_;
std::shared_ptr<EmulatedTorchState> torch_state_;
LogicalCharacteristics sensor_chars_;
diff --git a/devices/EmulatedCamera/hwl/EmulatedCameraDeviceSessionHWLImpl.cpp b/devices/EmulatedCamera/hwl/EmulatedCameraDeviceSessionHWLImpl.cpp
index d7d163b..2643c7d 100644
--- a/devices/EmulatedCamera/hwl/EmulatedCameraDeviceSessionHWLImpl.cpp
+++ b/devices/EmulatedCamera/hwl/EmulatedCameraDeviceSessionHWLImpl.cpp
@@ -63,7 +63,8 @@ status_t EmulatedCameraDeviceSessionHwlImpl::Initialize(
static_metadata_ = std::move(static_meta);
stream_configuration_map_ =
std::make_unique<StreamConfigurationMap>(*static_metadata_);
-
+ stream_configuration_map_max_resolution_ =
+ std::make_unique<StreamConfigurationMap>(*static_metadata_, true);
camera_metadata_ro_entry_t entry;
auto ret = static_metadata_->Get(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, &entry);
if (ret != OK) {
@@ -96,6 +97,9 @@ status_t EmulatedCameraDeviceSessionHwlImpl::Initialize(
physical_stream_configuration_map_.emplace(
it.first,
std::make_unique<StreamConfigurationMap>(*it.second.second.get()));
+ physical_stream_configuration_map_max_resolution_.emplace(
+ it.first, std::make_unique<StreamConfigurationMap>(
+ *it.second.second.get(), true));
}
return InitializeRequestProcessor();
@@ -152,9 +156,10 @@ status_t EmulatedCameraDeviceSessionHwlImpl::ConfigurePipeline(
if (!EmulatedSensor::IsStreamCombinationSupported(
physical_camera_id, request_config, *stream_configuration_map_,
- physical_stream_configuration_map_, logical_chars_)) {
- ALOGE("%s: Stream combination not supported for camera %d!", __FUNCTION__,
- physical_camera_id);
+ *stream_configuration_map_max_resolution_,
+ physical_stream_configuration_map_,
+ physical_stream_configuration_map_max_resolution_, logical_chars_)) {
+ ALOGE("%s: Stream combination not supported!", __FUNCTION__);
return BAD_VALUE;
}
@@ -275,6 +280,28 @@ void EmulatedCameraDeviceSessionHwlImpl::DestroyPipelines() {
request_processor_ = nullptr;
}
+status_t EmulatedCameraDeviceSessionHwlImpl::CheckOutputFormatsForInput(
+ const HwlPipelineRequest& request,
+ const std::unordered_map<uint32_t, EmulatedStream>& streams,
+ const std::unique_ptr<StreamConfigurationMap>& stream_configuration_map,
+ android_pixel_format_t input_format) {
+ auto output_formats =
+ stream_configuration_map->GetValidOutputFormatsForInput(input_format);
+ for (const auto& output_buffer : request.output_buffers) {
+ auto output_stream = streams.at(output_buffer.stream_id);
+ if (output_formats.find(output_stream.override_format) ==
+ output_formats.end()) {
+ ALOGE(
+ "%s: Reprocess request with input format: 0x%x to output "
+ "format: 0x%x"
+ " not supported!",
+ __FUNCTION__, input_format, output_stream.override_format);
+ return BAD_VALUE;
+ }
+ }
+ return OK;
+}
+
status_t EmulatedCameraDeviceSessionHwlImpl::SubmitRequests(
uint32_t frame_number, std::vector<HwlPipelineRequest>& requests) {
ATRACE_CALL();
@@ -286,21 +313,13 @@ status_t EmulatedCameraDeviceSessionHwlImpl::SubmitRequests(
for (const auto& input_buffer : request.input_buffers) {
const auto& streams = pipelines_[request.pipeline_id].streams;
auto input_stream = streams.at(input_buffer.stream_id);
- auto output_formats =
- stream_configuration_map_->GetValidOutputFormatsForInput(
- input_stream.override_format);
- for (const auto& output_buffer : request.output_buffers) {
- auto output_stream = streams.at(output_buffer.stream_id);
- if (output_formats.find(output_stream.override_format) ==
- output_formats.end()) {
- ALOGE(
- "%s: Reprocess request with input format: 0x%x to output "
- "format: 0x%x"
- " not supported!",
- __FUNCTION__, input_stream.override_format,
- output_stream.override_format);
- return BAD_VALUE;
- }
+ if ((CheckOutputFormatsForInput(request, streams,
+ stream_configuration_map_,
+ input_stream.override_format) != OK) &&
+ (CheckOutputFormatsForInput(
+ request, streams, stream_configuration_map_max_resolution_,
+ input_stream.override_format) != OK)) {
+ return BAD_VALUE;
}
}
}
diff --git a/devices/EmulatedCamera/hwl/EmulatedCameraDeviceSessionHWLImpl.h b/devices/EmulatedCamera/hwl/EmulatedCameraDeviceSessionHWLImpl.h
index 425c3e9..e637e46 100644
--- a/devices/EmulatedCamera/hwl/EmulatedCameraDeviceSessionHWLImpl.h
+++ b/devices/EmulatedCamera/hwl/EmulatedCameraDeviceSessionHWLImpl.h
@@ -149,6 +149,12 @@ class EmulatedCameraDeviceSessionHwlImpl : public CameraDeviceSessionHwl {
std::unique_ptr<HalCameraMetadata> static_meta);
status_t InitializeRequestProcessor();
+ status_t CheckOutputFormatsForInput(
+ const HwlPipelineRequest& request,
+ const std::unordered_map<uint32_t, EmulatedStream>& streams,
+ const std::unique_ptr<StreamConfigurationMap>& stream_configuration_map,
+ android_pixel_format_t input_format);
+
EmulatedCameraDeviceSessionHwlImpl(
PhysicalDeviceMapPtr physical_devices,
std::shared_ptr<EmulatedTorchState> torch_state)
@@ -168,6 +174,8 @@ class EmulatedCameraDeviceSessionHwlImpl : public CameraDeviceSessionHwl {
std::unique_ptr<EmulatedRequestProcessor> request_processor_;
std::unique_ptr<StreamConfigurationMap> stream_configuration_map_;
PhysicalStreamConfigurationMap physical_stream_configuration_map_;
+ PhysicalStreamConfigurationMap physical_stream_configuration_map_max_resolution_;
+ std::unique_ptr<StreamConfigurationMap> stream_configuration_map_max_resolution_;
SensorCharacteristics sensor_chars_;
std::shared_ptr<EmulatedTorchState> torch_state_;
PhysicalDeviceMapPtr physical_device_map_;
diff --git a/devices/EmulatedCamera/hwl/EmulatedCameraProviderHWLImpl.cpp b/devices/EmulatedCamera/hwl/EmulatedCameraProviderHWLImpl.cpp
index 4e66325..fdb92cc 100644
--- a/devices/EmulatedCamera/hwl/EmulatedCameraProviderHWLImpl.cpp
+++ b/devices/EmulatedCamera/hwl/EmulatedCameraProviderHWLImpl.cpp
@@ -288,6 +288,9 @@ status_t EmulatedCameraProviderHwlImpl::IsConcurrentStreamCombinationSupported(
auto stream_configuration_map = std::make_unique<StreamConfigurationMap>(
*(static_metadata_[config.camera_id]));
+ auto stream_configuration_map_max_resolution =
+ std::make_unique<StreamConfigurationMap>(
+ *(static_metadata_[config.camera_id]), /*maxResolution*/ true);
LogicalCharacteristics sensor_chars;
status_t ret =
@@ -300,6 +303,7 @@ status_t EmulatedCameraProviderHwlImpl::IsConcurrentStreamCombinationSupported(
}
PhysicalStreamConfigurationMap physical_stream_configuration_map;
+ PhysicalStreamConfigurationMap physical_stream_configuration_map_max_resolution;
auto const& physicalCameraInfo = camera_id_map_[config.camera_id];
for (size_t i = 0; i < physicalCameraInfo.size(); i++) {
uint32_t physical_camera_id = physicalCameraInfo[i].second;
@@ -307,6 +311,11 @@ status_t EmulatedCameraProviderHwlImpl::IsConcurrentStreamCombinationSupported(
physical_camera_id, std::make_unique<StreamConfigurationMap>(
*(static_metadata_[physical_camera_id])));
+ physical_stream_configuration_map_max_resolution.emplace(
+ physical_camera_id,
+ std::make_unique<StreamConfigurationMap>(
+ *(static_metadata_[physical_camera_id]), /*maxResolution*/ true));
+
ret = GetSensorCharacteristics(static_metadata_[physical_camera_id].get(),
&sensor_chars[physical_camera_id]);
if (ret != OK) {
@@ -318,8 +327,9 @@ status_t EmulatedCameraProviderHwlImpl::IsConcurrentStreamCombinationSupported(
if (!EmulatedSensor::IsStreamCombinationSupported(
config.camera_id, config.stream_configuration,
- *stream_configuration_map, physical_stream_configuration_map,
- sensor_chars)) {
+ *stream_configuration_map, *stream_configuration_map_max_resolution,
+ physical_stream_configuration_map,
+ physical_stream_configuration_map_max_resolution, sensor_chars)) {
return OK;
}
}
diff --git a/devices/EmulatedCamera/hwl/EmulatedRequestState.cpp b/devices/EmulatedCamera/hwl/EmulatedRequestState.cpp
index 88064e8..0b01bb7 100644
--- a/devices/EmulatedCamera/hwl/EmulatedRequestState.cpp
+++ b/devices/EmulatedCamera/hwl/EmulatedRequestState.cpp
@@ -40,7 +40,8 @@ const std::set<uint8_t> EmulatedRequestState::kSupportedCapabilites = {
ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING,
ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING,
ANDROID_REQUEST_AVAILABLE_CAPABILITIES_LOGICAL_MULTI_CAMERA,
-};
+ ANDROID_REQUEST_AVAILABLE_CAPABILITIES_REMOSAIC_REPROCESSING,
+ ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR};
const std::set<uint8_t> EmulatedRequestState::kSupportedHWLevels = {
ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED,
@@ -623,6 +624,17 @@ status_t EmulatedRequestState::InitializeSensorSettings(
}
}
+ ret = request_settings_->Get(ANDROID_SENSOR_PIXEL_MODE, &entry);
+ if ((ret == OK) && (entry.count == 1)) {
+ if (available_sensor_pixel_modes_.find(entry.data.u8[0]) !=
+ available_sensor_pixel_modes_.end()) {
+ sensor_pixel_mode_ = entry.data.u8[0];
+ } else {
+ ALOGE("%s: Unsupported control sensor pixel mode!", __FUNCTION__);
+ return BAD_VALUE;
+ }
+ }
+
ret = request_settings_->Get(ANDROID_CONTROL_SCENE_MODE, &entry);
if ((ret == OK) && (entry.count == 1)) {
// Disabled scene is not expected to be among the available scene list
@@ -790,6 +802,7 @@ status_t EmulatedRequestState::InitializeSensorSettings(
sensor_settings->video_stab = vstab_mode;
sensor_settings->report_edge_mode = report_edge_mode_;
sensor_settings->edge_mode = edge_mode;
+ sensor_settings->sensor_pixel_mode = sensor_pixel_mode_;
return OK;
}
@@ -808,6 +821,9 @@ std::unique_ptr<HwlPipelineResult> EmulatedRequestState::InitializeResult(
result->result_metadata->Set(ANDROID_REQUEST_PIPELINE_DEPTH,
&max_pipeline_depth_, 1);
result->result_metadata->Set(ANDROID_CONTROL_MODE, &control_mode_, 1);
+ result->result_metadata->Set(ANDROID_SENSOR_PIXEL_MODE, &sensor_pixel_mode_,
+ 1);
+
result->result_metadata->Set(ANDROID_CONTROL_AF_MODE, &af_mode_, 1);
result->result_metadata->Set(ANDROID_CONTROL_AF_STATE, &af_state_, 1);
result->result_metadata->Set(ANDROID_CONTROL_AWB_MODE, &awb_mode_, 1);
@@ -1574,6 +1590,14 @@ status_t EmulatedRequestState::InitializeControlDefaults() {
return BAD_VALUE;
}
+ available_sensor_pixel_modes_.insert(ANDROID_SENSOR_PIXEL_MODE_DEFAULT);
+
+ if (SupportsCapability(
+ ANDROID_REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR)) {
+ available_sensor_pixel_modes_.insert(
+ ANDROID_SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);
+ }
+
// Auto mode must always be present
if (available_control_modes_.find(ANDROID_CONTROL_MODE_AUTO) ==
available_control_modes_.end()) {
@@ -2647,7 +2671,8 @@ status_t EmulatedRequestState::InitializeInfoDefaults() {
}
status_t EmulatedRequestState::InitializeReprocessDefaults() {
- if (supports_private_reprocessing_ || supports_yuv_reprocessing_) {
+ if (supports_private_reprocessing_ || supports_yuv_reprocessing_ ||
+ supports_remosaic_reprocessing_) {
StreamConfigurationMap config_map(*static_metadata_);
if (!config_map.SupportsReprocessing()) {
ALOGE(
@@ -2748,6 +2773,8 @@ status_t EmulatedRequestState::InitializeRequestDefaults() {
ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
supports_yuv_reprocessing_ = SupportsCapability(
ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
+ supports_remosaic_reprocessing_ = SupportsCapability(
+ ANDROID_REQUEST_AVAILABLE_CAPABILITIES_REMOSAIC_REPROCESSING);
is_backward_compatible_ = SupportsCapability(
ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
is_raw_capable_ =
diff --git a/devices/EmulatedCamera/hwl/EmulatedRequestState.h b/devices/EmulatedCamera/hwl/EmulatedRequestState.h
index 42f8639..1591422 100644
--- a/devices/EmulatedCamera/hwl/EmulatedRequestState.h
+++ b/devices/EmulatedCamera/hwl/EmulatedRequestState.h
@@ -127,6 +127,7 @@ class EmulatedRequestState {
bool is_raw_capable_ = false;
bool supports_private_reprocessing_ = false;
bool supports_yuv_reprocessing_ = false;
+ bool supports_remosaic_reprocessing_ = false;
// android.control.*
struct SceneOverride {
@@ -173,6 +174,7 @@ class EmulatedRequestState {
std::set<uint8_t> available_antibanding_modes_;
std::set<uint8_t> available_effects_;
std::set<uint8_t> available_vstab_modes_;
+ std::set<uint8_t> available_sensor_pixel_modes_;
std::vector<ExtendedSceneModeCapability> available_extended_scene_mode_caps_;
std::unordered_map<uint8_t, SceneOverride> scene_overrides_;
std::vector<FPSRange> available_fps_ranges_;
@@ -190,6 +192,7 @@ class EmulatedRequestState {
size_t max_awb_regions_ = 0;
size_t max_af_regions_ = 0;
uint8_t control_mode_ = ANDROID_CONTROL_MODE_AUTO;
+ uint8_t sensor_pixel_mode_ = ANDROID_SENSOR_PIXEL_MODE_DEFAULT;
uint8_t scene_mode_ = ANDROID_CONTROL_SCENE_MODE_DISABLED;
uint8_t ae_mode_ = ANDROID_CONTROL_AE_MODE_ON;
uint8_t awb_mode_ = ANDROID_CONTROL_AWB_MODE_AUTO;
diff --git a/devices/EmulatedCamera/hwl/EmulatedSensor.cpp b/devices/EmulatedCamera/hwl/EmulatedSensor.cpp
index c1ca8dc..c31878e 100644
--- a/devices/EmulatedCamera/hwl/EmulatedSensor.cpp
+++ b/devices/EmulatedCamera/hwl/EmulatedSensor.cpp
@@ -154,6 +154,13 @@ bool EmulatedSensor::AreCharacteristicsSupported(
return false;
}
+ if ((characteristics.full_res_width == 0) ||
+ (characteristics.full_res_height == 0)) {
+ ALOGE("%s: Invalid sensor full res size %zux%zu", __FUNCTION__,
+ characteristics.full_res_width, characteristics.full_res_height);
+ return false;
+ }
+
if ((characteristics.exposure_time_range[0] >=
characteristics.exposure_time_range[1]) ||
((characteristics.exposure_time_range[0] < kSupportedExposureTimeRange[0]) ||
@@ -244,11 +251,63 @@ bool EmulatedSensor::AreCharacteristicsSupported(
return true;
}
+static void SplitStreamCombination(
+ const StreamConfiguration& original_config,
+ StreamConfiguration* default_mode_config,
+ StreamConfiguration* max_resolution_mode_config,
+ StreamConfiguration* input_stream_config) {
+ // Go through the streams
+ if (default_mode_config == nullptr || max_resolution_mode_config == nullptr ||
+ input_stream_config == nullptr) {
+ ALOGE("%s: Input stream / output stream configs are nullptr", __FUNCTION__);
+ return;
+ }
+ for (const auto& stream : original_config.streams) {
+ if (stream.stream_type == google_camera_hal::StreamType::kInput) {
+ input_stream_config->streams.push_back(stream);
+ continue;
+ }
+ if (stream.used_in_default_resolution_mode) {
+ default_mode_config->streams.push_back(stream);
+ }
+ if (stream.used_in_max_resolution_mode) {
+ max_resolution_mode_config->streams.push_back(stream);
+ }
+ }
+}
+
bool EmulatedSensor::IsStreamCombinationSupported(
uint32_t logical_id, const StreamConfiguration& config,
- StreamConfigurationMap& map,
+ StreamConfigurationMap& default_config_map,
+ StreamConfigurationMap& max_resolution_config_map,
const PhysicalStreamConfigurationMap& physical_map,
+ const PhysicalStreamConfigurationMap& physical_map_max_resolution,
const LogicalCharacteristics& sensor_chars) {
+ StreamConfiguration default_mode_config, max_resolution_mode_config,
+ input_stream_config;
+ SplitStreamCombination(config, &default_mode_config,
+ &max_resolution_mode_config, &input_stream_config);
+
+ return IsStreamCombinationSupported(logical_id, default_mode_config,
+ default_config_map, physical_map,
+ sensor_chars) &&
+ IsStreamCombinationSupported(
+ logical_id, max_resolution_mode_config, max_resolution_config_map,
+ physical_map_max_resolution, sensor_chars, /*is_max_res*/ true) &&
+
+ (IsStreamCombinationSupported(logical_id, input_stream_config,
+ default_config_map, physical_map,
+ sensor_chars) ||
+ IsStreamCombinationSupported(
+ logical_id, input_stream_config, max_resolution_config_map,
+ physical_map_max_resolution, sensor_chars, /*is_max_res*/ true));
+}
+
+bool EmulatedSensor::IsStreamCombinationSupported(
+ uint32_t logical_id, const StreamConfiguration& config,
+ StreamConfigurationMap& config_map,
+ const PhysicalStreamConfigurationMap& physical_map,
+ const LogicalCharacteristics& sensor_chars, bool is_max_res) {
uint32_t input_stream_count = 0;
// Map from physical camera id to number of streams for that physical camera
std::map<uint32_t, uint32_t> raw_stream_count;
@@ -274,7 +333,7 @@ bool EmulatedSensor::IsStreamCombinationSupported(
}
auto const& supported_outputs =
- map.GetValidOutputFormatsForInput(stream.format);
+ config_map.GetValidOutputFormatsForInput(stream.format);
if (supported_outputs.empty()) {
ALOGE("%s: Input stream with format: 0x%x no supported on this device!",
__FUNCTION__, stream.format);
@@ -323,8 +382,10 @@ bool EmulatedSensor::IsStreamCombinationSupported(
stream.is_physical_camera_stream
? sensor_chars.at(stream.physical_camera_id)
: sensor_chars.at(logical_id);
- auto sensor_height = sensor_char.height;
- auto sensor_width = sensor_char.width;
+ auto sensor_height =
+ is_max_res ? sensor_char.full_res_height : sensor_char.height;
+ auto sensor_width =
+ is_max_res ? sensor_char.full_res_width : sensor_char.width;
if (stream.height != sensor_height || stream.width != sensor_width) {
ALOGE(
"%s, RAW16 buffer height %d and width %d must match sensor "
@@ -359,7 +420,7 @@ bool EmulatedSensor::IsStreamCombinationSupported(
: stream.is_physical_camera_stream
? physical_map.at(stream.physical_camera_id)
->GetOutputSizes(stream.format)
- : map.GetOutputSizes(stream.format);
+ : config_map.GetOutputSizes(stream.format);
auto stream_size = std::make_pair(stream.width, stream.height);
if (output_sizes.find(stream_size) == output_sizes.end()) {
@@ -371,10 +432,14 @@ bool EmulatedSensor::IsStreamCombinationSupported(
}
for (const auto& raw_count : raw_stream_count) {
- if (raw_count.second > sensor_chars.at(raw_count.first).max_raw_streams) {
+ unsigned int max_raw_streams =
+ sensor_chars.at(raw_count.first).max_raw_streams +
+ (is_max_res
+ ? 1
+ : 0); // The extra raw stream is allowed for remosaic reprocessing.
+ if (raw_count.second > max_raw_streams) {
ALOGE("%s: RAW streams maximum %u exceeds supported maximum %u",
- __FUNCTION__, raw_count.second,
- sensor_chars.at(raw_count.first).max_raw_streams);
+ __FUNCTION__, raw_count.second, max_raw_streams);
return false;
}
}
@@ -439,7 +504,7 @@ status_t EmulatedSensor::StartUp(
logical_camera_id_ = logical_camera_id;
scene_ = new EmulatedScene(
- device_chars->second.width, device_chars->second.height,
+ device_chars->second.full_res_width, device_chars->second.full_res_height,
kElectronsPerLuxSecond, device_chars->second.orientation,
device_chars->second.is_front_facing);
scene_->InitializeSensorQueue();
@@ -578,16 +643,14 @@ bool EmulatedSensor::threadLoop() {
*/
next_capture_time_ = frame_end_real_time;
+ sensor_binning_factor_info_.clear();
+
bool reprocess_request = false;
if ((next_input_buffer.get() != nullptr) && (!next_input_buffer->empty())) {
if (next_input_buffer->size() > 1) {
ALOGW("%s: Reprocess supports only single input!", __FUNCTION__);
}
- if (next_input_buffer->at(0)->format != PixelFormat::YCBCR_420_888) {
- ALOGE(
- "%s: Reprocess input format: 0x%x not supported! Skipping reprocess!",
- __FUNCTION__, next_input_buffer->at(0)->format);
- } else {
+
camera_metadata_ro_entry_t entry;
auto ret =
next_result->result_metadata->Get(ANDROID_SENSOR_TIMESTAMP, &entry);
@@ -598,7 +661,6 @@ bool EmulatedSensor::threadLoop() {
}
reprocess_request = true;
- }
}
if ((next_buffers != nullptr) && (settings != nullptr)) {
@@ -629,12 +691,16 @@ bool EmulatedSensor::threadLoop() {
continue;
}
+ sensor_binning_factor_info_[(*b)->camera_id].quad_bayer_sensor =
+ device_chars->second.quad_bayer_sensor;
+
ALOGVV("Starting next capture: Exposure: %" PRIu64 " ms, gain: %d",
ns2ms(device_settings->second.exposure_time),
device_settings->second.gain);
- scene_->Initialize(device_chars->second.width,
- device_chars->second.height, kElectronsPerLuxSecond);
+ scene_->Initialize(device_chars->second.full_res_width,
+ device_chars->second.full_res_height,
+ kElectronsPerLuxSecond);
scene_->SetExposureDuration((float)device_settings->second.exposure_time /
1e9);
scene_->SetColorFilterXYZ(device_chars->second.color_filter.rX,
@@ -655,15 +721,52 @@ bool EmulatedSensor::threadLoop() {
scene_->CalculateScene(next_capture_time_, handshake_divider);
(*b)->stream_buffer.status = BufferStatus::kOk;
+ bool max_res_mode = device_settings->second.sensor_pixel_mode;
+ sensor_binning_factor_info_[(*b)->camera_id].max_res_request =
+ max_res_mode;
+ switch ((*b)->format) {
+ case PixelFormat::RAW16:
+ sensor_binning_factor_info_[(*b)->camera_id].has_raw_stream = true;
+ break;
+ default:
+ sensor_binning_factor_info_[(*b)->camera_id].has_non_raw_stream = true;
+ }
+
switch ((*b)->format) {
case PixelFormat::RAW16:
if (!reprocess_request) {
- CaptureRaw((*b)->plane.img.img, device_settings->second.gain,
- device_chars->second);
+ if (device_chars->second.quad_bayer_sensor && !max_res_mode) {
+ CaptureRawBinned((*b)->plane.img.img, device_settings->second.gain,
+ device_chars->second);
+ } else {
+ CaptureRawFullRes((*b)->plane.img.img,
+ device_settings->second.gain,
+ device_chars->second);
+ }
} else {
- ALOGE("%s: Reprocess requests with output format %x no supported!",
+ if (!device_chars->second.quad_bayer_sensor) {
+ ALOGE(
+ "%s: Reprocess requests with output format %x no supported!",
__FUNCTION__, (*b)->format);
- (*b)->stream_buffer.status = BufferStatus::kError;
+ (*b)->stream_buffer.status = BufferStatus::kError;
+ break;
+ }
+ // Remosaic the RAW input buffer
+ if ((*next_input_buffer->begin())->width != (*b)->width ||
+ (*next_input_buffer->begin())->height != (*b)->height) {
+ ALOGE(
+ "%s: RAW16 input dimensions %dx%d don't match output buffer "
+ "dimensions %dx%d",
+ __FUNCTION__, (*next_input_buffer->begin())->width,
+ (*next_input_buffer->begin())->height, (*b)->width,
+ (*b)->height);
+ (*b)->stream_buffer.status = BufferStatus::kError;
+ break;
+ }
+ ALOGV("%s remosaic Raw16 Image", __FUNCTION__);
+ RemosaicRAW16Image(
+ (uint16_t*)(*next_input_buffer->begin())->plane.img.img,
+ (uint16_t*)(*b)->plane.img.img, device_chars->second);
}
break;
case PixelFormat::RGB_888:
@@ -839,7 +942,8 @@ bool EmulatedSensor::threadLoop() {
// the occasional bump during 'ReturnResults' should not have any
// noticeable effect.
if ((work_done_real_time + kReturnResultThreshod) > frame_end_real_time) {
- ReturnResults(callback, std::move(settings), std::move(next_result));
+ ReturnResults(callback, std::move(settings), std::move(next_result),
+ reprocess_request);
}
work_done_real_time = systemTime();
@@ -859,7 +963,8 @@ bool EmulatedSensor::threadLoop() {
ALOGVV("Frame cycle took %" PRIu64 " ms, target %" PRIu64 " ms",
ns2ms(end_real_time - start_real_time), ns2ms(frame_duration));
- ReturnResults(callback, std::move(settings), std::move(next_result));
+ ReturnResults(callback, std::move(settings), std::move(next_result),
+ reprocess_request);
return true;
};
@@ -867,7 +972,7 @@ bool EmulatedSensor::threadLoop() {
void EmulatedSensor::ReturnResults(
HwlPipelineCallback callback,
std::unique_ptr<LogicalCameraSettings> settings,
- std::unique_ptr<HwlPipelineResult> result) {
+ std::unique_ptr<HwlPipelineResult> result, bool reprocess_request) {
if ((callback.process_pipeline_result != nullptr) &&
(result.get() != nullptr) && (result->result_metadata.get() != nullptr)) {
auto logical_settings = settings->find(logical_camera_id_);
@@ -882,9 +987,20 @@ void EmulatedSensor::ReturnResults(
logical_camera_id_);
return;
}
-
result->result_metadata->Set(ANDROID_SENSOR_TIMESTAMP, &next_capture_time_,
1);
+ uint8_t raw_binned_factor_used = false;
+ if (sensor_binning_factor_info_.find(logical_camera_id_) !=
+ sensor_binning_factor_info_.end()) {
+ auto& info = sensor_binning_factor_info_[logical_camera_id_];
+ // Logical stream was included in the request
+ if (!reprocess_request && info.quad_bayer_sensor && info.max_res_request &&
+ info.has_raw_stream && !info.has_non_raw_stream) {
+ raw_binned_factor_used = true;
+ }
+ result->result_metadata->Set(ANDROID_SENSOR_RAW_BINNING_FACTOR_USED,
+ &raw_binned_factor_used, 1);
+ }
if (logical_settings->second.lens_shading_map_mode ==
ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON) {
if ((device_chars->second.lens_shading_map_size[0] > 0) &&
@@ -935,7 +1051,19 @@ void EmulatedSensor::ReturnResults(
__FUNCTION__, it.first);
continue;
}
-
+ uint8_t raw_binned_factor_used = false;
+ if (sensor_binning_factor_info_.find(it.first) !=
+ sensor_binning_factor_info_.end()) {
+ auto& info = sensor_binning_factor_info_[it.first];
+ // physical stream was included in the request
+ if (!reprocess_request && info.quad_bayer_sensor &&
+ info.max_res_request && info.has_raw_stream &&
+ !info.has_non_raw_stream) {
+ raw_binned_factor_used = true;
+ }
+ it.second->Set(ANDROID_SENSOR_RAW_BINNING_FACTOR_USED,
+ &raw_binned_factor_used, 1);
+ }
// Sensor timestamp for all physical devices must be the same.
it.second->Set(ANDROID_SENSOR_TIMESTAMP, &next_capture_time_, 1);
if (physical_settings->second.report_neutral_color_point) {
@@ -980,24 +1108,145 @@ void EmulatedSensor::CalculateAndAppendNoiseProfile(
}
}
-void EmulatedSensor::CaptureRaw(uint8_t* img, uint32_t gain,
- const SensorCharacteristics& chars) {
+EmulatedScene::ColorChannels EmulatedSensor::GetQuadBayerColor(uint32_t x,
+ uint32_t y) {
+ // Row within larger set of quad bayer filter
+ uint32_t row_mod = y % 4;
+ // Column within larger set of quad bayer filter
+ uint32_t col_mod = x % 4;
+
+ // Row is within the left quadrants of a quad bayer sensor
+ if (row_mod < 2) {
+ if (col_mod < 2) {
+ return EmulatedScene::ColorChannels::R;
+ }
+ return EmulatedScene::ColorChannels::Gr;
+ } else {
+ if (col_mod < 2) {
+ return EmulatedScene::ColorChannels::Gb;
+ }
+ return EmulatedScene::ColorChannels::B;
+ }
+}
+
+void EmulatedSensor::RemosaicQuadBayerBlock(uint16_t* img_in, uint16_t* img_out,
+ int xstart, int ystart, int stride) {
+ uint32_t quad_block_copy_idx_map[16] = {0, 2, 1, 3, 8, 10, 6, 11,
+ 4, 9, 5, 7, 12, 14, 13, 15};
+ uint16_t quad_block_copy[16];
+ uint32_t i = 0;
+ for (uint32_t row = 0; row < 4; row++) {
+ uint16_t* quad_bayer_row = img_in + (ystart + row) * stride + xstart;
+ for (uint32_t j = 0; j < 4; j++, i++) {
+ quad_block_copy[i] = quad_bayer_row[j];
+ }
+ }
+
+ for (uint32_t row = 0; row < 4; row++) {
+ uint16_t* regular_bayer_row = img_out + (ystart + row) * stride + xstart;
+ for (uint32_t j = 0; j < 4; j++, i++) {
+ uint32_t idx = quad_block_copy_idx_map[row + 4 * j];
+ regular_bayer_row[j] = quad_block_copy[idx];
+ }
+ }
+}
+
+status_t EmulatedSensor::RemosaicRAW16Image(uint16_t* img_in, uint16_t* img_out,
+ const SensorCharacteristics& chars) {
+ if (chars.full_res_width % 2 != 0 || chars.full_res_height % 2 != 0) {
+ ALOGE(
+ "%s RAW16 Image with quad CFA, height %zu and width %zu, not multiples "
+ "of 4",
+ __FUNCTION__, chars.full_res_height, chars.full_res_width);
+ return BAD_VALUE;
+ }
+ for (uint32_t i = 0; i < chars.full_res_width; i += 4) {
+ for (uint32_t j = 0; j < chars.full_res_height; j += 4) {
+ RemosaicQuadBayerBlock(img_in, img_out, i, j, chars.full_res_width);
+ }
+ }
+ return OK;
+}
+
+void EmulatedSensor::CaptureRawBinned(uint8_t* img, uint32_t gain,
+ const SensorCharacteristics& chars) {
ATRACE_CALL();
+ // inc = how many pixels to skip while reading every next pixel
float total_gain = gain / 100.0 * GetBaseGainFactor(chars.max_raw_value);
float noise_var_gain = total_gain * total_gain;
float read_noise_var =
kReadNoiseVarBeforeGain * noise_var_gain + kReadNoiseVarAfterGain;
- //
- // RGGB
int bayer_select[4] = {EmulatedScene::R, EmulatedScene::Gr, EmulatedScene::Gb,
EmulatedScene::B};
scene_->SetReadoutPixel(0, 0);
- for (unsigned int y = 0; y < chars.height; y++) {
+ for (unsigned int out_y = 0; out_y < chars.height; out_y++) {
+ // Stride still stays width since the buffer is binned size.
+ int* bayer_row = bayer_select + (out_y & 0x1) * 2;
+ uint16_t* px = (uint16_t*)img + out_y * chars.width;
+ for (unsigned int out_x = 0; out_x < chars.width; out_x++) {
+ int color_idx = bayer_row[out_x & 0x1];
+ uint16_t raw_count = 0;
+ // Color filter will be the same for each quad.
+ uint32_t electron_count = 0;
+ int x, y;
+ float norm_x = (float)out_x / chars.width;
+ float norm_y = (float)out_y / chars.height;
+ x = static_cast<int>(chars.full_res_width * norm_x);
+ y = static_cast<int>(chars.full_res_height * norm_y);
+
+ x = std::min(std::max(x, 0), (int)chars.full_res_width - 1);
+ y = std::min(std::max(y, 0), (int)chars.full_res_height - 1);
+
+ scene_->SetReadoutPixel(x, y);
+
+ const uint32_t* pixel = scene_->GetPixelElectrons();
+ electron_count = pixel[color_idx];
+ // TODO: Better pixel saturation curve?
+ electron_count = (electron_count < kSaturationElectrons)
+ ? electron_count
+ : kSaturationElectrons;
+
+ // TODO: Better A/D saturation curve?
+ raw_count = electron_count * total_gain;
+ raw_count =
+ (raw_count < chars.max_raw_value) ? raw_count : chars.max_raw_value;
+
+ // Calculate noise value
+ // TODO: Use more-correct Gaussian instead of uniform noise
+ float photon_noise_var = electron_count * noise_var_gain;
+ float noise_stddev = sqrtf_approx(read_noise_var + photon_noise_var);
+ // Scaled to roughly match gaussian/uniform noise stddev
+ float noise_sample = rand_r(&rand_seed_) * (2.5 / (1.0 + RAND_MAX)) - 1.25;
+
+ raw_count += chars.black_level_pattern[color_idx];
+ raw_count += noise_stddev * noise_sample;
+ *px++ = raw_count;
+ }
+ }
+ ALOGVV("Binned RAW sensor image captured");
+}
+
+void EmulatedSensor::CaptureRawFullRes(uint8_t* img, uint32_t gain,
+ const SensorCharacteristics& chars) {
+ ATRACE_CALL();
+ float total_gain = gain / 100.0 * GetBaseGainFactor(chars.max_raw_value);
+ float noise_var_gain = total_gain * total_gain;
+ float read_noise_var =
+ kReadNoiseVarBeforeGain * noise_var_gain + kReadNoiseVarAfterGain;
+
+ scene_->SetReadoutPixel(0, 0);
+ // RGGB
+ int bayer_select[4] = {EmulatedScene::R, EmulatedScene::Gr, EmulatedScene::Gb,
+ EmulatedScene::B};
+
+ for (unsigned int y = 0; y < chars.full_res_height; y++) {
int* bayer_row = bayer_select + (y & 0x1) * 2;
- uint16_t* px = (uint16_t*)img + y * chars.width;
- for (unsigned int x = 0; x < chars.width; x++) {
+ uint16_t* px = (uint16_t*)img + y * chars.full_res_width;
+ for (unsigned int x = 0; x < chars.full_res_width; x++) {
+ int color_idx = chars.quad_bayer_sensor ? GetQuadBayerColor(x, y)
+ : bayer_row[x & 0x1];
uint32_t electron_count;
- electron_count = scene_->GetPixelElectrons()[bayer_row[x & 0x1]];
+ electron_count = scene_->GetPixelElectrons()[color_idx];
// TODO: Better pixel saturation curve?
electron_count = (electron_count < kSaturationElectrons)
@@ -1016,7 +1265,7 @@ void EmulatedSensor::CaptureRaw(uint8_t* img, uint32_t gain,
// Scaled to roughly match gaussian/uniform noise stddev
float noise_sample = rand_r(&rand_seed_) * (2.5 / (1.0 + RAND_MAX)) - 1.25;
- raw_count += chars.black_level_pattern[bayer_row[x & 0x1]];
+ raw_count += chars.black_level_pattern[color_idx];
raw_count += noise_stddev * noise_sample;
*px++ = raw_count;
@@ -1034,13 +1283,14 @@ void EmulatedSensor::CaptureRGB(uint8_t* img, uint32_t width, uint32_t height,
float total_gain = gain / 100.0 * GetBaseGainFactor(chars.max_raw_value);
// In fixed-point math, calculate total scaling from electrons to 8bpp
int scale64x = 64 * total_gain * 255 / chars.max_raw_value;
- uint32_t inc_h = ceil((float)chars.width / width);
- uint32_t inc_v = ceil((float)chars.height / height);
+ uint32_t inc_h = ceil((float)chars.full_res_width / width);
+ uint32_t inc_v = ceil((float)chars.full_res_height / height);
- for (unsigned int y = 0, outy = 0; y < chars.height; y += inc_v, outy++) {
+ for (unsigned int y = 0, outy = 0; y < chars.full_res_height;
+ y += inc_v, outy++) {
scene_->SetReadoutPixel(0, y);
uint8_t* px = img + outy * stride;
- for (unsigned int x = 0; x < chars.width; x += inc_h) {
+ for (unsigned int x = 0; x < chars.full_res_width; x += inc_h) {
uint32_t r_count, g_count, b_count;
// TODO: Perfect demosaicing is a cheat
const uint32_t* pixel = scene_->GetPixelElectrons();
@@ -1121,16 +1371,16 @@ void EmulatedSensor::CaptureYUV420(YCbCrPlanes yuv_layout, uint32_t width,
float norm_x = out_x / (width * zoom_ratio);
float norm_y = out_y / (height * zoom_ratio);
if (rotate) {
- x = static_cast<int>(chars.width *
+ x = static_cast<int>(chars.full_res_width *
(norm_rot_left - norm_y * norm_rot_width));
- y = static_cast<int>(chars.height *
+ y = static_cast<int>(chars.full_res_height *
(norm_rot_top + norm_x * norm_rot_height));
} else {
- x = static_cast<int>(chars.width * (norm_left_top + norm_x));
- y = static_cast<int>(chars.height * (norm_left_top + norm_y));
+ x = static_cast<int>(chars.full_res_width * (norm_left_top + norm_x));
+ y = static_cast<int>(chars.full_res_height * (norm_left_top + norm_y));
}
- x = std::min(std::max(x, 0), (int)chars.width - 1);
- y = std::min(std::max(y, 0), (int)chars.height - 1);
+ x = std::min(std::max(x, 0), (int)chars.full_res_width - 1);
+ y = std::min(std::max(y, 0), (int)chars.full_res_height - 1);
scene_->SetReadoutPixel(x, y);
int32_t r_count, g_count, b_count;
@@ -1196,13 +1446,14 @@ void EmulatedSensor::CaptureDepth(uint8_t* img, uint32_t gain, uint32_t width,
float total_gain = gain / 100.0 * GetBaseGainFactor(chars.max_raw_value);
// In fixed-point math, calculate scaling factor to 13bpp millimeters
int scale64x = 64 * total_gain * 8191 / chars.max_raw_value;
- uint32_t inc_h = ceil((float)chars.width / width);
- uint32_t inc_v = ceil((float)chars.height / height);
+ uint32_t inc_h = ceil((float)chars.full_res_width / width);
+ uint32_t inc_v = ceil((float)chars.full_res_height / height);
- for (unsigned int y = 0, out_y = 0; y < chars.height; y += inc_v, out_y++) {
+ for (unsigned int y = 0, out_y = 0; y < chars.full_res_height;
+ y += inc_v, out_y++) {
scene_->SetReadoutPixel(0, y);
uint16_t* px = (uint16_t*)(img + (out_y * stride));
- for (unsigned int x = 0; x < chars.width; x += inc_h) {
+ for (unsigned int x = 0; x < chars.full_res_width; x += inc_h) {
uint32_t depth_count;
// TODO: Make up real depth scene instead of using green channel
// as depth
diff --git a/devices/EmulatedCamera/hwl/EmulatedSensor.h b/devices/EmulatedCamera/hwl/EmulatedSensor.h
index 1e33822..6402d4d 100644
--- a/devices/EmulatedCamera/hwl/EmulatedSensor.h
+++ b/devices/EmulatedCamera/hwl/EmulatedSensor.h
@@ -116,6 +116,8 @@ struct ColorFilterXYZ {
struct SensorCharacteristics {
size_t width = 0;
size_t height = 0;
+ size_t full_res_width = 0;
+ size_t full_res_height = 0;
nsecs_t exposure_time_range[2] = {0};
nsecs_t frame_duration_range[2] = {0};
int32_t sensitivity_range[2] = {0};
@@ -134,6 +136,7 @@ struct SensorCharacteristics {
uint32_t max_pipeline_depth = 0;
uint32_t orientation = 0;
bool is_front_facing = false;
+ bool quad_bayer_sensor = false;
};
// Maps logical/physical camera ids to sensor characteristics
@@ -160,17 +163,30 @@ class EmulatedSensor : private Thread, public virtual RefBase {
return true;
}
+ if (HAL_PIXEL_FORMAT_RAW16 == input_format &&
+ HAL_PIXEL_FORMAT_RAW16 == output_format) {
+ return true;
+ }
+
return false;
}
static bool AreCharacteristicsSupported(
const SensorCharacteristics& characteristics);
+
static bool IsStreamCombinationSupported(
uint32_t logical_id, const StreamConfiguration& config,
- StreamConfigurationMap& map,
+ StreamConfigurationMap& map, StreamConfigurationMap& max_resolution_map,
const PhysicalStreamConfigurationMap& physical_map,
+ const PhysicalStreamConfigurationMap& physical_map_max_resolution,
const LogicalCharacteristics& sensor_chars);
+ static bool IsStreamCombinationSupported(
+ uint32_t logical_id, const StreamConfiguration& config,
+ StreamConfigurationMap& map,
+ const PhysicalStreamConfigurationMap& physical_map,
+ const LogicalCharacteristics& sensor_chars, bool is_max_res = false);
+
/*
* Power control
*/
@@ -197,6 +213,7 @@ class EmulatedSensor : private Thread, public virtual RefBase {
uint8_t video_stab = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
bool report_edge_mode = false;
uint8_t edge_mode = ANDROID_EDGE_MODE_OFF;
+ uint8_t sensor_pixel_mode = ANDROID_SENSOR_PIXEL_MODE_DEFAULT;
};
// Maps physical and logical camera ids to individual device settings
@@ -295,10 +312,31 @@ class EmulatedSensor : private Thread, public virtual RefBase {
nsecs_t next_capture_time_;
+ struct SensorBinningFactorInfo {
+ bool has_raw_stream = false;
+ bool has_non_raw_stream = false;
+ bool quad_bayer_sensor = false;
+ bool max_res_request = false;
+ };
+
+ std::map<uint32_t, SensorBinningFactorInfo> sensor_binning_factor_info_;
+
sp<EmulatedScene> scene_;
- void CaptureRaw(uint8_t* img, uint32_t gain,
- const SensorCharacteristics& chars);
+ static EmulatedScene::ColorChannels GetQuadBayerColor(uint32_t x, uint32_t y);
+
+ static void RemosaicQuadBayerBlock(uint16_t* img_in, uint16_t* img_out,
+ int xstart, int ystart, int stride);
+
+ static status_t RemosaicRAW16Image(uint16_t* img_in, uint16_t* img_out,
+ const SensorCharacteristics& chars);
+
+ void CaptureRawBinned(uint8_t* img, uint32_t gain,
+ const SensorCharacteristics& chars);
+
+ void CaptureRawFullRes(uint8_t* img, uint32_t gain,
+ const SensorCharacteristics& chars);
+
enum RGBLayout { RGB, RGBA, ARGB };
void CaptureRGB(uint8_t* img, uint32_t width, uint32_t height,
uint32_t stride, RGBLayout layout, uint32_t gain,
@@ -330,7 +368,8 @@ class EmulatedSensor : private Thread, public virtual RefBase {
void ReturnResults(HwlPipelineCallback callback,
std::unique_ptr<LogicalCameraSettings> settings,
- std::unique_ptr<HwlPipelineResult> result);
+ std::unique_ptr<HwlPipelineResult> result,
+ bool reprocess_request);
static float GetBaseGainFactor(float max_raw_value) {
return max_raw_value / EmulatedSensor::kSaturationElectrons;
diff --git a/devices/EmulatedCamera/hwl/configs/emu_camera_front.json b/devices/EmulatedCamera/hwl/configs/emu_camera_front.json
index 184547b..f56d232 100644
--- a/devices/EmulatedCamera/hwl/configs/emu_camera_front.json
+++ b/devices/EmulatedCamera/hwl/configs/emu_camera_front.json
@@ -211,7 +211,7 @@
"240"
],
"android.jpeg.maxSize": [
- "300000"
+ "3000000"
],
"android.lens.distortion": [
"0.08807813",
@@ -292,9 +292,11 @@
"BURST_CAPTURE",
"MANUAL_POST_PROCESSING",
"LOGICAL_MULTI_CAMERA",
- "PRIVATE_REPROCESSING",
+ "PRIVATE_REPROCESSING",
+ "ULTRA_HIGH_RESOLUTION_SENSOR",
"YUV_REPROCESSING",
- "RAW"
+ "RAW",
+ "REMOSAIC_REPROCESSING"
],
"android.request.maxNumInputStreams": [
"1"
@@ -309,6 +311,11 @@
"33",
"35"
],
+ "android.scaler.availableInputOutputFormatsMapMaximumResolution" : [
+ "32",
+ "1",
+ "32"
+ ],
"android.reprocess.maxCaptureStall": [
"2"
],
@@ -476,7 +483,8 @@
"1245186",
"1245187",
"1441792",
- "851985"
+ "851985",
+ "917536"
],
"android.request.availableResultKeys": [
"0",
@@ -551,7 +559,8 @@
"1114123",
"1703938",
"917530",
- "851985"
+ "851985",
+ "917536"
],
"android.request.maxNumOutputStreams": [
"1",
@@ -756,6 +765,20 @@
"144",
"33331760"
],
+ "android.scaler.availableMinFrameDurationsMaximumResolution": [
+ "32",
+ "6048",
+ "4024",
+ "33331760",
+ "33",
+ "6048",
+ "4024",
+ "33331760",
+ "35",
+ "6048",
+ "4024",
+ "33331760"
+ ],
"android.scaler.availableStallDurations": [
"32",
"1920",
@@ -822,6 +845,16 @@
"144",
"164736"
],
+ "android.scaler.availableStallDurationsMaximumResolution": [
+ "32",
+ "6048",
+ "4024",
+ "287539200",
+ "33",
+ "6048",
+ "4024",
+ "287539200"
+ ],
"android.scaler.availableStreamConfigurations": [
"32",
"1920",
@@ -1012,6 +1045,24 @@
"240",
"OUTPUT"
],
+ "android.scaler.availableStreamConfigurationsMaximumResolution": [
+ "32",
+ "6048",
+ "4024",
+ "INPUT",
+ "32",
+ "6048",
+ "4024",
+ "OUTPUT",
+ "33",
+ "6048",
+ "4024",
+ "OUTPUT",
+ "35",
+ "6048",
+ "4024",
+ "OUTPUT"
+ ],
"android.scaler.croppingType": [
"CENTER_ONLY"
],
@@ -1154,6 +1205,12 @@
"1920",
"1440"
],
+ "android.sensor.info.activeArraySizeMaximumResolution": [
+ "0",
+ "0",
+ "6048",
+ "4024"
+ ],
"android.sensor.info.colorFilterArrangement": [
"RGGB"
],
@@ -1181,6 +1238,20 @@
"1920",
"1440"
],
+ "android.sensor.info.pixelArraySizeMaximumResolution": [
+ "6048",
+ "4024"
+ ],
+ "android.sensor.info.binningFactor": [
+ "2",
+ "2"
+ ],
+ "android.sensor.info.preCorrectionActiveArraySizeMaximumResolution": [
+ "0",
+ "0",
+ "6048",
+ "4024"
+ ],
"android.sensor.info.sensitivityRange": [
"100",
"1000"
@@ -1455,7 +1526,7 @@
"240"
],
"android.jpeg.maxSize": [
- "300000"
+ "3000000"
],
"android.lens.distortion": [
"0.08807813",
@@ -1527,9 +1598,11 @@
"MANUAL_SENSOR",
"BURST_CAPTURE",
"MANUAL_POST_PROCESSING",
- "PRIVATE_REPROCESSING",
+ "PRIVATE_REPROCESSING",
+ "ULTRA_HIGH_RESOLUTION_SENSOR",
"YUV_REPROCESSING",
- "RAW"
+ "RAW",
+ "REMOSAIC_REPROCESSING"
],
"android.request.maxNumInputStreams": [
"1"
@@ -1544,6 +1617,11 @@
"33",
"35"
],
+ "android.scaler.availableInputOutputFormatsMapMaximumResolution" : [
+ "32",
+ "1",
+ "32"
+ ],
"android.reprocess.maxCaptureStall": [
"2"
],
@@ -1712,7 +1790,8 @@
"1245186",
"1245187",
"1441792",
- "851985"
+ "851985",
+ "917536"
],
"android.request.availableResultKeys": [
"0",
@@ -1786,7 +1865,8 @@
"65552",
"1114123",
"917530",
- "851985"
+ "851985",
+ "917536"
],
"android.request.maxNumOutputStreams": [
"1",
@@ -1991,6 +2071,20 @@
"144",
"33331760"
],
+ "android.scaler.availableMinFrameDurationsMaximumResolution": [
+ "32",
+ "6048",
+ "4024",
+ "33331760",
+ "33",
+ "6048",
+ "4024",
+ "33331760",
+ "35",
+ "6048",
+ "4024",
+ "33331760"
+ ],
"android.scaler.availableStallDurations": [
"32",
"1920",
@@ -2057,6 +2151,16 @@
"144",
"164736"
],
+ "android.scaler.availableStallDurationsMaximumResolution": [
+ "32",
+ "6048",
+ "4024",
+ "287539200",
+ "33",
+ "6048",
+ "4024",
+ "287539200"
+ ],
"android.scaler.availableStreamConfigurations": [
"32",
"1920",
@@ -2251,6 +2355,24 @@
"144",
"OUTPUT"
],
+ "android.scaler.availableStreamConfigurationsMaximumResolution": [
+ "32",
+ "6048",
+ "4024",
+ "INPUT",
+ "32",
+ "6048",
+ "4024",
+ "OUTPUT",
+ "33",
+ "6048",
+ "4024",
+ "OUTPUT",
+ "35",
+ "6048",
+ "4024",
+ "OUTPUT"
+ ],
"android.scaler.croppingType": [
"CENTER_ONLY"
],
@@ -2415,6 +2537,12 @@
"1920",
"1440"
],
+ "android.sensor.info.activeArraySizeMaximumResolution": [
+ "0",
+ "0",
+ "6048",
+ "4024"
+ ],
"android.sensor.info.colorFilterArrangement": [
"RGGB"
],
@@ -2442,6 +2570,20 @@
"1920",
"1440"
],
+ "android.sensor.info.pixelArraySizeMaximumResolution": [
+ "6048",
+ "4024"
+ ],
+ "android.sensor.info.preCorrectionActiveArraySizeMaximumResolution": [
+ "0",
+ "0",
+ "6048",
+ "4024"
+ ],
+ "android.sensor.info.binningFactor": [
+ "2",
+ "2"
+ ],
"android.sensor.info.sensitivityRange": [
"100",
"1000"
@@ -2716,7 +2858,7 @@
"240"
],
"android.jpeg.maxSize": [
- "300000"
+ "3000000"
],
"android.lens.distortion": [
"0.27679554",
@@ -2789,8 +2931,10 @@
"BURST_CAPTURE",
"MANUAL_POST_PROCESSING",
"PRIVATE_REPROCESSING",
+ "ULTRA_HIGH_RESOLUTION_SENSOR",
"YUV_REPROCESSING",
- "RAW"
+ "RAW",
+ "REMOSAIC_REPROCESSING"
],
"android.request.maxNumInputStreams": [
"1"
@@ -2805,6 +2949,11 @@
"33",
"35"
],
+ "android.scaler.availableInputOutputFormatsMapMaximumResolution" : [
+ "32",
+ "1",
+ "32"
+ ],
"android.reprocess.maxCaptureStall": [
"2"
],
@@ -2973,7 +3122,8 @@
"1245186",
"1245187",
"1441792",
- "851985"
+ "851985",
+ "917536"
],
"android.request.availableResultKeys": [
"0",
@@ -3047,7 +3197,8 @@
"65552",
"1114123",
"917530",
- "851985"
+ "851985",
+ "917536"
],
"android.request.maxNumOutputStreams": [
"1",
@@ -3268,6 +3419,20 @@
"144",
"33331760"
],
+ "android.scaler.availableMinFrameDurationsMaximumResolution": [
+ "32",
+ "6048",
+ "4024",
+ "33331760",
+ "33",
+ "6048",
+ "4024",
+ "33331760",
+ "35",
+ "6048",
+ "4024",
+ "33331760"
+ ],
"android.scaler.availableStallDurations": [
"32",
"2048",
@@ -3342,6 +3507,16 @@
"144",
"164736"
],
+ "android.scaler.availableStallDurationsMaximumResolution": [
+ "32",
+ "6048",
+ "4024",
+ "287539200",
+ "33",
+ "6048",
+ "4024",
+ "287539200"
+ ],
"android.scaler.availableStreamConfigurations": [
"34",
"2048",
@@ -3548,6 +3723,24 @@
"144",
"OUTPUT"
],
+ "android.scaler.availableStreamConfigurationsMaximumResolution": [
+ "32",
+ "6048",
+ "4024",
+ "INPUT",
+ "32",
+ "6048",
+ "4024",
+ "OUTPUT",
+ "33",
+ "6048",
+ "4024",
+ "OUTPUT",
+ "35",
+ "6048",
+ "4024",
+ "OUTPUT"
+ ],
"android.scaler.croppingType": [
"CENTER_ONLY"
],
@@ -3712,6 +3905,12 @@
"2048",
"1536"
],
+ "android.sensor.info.activeArraySizeMaximumResolution": [
+ "0",
+ "0",
+ "6048",
+ "4024"
+ ],
"android.sensor.info.colorFilterArrangement": [
"RGGB"
],
@@ -3739,6 +3938,20 @@
"2048",
"1536"
],
+ "android.sensor.info.pixelArraySizeMaximumResolution": [
+ "6048",
+ "4024"
+ ],
+ "android.sensor.info.preCorrectionActiveArraySizeMaximumResolution": [
+ "0",
+ "0",
+ "6048",
+ "4024"
+ ],
+ "android.sensor.info.binningFactor": [
+ "2",
+ "2"
+ ],
"android.sensor.info.sensitivityRange": [
"100",
"1000"
diff --git a/devices/EmulatedCamera/hwl/utils/ExifUtils.cpp b/devices/EmulatedCamera/hwl/utils/ExifUtils.cpp
index 90f769d..c310bbb 100644
--- a/devices/EmulatedCamera/hwl/utils/ExifUtils.cpp
+++ b/devices/EmulatedCamera/hwl/utils/ExifUtils.cpp
@@ -948,7 +948,8 @@ bool ExifUtilsImpl::SetFromMetadata(const HalCameraMetadata& metadata,
ret = metadata.Get(ANDROID_SCALER_CROP_REGION, &entry);
if (ret == OK) {
if (!SetDigitalZoomRatio(entry.data.i32[2], entry.data.i32[3],
- sensor_chars_.width, sensor_chars_.height)) {
+ sensor_chars_.full_res_width,
+ sensor_chars_.full_res_height)) {
ALOGE("%s: setting digital zoom ratio failed.", __FUNCTION__);
return false;
}
diff --git a/devices/EmulatedCamera/hwl/utils/HWLUtils.cpp b/devices/EmulatedCamera/hwl/utils/HWLUtils.cpp
index c2add6b..8de8ff3 100644
--- a/devices/EmulatedCamera/hwl/utils/HWLUtils.cpp
+++ b/devices/EmulatedCamera/hwl/utils/HWLUtils.cpp
@@ -57,6 +57,16 @@ status_t GetSensorCharacteristics(const HalCameraMetadata* metadata,
}
sensor_chars->width = entry.data.i32[0];
sensor_chars->height = entry.data.i32[1];
+ sensor_chars->full_res_width = sensor_chars->width;
+ sensor_chars->full_res_height = sensor_chars->height;
+
+ ret = metadata->Get(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION,
+ &entry);
+ if ((ret == OK) && (entry.count == 2)) {
+ sensor_chars->full_res_width = entry.data.i32[0];
+ sensor_chars->full_res_height = entry.data.i32[1];
+ sensor_chars->quad_bayer_sensor = true;
+ }
ret = metadata->Get(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, &entry);
if ((ret != OK) || (entry.count != 3)) {
diff --git a/devices/EmulatedCamera/hwl/utils/StreamConfigurationMap.cpp b/devices/EmulatedCamera/hwl/utils/StreamConfigurationMap.cpp
index be89bdc..ce840a4 100644
--- a/devices/EmulatedCamera/hwl/utils/StreamConfigurationMap.cpp
+++ b/devices/EmulatedCamera/hwl/utils/StreamConfigurationMap.cpp
@@ -20,6 +20,40 @@
#include <log/log.h>
namespace android {
+const uint32_t kScalerStreamConfigurations =
+ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
+const uint32_t kScalerStreamConfigurationsMaxRes =
+ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
+
+const uint32_t kDepthStreamConfigurations =
+ ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS;
+const uint32_t kDepthStreamConfigurationsMaxRes =
+ ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION;
+
+const uint32_t kScalerMinFrameDurations =
+ ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS;
+const uint32_t kScalerMinFrameDurationsMaxRes =
+ ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
+
+const uint32_t kDepthMinFrameDurations =
+ ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS;
+const uint32_t kDepthMinFrameDurationsMaxRes =
+ ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION;
+
+const uint32_t kScalerStallDurations = ANDROID_SCALER_AVAILABLE_STALL_DURATIONS;
+const uint32_t kScalerStallDurationsMaxRes =
+ ANDROID_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION;
+
+const uint32_t kScalerInputOutputFormatsMap =
+ ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP;
+const uint32_t kScalerInputOutputFormatsMapMaxRes =
+ ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP_MAXIMUM_RESOLUTION;
+
+const uint32_t kDepthStallDurations =
+ ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS;
+const uint32_t kDepthStallDurationsMaxRes =
+ ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION;
+
void StreamConfigurationMap::AppendAvailableStreamConfigurations(
const camera_metadata_ro_entry& entry) {
for (size_t i = 0; i < entry.count; i += kStreamConfigurationSize) {
@@ -79,47 +113,70 @@ void StreamConfigurationMap::AppendAvailableStreamStallDurations(
}
}
-StreamConfigurationMap::StreamConfigurationMap(const HalCameraMetadata& chars) {
+StreamConfigurationMap::StreamConfigurationMap(const HalCameraMetadata& chars,
+ bool maxResolution) {
camera_metadata_ro_entry_t entry;
- auto ret = chars.Get(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS, &entry);
+ const char* maxResolutionStr = maxResolution ? "true" : "false";
+ auto ret = chars.Get(maxResolution ? kScalerStreamConfigurationsMaxRes
+ : kScalerStreamConfigurations,
+ &entry);
if (ret != OK) {
- ALOGW("%s: ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS missing!",
- __FUNCTION__);
+ ALOGW(
+ "%s: ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS missing, "
+ "maxResolution ? %s!",
+ __FUNCTION__, maxResolutionStr);
entry.count = 0;
}
AppendAvailableStreamConfigurations(entry);
- ret = chars.Get(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, &entry);
+ ret = chars.Get(maxResolution ? kDepthStreamConfigurationsMaxRes
+ : kDepthStreamConfigurations,
+ &entry);
+
if (ret == OK) {
AppendAvailableStreamConfigurations(entry);
}
- ret = chars.Get(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, &entry);
+ ret = chars.Get(
+ maxResolution ? kScalerMinFrameDurationsMaxRes : kScalerMinFrameDurations,
+ &entry);
if (ret != OK) {
- ALOGW("%s: ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS missing!",
- __FUNCTION__);
+ ALOGW(
+ "%s: ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS missing!, max "
+ "resolution ? %s",
+ __FUNCTION__, maxResolutionStr);
entry.count = 0;
}
AppendAvailableStreamMinDurations(entry);
- ret = chars.Get(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS, &entry);
+ ret = chars.Get(
+ maxResolution ? kDepthMinFrameDurationsMaxRes : kDepthMinFrameDurations,
+ &entry);
if (ret == OK) {
AppendAvailableStreamMinDurations(entry);
}
- ret = chars.Get(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS, &entry);
+ ret = chars.Get(
+ maxResolution ? kScalerStallDurationsMaxRes : kScalerStallDurations,
+ &entry);
if (ret != OK) {
- ALOGW("%s: ANDROID_SCALER_AVAILABLE_STALL_DURATIONS missing!", __FUNCTION__);
+ ALOGW(
+ "%s: ANDROID_SCALER_AVAILABLE_STALL_DURATIONS missing! maxResolution ? "
+ "%s",
+ __FUNCTION__, maxResolutionStr);
entry.count = 0;
}
AppendAvailableStreamStallDurations(entry);
- ret = chars.Get(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS, &entry);
+ ret = chars.Get(
+ maxResolution ? kDepthStallDurationsMaxRes : kDepthStallDurations, &entry);
if (ret == OK) {
AppendAvailableStreamStallDurations(entry);
}
- ret = chars.Get(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP, &entry);
+ ret = chars.Get(maxResolution ? kScalerInputOutputFormatsMapMaxRes
+ : kScalerInputOutputFormatsMap,
+ &entry);
if (ret == OK) {
size_t i = 0;
while (i < entry.count) {
diff --git a/devices/EmulatedCamera/hwl/utils/StreamConfigurationMap.h b/devices/EmulatedCamera/hwl/utils/StreamConfigurationMap.h
index 8e7560b..f359f6a 100644
--- a/devices/EmulatedCamera/hwl/utils/StreamConfigurationMap.h
+++ b/devices/EmulatedCamera/hwl/utils/StreamConfigurationMap.h
@@ -54,7 +54,8 @@ struct StreamConfigurationHash {
class StreamConfigurationMap {
public:
- StreamConfigurationMap(const HalCameraMetadata& chars);
+ StreamConfigurationMap(const HalCameraMetadata& chars,
+ bool maxResolution = false);
const std::set<android_pixel_format_t>& GetOutputFormats() const {
return stream_output_formats_;