diff options
author | Emilian Peev <epeev@google.com> | 2019-09-09 10:28:53 -0700 |
---|---|---|
committer | Emilian Peev <epeev@google.com> | 2019-10-23 16:57:59 -0700 |
commit | 48ee48c9c3015a254d6d13877abe85387af7b330 (patch) | |
tree | 47cad3b0f2248ff98dadf5f94f32ae55cd812867 /devices | |
parent | bc9e116f923a99ea23704b44395a768a7f941203 (diff) | |
download | camera-48ee48c9c3015a254d6d13877abe85387af7b330.tar.gz |
EmulatedCamera: Incorporate review feedback
Includes the following suggested modifications:
- Shared data access with atomic variables
- Toggle extra Jpeg error logs
- Use reentrant 'rand' variant.
- Correct the handling of some error cases,
typos, long descriptions etc.
Bug: 131342297
Test: Camera CTS
Change-Id: I08e9d464f78385b5d370b0845ed1cc9c95d5d149
Diffstat (limited to 'devices')
-rw-r--r-- | devices/EmulatedCamera/hwl/Base.h | 1 | ||||
-rw-r--r-- | devices/EmulatedCamera/hwl/EmulatedCameraDeviceSessionHWLImpl.cpp | 38 | ||||
-rw-r--r-- | devices/EmulatedCamera/hwl/EmulatedCameraDeviceSessionHWLImpl.h | 4 | ||||
-rw-r--r-- | devices/EmulatedCamera/hwl/EmulatedRequestProcessor.cpp | 27 | ||||
-rw-r--r-- | devices/EmulatedCamera/hwl/EmulatedRequestProcessor.h | 9 | ||||
-rw-r--r-- | devices/EmulatedCamera/hwl/EmulatedRequestState.cpp | 150 | ||||
-rw-r--r-- | devices/EmulatedCamera/hwl/EmulatedRequestState.h | 17 | ||||
-rw-r--r-- | devices/EmulatedCamera/hwl/EmulatedSensor.cpp | 14 | ||||
-rw-r--r-- | devices/EmulatedCamera/hwl/EmulatedSensor.h | 2 | ||||
-rw-r--r-- | devices/EmulatedCamera/hwl/JpegCompressor.cpp | 29 | ||||
-rw-r--r-- | devices/EmulatedCamera/hwl/JpegCompressor.h | 3 | ||||
-rw-r--r-- | devices/EmulatedCamera/hwl/configs/camera_front.json | 3 |
12 files changed, 158 insertions, 139 deletions
diff --git a/devices/EmulatedCamera/hwl/Base.h b/devices/EmulatedCamera/hwl/Base.h index 112add9..4841cc1 100644 --- a/devices/EmulatedCamera/hwl/Base.h +++ b/devices/EmulatedCamera/hwl/Base.h @@ -71,7 +71,6 @@ struct SensorBuffer { camera_id(0), format(HAL_PIXEL_FORMAT_RGBA_8888), dataSpace(HAL_DATASPACE_UNKNOWN), - stream_buffer{0}, acquire_fence_fd(-1), is_input(false), plane{} { diff --git a/devices/EmulatedCamera/hwl/EmulatedCameraDeviceSessionHWLImpl.cpp b/devices/EmulatedCamera/hwl/EmulatedCameraDeviceSessionHWLImpl.cpp index 8786d7f..29d5b33 100644 --- a/devices/EmulatedCamera/hwl/EmulatedCameraDeviceSessionHWLImpl.cpp +++ b/devices/EmulatedCamera/hwl/EmulatedCameraDeviceSessionHWLImpl.cpp @@ -13,8 +13,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - -#define LOG_TAG "EmulatedCameraDeviceSessionHwlImpl" +#define LOG_TAG "EmulatedCameraDevSession" #define ATRACE_TAG ATRACE_TAG_CAMERA #include "EmulatedCameraDeviceSessionHWLImpl.h" @@ -139,24 +138,25 @@ status_t EmulatedCameraDeviceSessionHwlImpl::ConfigurePipeline( emulated_pipeline.streams.reserve(request_config.streams.size()); for (const auto& stream : request_config.streams) { bool is_input = stream.stream_type == google_camera_hal::StreamType::kInput; - emulated_pipeline.streams.emplace(std::make_pair<uint32_t, EmulatedStream>( + emulated_pipeline.streams.emplace( stream.id, - {{.id = stream.id, - .override_format = - is_input ? stream.format - : EmulatedSensor::OverrideFormat(stream.format), - .producer_usage = is_input ? 
0 - : GRALLOC_USAGE_SW_WRITE_OFTEN | - GRALLOC_USAGE_SW_READ_OFTEN, - .consumer_usage = 0, - .max_buffers = max_pipeline_depth_, - .override_data_space = stream.data_space, - .is_physical_camera_stream = stream.is_physical_camera_stream, - .physical_camera_id = stream.physical_camera_id}, - .width = stream.width, - .height = stream.height, - .is_input = is_input, - .buffer_size = stream.buffer_size})); + EmulatedStream( + {{.id = stream.id, + .override_format = + is_input ? stream.format + : EmulatedSensor::OverrideFormat(stream.format), + .producer_usage = is_input ? 0 + : GRALLOC_USAGE_SW_WRITE_OFTEN | + GRALLOC_USAGE_SW_READ_OFTEN, + .consumer_usage = 0, + .max_buffers = max_pipeline_depth_, + .override_data_space = stream.data_space, + .is_physical_camera_stream = stream.is_physical_camera_stream, + .physical_camera_id = stream.physical_camera_id}, + .width = stream.width, + .height = stream.height, + .is_input = is_input, + .buffer_size = stream.buffer_size})); } pipelines_.push_back(emulated_pipeline); diff --git a/devices/EmulatedCamera/hwl/EmulatedCameraDeviceSessionHWLImpl.h b/devices/EmulatedCamera/hwl/EmulatedCameraDeviceSessionHWLImpl.h index 6d36dfc..980540f 100644 --- a/devices/EmulatedCamera/hwl/EmulatedCameraDeviceSessionHWLImpl.h +++ b/devices/EmulatedCamera/hwl/EmulatedCameraDeviceSessionHWLImpl.h @@ -118,10 +118,10 @@ class EmulatedCameraDeviceSessionHwlImpl : public CameraDeviceSessionHwl { EmulatedCameraDeviceSessionHwlImpl( std::shared_ptr<EmulatedTorchState> torch_state) - : max_pipeline_depth_(0), torch_state_(torch_state) { + : torch_state_(torch_state) { } - uint8_t max_pipeline_depth_; + uint8_t max_pipeline_depth_ = 0; // Protects the API entry points mutable std::mutex api_mutex_; diff --git a/devices/EmulatedCamera/hwl/EmulatedRequestProcessor.cpp b/devices/EmulatedCamera/hwl/EmulatedRequestProcessor.cpp index f8d6b87..4d6b61c 100644 --- a/devices/EmulatedCamera/hwl/EmulatedRequestProcessor.cpp +++ 
b/devices/EmulatedCamera/hwl/EmulatedRequestProcessor.cpp @@ -120,14 +120,13 @@ std::unique_ptr<Buffers> EmulatedRequestProcessor::CreateSensorBuffers( void EmulatedRequestProcessor::NotifyFailedRequest(const PendingRequest& request) { if (request.output_buffers->at(0)->callback.notify != nullptr) { + auto output_buffer = std::move(request.output_buffers->at(0)); NotifyMessage msg = { .type = MessageType::kError, - .message.error = { - .frame_number = request.output_buffers->at(0)->frame_number, - .error_stream_id = -1, - .error_code = ErrorCode::kErrorRequest}}; - request.output_buffers->at(0)->callback.notify( - request.output_buffers->at(0)->pipeline_id, msg); + .message.error = {.frame_number = output_buffer->frame_number, + .error_stream_id = -1, + .error_code = ErrorCode::kErrorRequest}}; + output_buffer->callback.notify(output_buffer->pipeline_id, msg); } } @@ -156,12 +155,12 @@ status_t EmulatedRequestProcessor::GetBufferSizeAndStride( switch (stream.override_format) { case HAL_PIXEL_FORMAT_RGB_888: *stride = stream.width * 3; - *size = (*stride) * stream.width; + *size = (*stride) * stream.height; break; case HAL_PIXEL_FORMAT_RGBA_8888: *stride = stream.width * 4; ; - *size = (*stride) * stream.width; + *size = (*stride) * stream.height; break; case HAL_PIXEL_FORMAT_Y16: if (stream.override_data_space == HAL_DATASPACE_DEPTH) { @@ -181,7 +180,7 @@ status_t EmulatedRequestProcessor::GetBufferSizeAndStride( break; case HAL_PIXEL_FORMAT_RAW16: *stride = stream.width * 2; - *size = (*stride) * stream.width; + *size = (*stride) * stream.height; break; default: return BAD_VALUE; @@ -240,7 +239,7 @@ status_t EmulatedRequestProcessor::LockSensorBuffer( sensor_buffer->plane.img.buffer_size = buffer_size; } else { ALOGE("%s: Failed to lock output buffer!", __FUNCTION__); - return ret; + return BAD_VALUE; } } else { ALOGE("%s: Unsupported pixel format: 0x%x", __FUNCTION__, @@ -284,7 +283,7 @@ std::unique_ptr<SensorBuffer> EmulatedRequestProcessor::CreateSensorBuffer( 
buffer = nullptr; } - if (stream_buffer.acquire_fence != nullptr) { + if ((buffer.get() != nullptr) && (stream_buffer.acquire_fence != nullptr)) { auto fence_status = buffer->importer.importFence( stream_buffer.acquire_fence, buffer->acquire_fence_fd); if (!fence_status) { @@ -330,7 +329,8 @@ std::unique_ptr<Buffers> EmulatedRequestProcessor::AcquireBuffers( void EmulatedRequestProcessor::RequestProcessorLoop() { ATRACE_CALL(); - while (!processor_done_) { + bool vsync_status_ = true; + while (!processor_done_ && vsync_status_) { { std::lock_guard<std::mutex> lock(process_mutex_); if (!pending_requests_.empty()) { @@ -380,7 +380,8 @@ void EmulatedRequestProcessor::RequestProcessorLoop() { } } - sensor_->WaitForVSync(EmulatedSensor::kSupportedFrameDurationRange[1]); + vsync_status_ = + sensor_->WaitForVSync(EmulatedSensor::kSupportedFrameDurationRange[1]); } } diff --git a/devices/EmulatedCamera/hwl/EmulatedRequestProcessor.h b/devices/EmulatedCamera/hwl/EmulatedRequestProcessor.h index 10be265..5595111 100644 --- a/devices/EmulatedCamera/hwl/EmulatedRequestProcessor.h +++ b/devices/EmulatedCamera/hwl/EmulatedRequestProcessor.h @@ -76,10 +76,8 @@ class EmulatedRequestProcessor { private: void RequestProcessorLoop(); - std::mutex process_mutex_; - std::condition_variable request_condition_; std::thread request_thread_; - bool processor_done_ = false; + std::atomic_bool processor_done_ = false; // helper methods static uint32_t inline AlignTo(uint32_t value, uint32_t alignment) { @@ -106,10 +104,13 @@ class EmulatedRequestProcessor { std::unique_ptr<Buffers> AcquireBuffers(Buffers* buffers); void NotifyFailedRequest(const PendingRequest& request); + std::mutex process_mutex_; + std::condition_variable request_condition_; std::queue<PendingRequest> pending_requests_; uint32_t camera_id_; sp<EmulatedSensor> sensor_; - std::unique_ptr<EmulatedRequestState> request_state_; + std::unique_ptr<EmulatedRequestState> + request_state_; // Stores and handles 3A and related 
camera states. std::unique_ptr<HalCameraMetadata> last_settings_; EmulatedRequestProcessor(const EmulatedRequestProcessor&) = delete; diff --git a/devices/EmulatedCamera/hwl/EmulatedRequestState.cpp b/devices/EmulatedCamera/hwl/EmulatedRequestState.cpp index 1430922..5ed2efd 100644 --- a/devices/EmulatedCamera/hwl/EmulatedRequestState.cpp +++ b/devices/EmulatedCamera/hwl/EmulatedRequestState.cpp @@ -60,9 +60,9 @@ T GetClosestValue(T val, T min, T max) { status_t EmulatedRequestState::Update3AMeteringRegion( uint32_t tag, const HalCameraMetadata& settings, int32_t* region /*out*/) { - if ((region == nullptr) && (tag != ANDROID_CONTROL_AE_REGIONS) && - (tag != ANDROID_CONTROL_AF_REGIONS) && - (tag != ANDROID_CONTROL_AWB_REGIONS)) { + if ((region == nullptr) || ((tag != ANDROID_CONTROL_AE_REGIONS) && + (tag != ANDROID_CONTROL_AF_REGIONS) && + (tag != ANDROID_CONTROL_AWB_REGIONS))) { return BAD_VALUE; } @@ -170,7 +170,10 @@ status_t EmulatedRequestState::DoFakeAE() { sensor_max_frame_duration_); sensor_frame_duration_ = (max_frame_duration + min_frame_duration) / 2; - // Use a different AE target exposure for face priority mode + // Face priority mode usually changes the AE algorithm behavior by + // using the regions of interest associated with detected faces. + // Try to emulate this behavior by slightly increasing the target exposure + // time compared to normal operation. 
if (exposure_compensation_supported_) { float max_ae_compensation = ::powf( 2, exposure_compensation_range_[1] * @@ -201,8 +204,8 @@ status_t EmulatedRequestState::DoFakeAE() { ae_state_ = ANDROID_CONTROL_AE_STATE_CONVERGED; ae_trigger_ = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL; } else if ((ae_frame_counter_ > kAEPrecaptureMinFrames) && - ((ae_target_exposure_time_ - current_exposure_time_) < - ae_target_exposure_time_ / 10)) { + (abs(ae_target_exposure_time_ - current_exposure_time_) < + ae_target_exposure_time_ / kAETargetThreshold)) { // Done with precapture ae_frame_counter_ = 0; ae_state_ = ANDROID_CONTROL_AE_STATE_CONVERGED; @@ -223,7 +226,7 @@ status_t EmulatedRequestState::DoFakeAE() { case ANDROID_CONTROL_AE_STATE_CONVERGED: ae_frame_counter_++; if (ae_frame_counter_ > kStableAeMaxFrames) { - float exposure_step = ((double)rand() / RAND_MAX) * + float exposure_step = ((double)rand_r(&rand_seed_) / RAND_MAX) * (kExposureWanderMax - kExposureWanderMin) + kExposureWanderMin; ae_target_exposure_time_ = @@ -239,7 +242,7 @@ status_t EmulatedRequestState::DoFakeAE() { (ae_target_exposure_time_ - current_exposure_time_) * kExposureTrackRate; if (abs(ae_target_exposure_time_ - current_exposure_time_) < - ae_target_exposure_time_ / 10) { + ae_target_exposure_time_ / kAETargetThreshold) { // Close enough ae_state_ = ANDROID_CONTROL_AE_STATE_CONVERGED; ae_frame_counter_ = 0; @@ -268,7 +271,7 @@ status_t EmulatedRequestState::ProcessAWB() { } if (((awb_mode_ == ANDROID_CONTROL_AWB_MODE_OFF) || (control_mode_ == ANDROID_CONTROL_MODE_OFF)) && - supports_manual_sensor_) { + supports_manual_post_processing_) { // TODO: Add actual manual support } else if (is_backward_compatible_) { camera_metadata_ro_entry_t entry; @@ -279,7 +282,7 @@ status_t EmulatedRequestState::ProcessAWB() { awb_lock_ = ANDROID_CONTROL_AWB_LOCK_OFF; } - if (awb_lock_ == ANDROID_CONTROL_AE_LOCK_ON) { + if (awb_lock_ == ANDROID_CONTROL_AWB_LOCK_ON) { awb_state_ = ANDROID_CONTROL_AWB_STATE_LOCKED; } 
else { awb_state_ = ANDROID_CONTROL_AWB_STATE_CONVERGED; @@ -310,9 +313,9 @@ status_t EmulatedRequestState::ProcessAF() { focus_distance_ = entry.data.f[0]; } else { ALOGE( - "%s: Unsupported focus distance: %5.2f. It should be within " + "%s: Unsupported focus distance, It should be within " "[%5.2f, %5.2f]", - __FUNCTION__, entry.data.f[0], 0.f, minimum_focus_distance_); + __FUNCTION__, 0.f, minimum_focus_distance_); } } @@ -346,12 +349,14 @@ status_t EmulatedRequestState::ProcessAF() { // Stay in 'inactive' until at least next frame return OK; default: - ALOGE("%s: Unknown af trigger value %d", __FUNCTION__, af_trigger_); + ALOGE("%s: Unknown AF trigger value", __FUNCTION__); return BAD_VALUE; } - // If we get down here, we're either in an autofocus mode - // or in a continuous focus mode (and no other modes) + // If we get down here, we're either in ANDROID_CONTROL_AF_MODE_AUTO, + // ANDROID_CONTROL_AF_MODE_MACRO, ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, + // ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE and no other modes like + // ANDROID_CONTROL_AF_MODE_OFF or ANDROID_CONTROL_AF_MODE_EDOF switch (af_state_) { case ANDROID_CONTROL_AF_STATE_INACTIVE: if (af_trigger_start) { @@ -388,7 +393,7 @@ status_t EmulatedRequestState::ProcessAF() { */ if (af_trigger_start) { // Randomly transition to focused or not focused - if (rand() % 3) { + if (rand_r(&rand_seed_) % 3) { af_state_ = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED; } else { af_state_ = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED; @@ -401,7 +406,7 @@ status_t EmulatedRequestState::ProcessAF() { */ else { // Randomly transition to passive focus - if (rand() % 3 == 0) { + if (rand_r(&rand_seed_) % 3 == 0) { af_state_ = ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED; } } @@ -410,7 +415,7 @@ status_t EmulatedRequestState::ProcessAF() { case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED: if (af_trigger_start) { // Randomly transition to focused or not focused - if (rand() % 3) { + if (rand_r(&rand_seed_) % 3) { af_state_ = 
ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED; } else { af_state_ = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED; @@ -422,7 +427,7 @@ status_t EmulatedRequestState::ProcessAF() { // Simulate AF sweep completing instantaneously // Randomly transition to focused or not focused - if (rand() % 3) { + if (rand_r(&rand_seed_) % 3) { af_state_ = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED; } else { af_state_ = ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED; @@ -498,10 +503,11 @@ status_t EmulatedRequestState::ProcessAE() { (entry.data.i64[0] <= sensor_exposure_time_range_.second)) { sensor_exposure_time_ = entry.data.i64[0]; } else { - ALOGE("%s: Sensor exposure time: %" PRId64 - " not within supported range[%" PRId64 ", %" PRId64 "]", - __FUNCTION__, entry.data.i64[0], sensor_exposure_time_range_.first, - sensor_exposure_time_range_.second); + ALOGE( + "%s: Sensor exposure time" + " not within supported range[%" PRId64 ", %" PRId64 "]", + __FUNCTION__, sensor_exposure_time_range_.first, + sensor_exposure_time_range_.second); // Use last valid value } } @@ -513,11 +519,11 @@ status_t EmulatedRequestState::ProcessAE() { (entry.data.i64[0] <= sensor_max_frame_duration_)) { sensor_frame_duration_ = entry.data.i64[0]; } else { - ALOGE("%s: Sensor frame duration : %" PRId64 - " not within supported range[%" PRId64 ", %" PRId64 "]", - __FUNCTION__, entry.data.i64[0], - EmulatedSensor::kSupportedFrameDurationRange[0], - sensor_max_frame_duration_); + ALOGE( + "%s: Sensor frame duration " + " not within supported range[%" PRId64 ", %" PRId64 "]", + __FUNCTION__, EmulatedSensor::kSupportedFrameDurationRange[0], + sensor_max_frame_duration_); // Use last valid value } } @@ -532,8 +538,8 @@ status_t EmulatedRequestState::ProcessAE() { (entry.data.i32[0] <= sensor_sensitivity_range_.second)) { sensor_sensitivity_ = entry.data.i32[0]; } else { - ALOGE("%s: Sensor sensitivity: %d not within supported range[%d, %d]", - __FUNCTION__, entry.data.i32[0], sensor_sensitivity_range_.first, + ALOGE("%s: 
Sensor sensitivity not within supported range[%d, %d]", + __FUNCTION__, sensor_sensitivity_range_.first, sensor_sensitivity_range_.second); // Use last valid value } @@ -548,12 +554,13 @@ status_t EmulatedRequestState::ProcessAE() { // Do AE compensation on the results of the AE ret = CompensateAE(); if (ret != OK) { - ALOGE("%s: Failed duiring AE compensation: %d, (%s)", __FUNCTION__, ret, + ALOGE("%s: Failed during AE compensation: %d, (%s)", __FUNCTION__, ret, strerror(-ret)); } } else { - ALOGI("%s: No emulation for AE mode: %d using previous sensor settings!", - __FUNCTION__, ae_mode_); + ALOGI( + "%s: No emulation for current AE mode using previous sensor settings!", + __FUNCTION__); } if (is_flash_supported_) { @@ -569,8 +576,7 @@ status_t EmulatedRequestState::ProcessAE() { manual_flash_mode = true; } } - if (manual_flash_mode && ((ae_mode_ == ANDROID_CONTROL_AE_MODE_OFF) || - (ae_mode_ == ANDROID_CONTROL_AE_MODE_ON))) { + if (manual_flash_mode && !auto_ae_flash_mode) { flash_state_ = ANDROID_FLASH_STATE_FIRED; } else { bool is_still_capture = false; @@ -607,8 +613,7 @@ status_t EmulatedRequestState::InitializeSensorSettings( available_control_modes_.end()) { control_mode_ = entry.data.u8[0]; } else { - ALOGE("%s: Control mode: %d not supported!", __FUNCTION__, - entry.data.u8[0]); + ALOGE("%s: Unsupported control mode!", __FUNCTION__); return BAD_VALUE; } } @@ -620,7 +625,7 @@ status_t EmulatedRequestState::InitializeSensorSettings( (available_scenes_.find(entry.data.u8[0]) != available_scenes_.end())) { scene_mode_ = entry.data.u8[0]; } else { - ALOGE("%s: Scene mode: %d not supported!", __FUNCTION__, entry.data.u8[0]); + ALOGE("%s: Unsupported scene mode!", __FUNCTION__); return BAD_VALUE; } } @@ -636,8 +641,7 @@ status_t EmulatedRequestState::InitializeSensorSettings( available_ae_modes_.end()) { ae_mode_ = entry.data.u8[0]; } else { - ALOGE("%s: AE mode: %d not supported using last valid mode!", - __FUNCTION__, entry.data.u8[0]); + ALOGE("%s: 
Unsupported AE mode! Using last valid mode!", __FUNCTION__); } } @@ -647,8 +651,7 @@ status_t EmulatedRequestState::InitializeSensorSettings( available_awb_modes_.end()) { awb_mode_ = entry.data.u8[0]; } else { - ALOGE("%s: AWB mode: %d not supported using last valid mode!", - __FUNCTION__, entry.data.u8[0]); + ALOGE("%s: Unsupported AWB mode! Using last valid mode!", __FUNCTION__); } } @@ -659,8 +662,7 @@ status_t EmulatedRequestState::InitializeSensorSettings( af_mode_changed_ = af_mode_ != entry.data.u8[0]; af_mode_ = entry.data.u8[0]; } else { - ALOGE("%s: AF mode: %d not supported using last valid mode!", - __FUNCTION__, entry.data.u8[0]); + ALOGE("%s: Unsupported AF mode! Using last valid mode!", __FUNCTION__); } } } else { @@ -672,9 +674,9 @@ status_t EmulatedRequestState::InitializeSensorSettings( af_mode_ = it->second.af_mode; } else { ALOGW( - "%s: Scene %d has no scene overrides using the currently active 3A " + "%s: Current scene has no overrides! Using the currently active 3A " "modes!", - __FUNCTION__, scene_mode_); + __FUNCTION__); } } @@ -699,8 +701,7 @@ status_t EmulatedRequestState::InitializeSensorSettings( available_lens_shading_map_modes_.end()) { sensor_settings->lens_shading_map_mode = entry.data.u8[0]; } else { - ALOGE("%s: Lens shading map mode: %d not supported!", __FUNCTION__, - entry.data.u8[0]); + ALOGE("%s: Unsupported lens shading map mode!", __FUNCTION__); } } @@ -926,21 +927,21 @@ status_t EmulatedRequestState::InitializeSensorDefaults() { available_results_.find(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW) != available_results_.end(); report_sensitivity_ = available_results_.find(ANDROID_SENSOR_SENSITIVITY) != - available_requests_.end(); + available_results_.end(); report_exposure_time_ = available_results_.find(ANDROID_SENSOR_EXPOSURE_TIME) != - available_requests_.end(); + available_results_.end(); report_frame_duration_ = available_results_.find(ANDROID_SENSOR_FRAME_DURATION) != - available_requests_.end(); + available_results_.end(); 
report_neutral_color_point_ = available_results_.find(ANDROID_SENSOR_NEUTRAL_COLOR_POINT) != - available_requests_.end(); + available_results_.end(); report_green_split_ = available_results_.find(ANDROID_SENSOR_GREEN_SPLIT) != - available_requests_.end(); + available_results_.end(); report_noise_profile_ = available_results_.find(ANDROID_SENSOR_NOISE_PROFILE) != - available_requests_.end(); + available_results_.end(); if (is_raw_capable_ && !report_green_split_) { ALOGE("%s: RAW capable devices must be able to report the noise profile!", @@ -962,7 +963,7 @@ status_t EmulatedRequestState::InitializeSensorDefaults() { return BAD_VALUE; } if (available_results_.find(ANDROID_SENSOR_TIMESTAMP) == - available_requests_.end()) { + available_results_.end()) { ALOGE("%s: Sensor timestamp must always be part of the results!", __FUNCTION__); return BAD_VALUE; @@ -1300,7 +1301,7 @@ status_t EmulatedRequestState::InitializeControlAWBDefaults() { } status_t EmulatedRequestState::InitializeBlackLevelDefaults() { - if (supported_hw_level_ >= ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL) { + if (is_level_full_or_higher_) { if (available_requests_.find(ANDROID_BLACK_LEVEL_LOCK) == available_requests_.end()) { ALOGE( @@ -1435,10 +1436,10 @@ status_t EmulatedRequestState::InitializeControlAEDefaults() { } bool ae_comp_requests = - available_requests_.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER) != + available_requests_.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION) != available_requests_.end(); bool ae_comp_results = - available_results_.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER) != + available_results_.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION) != available_results_.end(); exposure_compensation_supported_ = ((exposure_compensation_range_[0] < 0) && @@ -1492,7 +1493,7 @@ status_t EmulatedRequestState::InitializeControlDefaults() { return BAD_VALUE; } - // Capture intent must always be use configurable + // Capture intent must always be user configurable if 
(available_requests_.find(ANDROID_CONTROL_CAPTURE_INTENT) == available_requests_.end()) { ALOGE("%s: Clients must be able to set the capture intent!", __FUNCTION__); @@ -1594,7 +1595,7 @@ status_t EmulatedRequestState::InitializeControlDefaults() { return BAD_VALUE; } - if ((supported_hw_level_ >= ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL) && + if ((is_level_full_or_higher_) && ((max_ae_regions_ == 0) || (max_af_regions_ == 0))) { ALOGE( "%s: Full and higher level cameras must support at AF and AE " @@ -1782,8 +1783,7 @@ status_t EmulatedRequestState::InitializeTonemapDefaults() { return BAD_VALUE; } - if ((supported_hw_level_ >= ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL) && - (available_tonemap_modes_.size() < 3)) { + if ((is_level_full_or_higher_) && (available_tonemap_modes_.size() < 3)) { ALOGE( "%s: Full and higher level cameras must support at least three or " "more tonemap modes", @@ -2020,7 +2020,8 @@ status_t EmulatedRequestState::InitializeShadingDefaults() { if (supports_manual_post_processing_ && (available_shading_modes_.size() < 2)) { ALOGE( - "%s: Devices capable of manual post-processing need to support aleast " + "%s: Devices capable of manual post-processing need to support at " + "least " "two" " lens shading modes!", __FUNCTION__); @@ -2075,10 +2076,10 @@ status_t EmulatedRequestState::InitializeNoiseReductionDefaults() { return BAD_VALUE; } - if ((supported_hw_level_ >= ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL) && + if ((is_level_full_or_higher_) && (available_noise_reduction_modes_.size() < 2)) { ALOGE( - "%s: Full and above device must support aleast two noise reduction " + "%s: Full and above device must support at least two noise reduction " "modes!", __FUNCTION__); return BAD_VALUE; @@ -2102,12 +2103,12 @@ status_t EmulatedRequestState::InitializeNoiseReductionDefaults() { switch (static_cast<RequestTemplate>(idx)) { case RequestTemplate::kVideoRecord: // Pass-through + case RequestTemplate::kVideoSnapshot: // Pass-through case 
RequestTemplate::kPreview: if (is_fast_mode_supported) { noise_reduction_mode = ANDROID_NOISE_REDUCTION_MODE_FAST; } break; - case RequestTemplate::kVideoSnapshot: // Pass-through case RequestTemplate::kStillCapture: if (is_hq_mode_supported) { noise_reduction_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY; @@ -2142,8 +2143,7 @@ status_t EmulatedRequestState::InitializeHotPixelDefaults() { return BAD_VALUE; } - if ((supported_hw_level_ >= ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL) && - (available_hot_pixel_modes_.size() < 2)) { + if ((is_level_full_or_higher_) && (available_hot_pixel_modes_.size() < 2)) { ALOGE( "%s: Full and higher level cameras must support at least fast and hq " "hotpixel modes", @@ -2281,21 +2281,20 @@ status_t EmulatedRequestState::InitializeLensDefaults() { ret = static_metadata_->Get(ANDROID_LENS_POSE_ROTATION, &entry); if ((ret == OK) && (entry.count == ARRAY_SIZE(pose_rotation_))) { - memcpy(pose_rotation_, entry.data.f, ARRAY_SIZE(pose_rotation_)); + memcpy(pose_rotation_, entry.data.f, sizeof(pose_rotation_)); } ret = static_metadata_->Get(ANDROID_LENS_POSE_TRANSLATION, &entry); if ((ret == OK) && (entry.count == ARRAY_SIZE(pose_translation_))) { - memcpy(pose_translation_, entry.data.f, ARRAY_SIZE(pose_translation_)); + memcpy(pose_translation_, entry.data.f, sizeof(pose_translation_)); } ret = static_metadata_->Get(ANDROID_LENS_INTRINSIC_CALIBRATION, &entry); if ((ret == OK) && (entry.count == ARRAY_SIZE(intrinsic_calibration_))) { - memcpy(intrinsic_calibration_, entry.data.f, - ARRAY_SIZE(intrinsic_calibration_)); + memcpy(intrinsic_calibration_, entry.data.f, sizeof(intrinsic_calibration_)); } ret = static_metadata_->Get(ANDROID_LENS_DISTORTION, &entry); if ((ret == OK) && (entry.count == ARRAY_SIZE(distortion_))) { - memcpy(distortion_, entry.data.f, ARRAY_SIZE(distortion_)); + memcpy(distortion_, entry.data.f, sizeof(distortion_)); } report_focus_distance_ = @@ -2353,6 +2352,9 @@ status_t 
EmulatedRequestState::InitializeInfoDefaults() { } supported_hw_level_ = entry.data.u8[0]; + is_level_full_or_higher_ = + (supported_hw_level_ == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL) || + (supported_hw_level_ == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3); return InitializeReprocessDefaults(); } @@ -2411,7 +2413,7 @@ status_t EmulatedRequestState::InitializeRequestDefaults() { ret = static_metadata_->Get(ANDROID_REQUEST_PIPELINE_MAX_DEPTH, &entry); if ((ret == OK) && (entry.count == 1)) { if (entry.data.u8[0] == 0) { - ALOGE("%s: Maximum request pipeline must have a non zero value!", + ALOGE("%s: Maximum request pipeline depth must have a non zero value!", __FUNCTION__); return BAD_VALUE; } diff --git a/devices/EmulatedCamera/hwl/EmulatedRequestState.h b/devices/EmulatedCamera/hwl/EmulatedRequestState.h index 044505d..f2503c8 100644 --- a/devices/EmulatedCamera/hwl/EmulatedRequestState.h +++ b/devices/EmulatedCamera/hwl/EmulatedRequestState.h @@ -160,7 +160,7 @@ class EmulatedRequestState { std::unordered_map<uint8_t, SceneOverride> scene_overrides_; std::vector<FPSRange> available_fps_ranges_; int32_t exposure_compensation_range_[2] = {0, 0}; - camera_metadata_rational exposure_compensation_step_ = {0, 0}; + camera_metadata_rational exposure_compensation_step_ = {0, 1}; bool exposure_compensation_supported_ = false; int32_t exposure_compensation_ = 0; int32_t ae_metering_region_[5] = {0, 0, 0, 0, 0}; @@ -188,10 +188,17 @@ class EmulatedRequestState { bool scenes_supported_ = false; size_t ae_frame_counter_ = 0; const size_t kAEPrecaptureMinFrames = 10; - const float kExposureTrackRate = .2f; - const size_t kStableAeMaxFrames = 100; + // Fake AE related constants + const float kExposureTrackRate = .2f; // This is the rate at which the fake + // AE will reach the calculated target + const size_t kStableAeMaxFrames = + 100; // The number of frames the fake AE will stay in converged state + // After fake AE switches to state searching the exposure + // time will 
wander randomly in region defined by min/max below. const float kExposureWanderMin = -2; const float kExposureWanderMax = 1; + const uint32_t kAETargetThreshold = + 10; // Defines a threshold for reaching the AE target int32_t post_raw_boost_ = 100; bool report_post_raw_boost_ = false; nsecs_t ae_target_exposure_time_ = EmulatedSensor::kDefaultExposureTime; @@ -242,6 +249,8 @@ class EmulatedRequestState { static const size_t kTemplateCount = static_cast<size_t>(RequestTemplate::kManual) + 1; std::unique_ptr<HalCameraMetadata> default_requests_[kTemplateCount]; + // Set to true if the camera device has HW level FULL or LEVEL3 + bool is_level_full_or_higher_ = false; // android.lens.* float minimum_focus_distance_ = 0.f; @@ -266,6 +275,8 @@ class EmulatedRequestState { bool report_intrinsic_calibration_ = false; int32_t shading_map_size_[2] = {0}; + unsigned int rand_seed_ = 1; + // android.hotpixel.* std::set<uint8_t> available_hot_pixel_modes_; diff --git a/devices/EmulatedCamera/hwl/EmulatedSensor.cpp b/devices/EmulatedCamera/hwl/EmulatedSensor.cpp index 079bf43..babcb10 100644 --- a/devices/EmulatedCamera/hwl/EmulatedSensor.cpp +++ b/devices/EmulatedCamera/hwl/EmulatedSensor.cpp @@ -392,10 +392,12 @@ void EmulatedSensor::SetCurrentRequest(SensorSettings settings, bool EmulatedSensor::WaitForVSyncLocked(nsecs_t reltime) { got_vsync_ = false; - auto res = vsync_.waitRelative(control_mutex_, reltime); - if (res != OK && res != TIMED_OUT) { - ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, res); - return false; + while (!got_vsync_) { + auto res = vsync_.waitRelative(control_mutex_, reltime); + if (res != OK && res != TIMED_OUT) { + ALOGE("%s: Error waiting for VSync signal: %d", __FUNCTION__, res); + return false; + } } return got_vsync_; @@ -718,7 +720,7 @@ bool EmulatedSensor::threadLoop() { } nsecs_t end_real_time __unused = systemTime(); ALOGVV("Frame cycle took %" PRIu64 " ms, target %" PRIu64 " ms", - ns2ms(end_real_time - start_real_time), 
ns2ms(settings.frame_duration)); + ns2ms(end_real_time - start_real_time), ns2ms(frame_duration)); return true; }; @@ -756,7 +758,7 @@ void EmulatedSensor::CaptureRaw(uint8_t* img, uint32_t gain, uint32_t width) { float photon_noise_var = electron_count * noise_var_gain; float noise_stddev = sqrtf_approx(read_noise_var + photon_noise_var); // Scaled to roughly match gaussian/uniform noise stddev - float noise_sample = std::rand() * (2.5 / (1.0 + RAND_MAX)) - 1.25; + float noise_sample = rand_r(&rand_seed_) * (2.5 / (1.0 + RAND_MAX)) - 1.25; raw_count += chars_.black_level_pattern[bayer_row[x & 0x1]]; raw_count += noise_stddev * noise_sample; diff --git a/devices/EmulatedCamera/hwl/EmulatedSensor.h b/devices/EmulatedCamera/hwl/EmulatedSensor.h index cd71b4b..09a4470 100644 --- a/devices/EmulatedCamera/hwl/EmulatedSensor.h +++ b/devices/EmulatedCamera/hwl/EmulatedSensor.h @@ -269,6 +269,8 @@ class EmulatedSensor : private Thread, public virtual RefBase { // End of control parameters + unsigned int rand_seed_ = 1; + /** * Inherited Thread virtual overrides, and members only used by the * processing thread diff --git a/devices/EmulatedCamera/hwl/JpegCompressor.cpp b/devices/EmulatedCamera/hwl/JpegCompressor.cpp index a89f95b..86c955c 100644 --- a/devices/EmulatedCamera/hwl/JpegCompressor.cpp +++ b/devices/EmulatedCamera/hwl/JpegCompressor.cpp @@ -34,6 +34,17 @@ using google_camera_hal::NotifyMessage; JpegCompressor::JpegCompressor(std::unique_ptr<ExifUtils> exif_utils) : exif_utils_(std::move(exif_utils)) { ATRACE_CALL(); + char value[PROPERTY_VALUE_MAX]; + if (property_get("ro.product.vendor.manufacturer", value, "unknown") <= 0) { + ALOGW("%s: No Exif make data!", __FUNCTION__); + } + exif_make_ = std::string(value); + + if (property_get("ro.product.vendor.model", value, "unknown") <= 0) { + ALOGW("%s: No Exif model data!", __FUNCTION__); + } + exif_model_ = std::string(value); + jpeg_processing_thread_ = std::thread([this] { this->ThreadLoop(); }); } @@ -159,20 
+170,8 @@ void JpegCompressor::CompressYUV420(std::unique_ptr<JpegYUV420Job> job) { } } - char value[PROPERTY_VALUE_MAX]; - if (property_get("ro.product.vendor.manufacturer", value, "unknown") > - 0) { - exif_utils_->SetMake(std::string(value)); - } else { - ALOGW("%s: No Exif make data!", __FUNCTION__); - } - - if (property_get("ro.product.vendor.model", value, "unknown") > 0) { - exif_utils_->SetModel(std::string(value)); - } else { - ALOGW("%s: No Exif model data!", __FUNCTION__); - } - + exif_utils_->SetMake(exif_make_); + exif_utils_->SetModel(exif_model_); if (exif_utils_->GenerateApp1(thumbnail_jpeg_buffer.empty() ? nullptr : thumbnail_jpeg_buffer.data(), @@ -263,7 +262,7 @@ size_t JpegCompressor::CompressYUV420Frame(YUV420Frame frame) { }; dmgr.empty_output_buffer = [](j_compress_ptr cinfo __unused) { - ALOGV("%s:%d Out of buffer", __FUNCTION__, __LINE__); + ALOGE("%s:%d Out of buffer", __FUNCTION__, __LINE__); return 0; }; diff --git a/devices/EmulatedCamera/hwl/JpegCompressor.h b/devices/EmulatedCamera/hwl/JpegCompressor.h index e8b2d65..faaa3e1 100644 --- a/devices/EmulatedCamera/hwl/JpegCompressor.h +++ b/devices/EmulatedCamera/hwl/JpegCompressor.h @@ -74,10 +74,11 @@ class JpegCompressor { private: std::mutex mutex_; std::condition_variable condition_; - bool jpeg_done_ = false; + std::atomic_bool jpeg_done_ = false; std::thread jpeg_processing_thread_; std::queue<std::unique_ptr<JpegYUV420Job>> pending_yuv_jobs_; std::unique_ptr<ExifUtils> exif_utils_; + std::string exif_make_, exif_model_; j_common_ptr jpeg_error_info_; bool CheckError(const char* msg); diff --git a/devices/EmulatedCamera/hwl/configs/camera_front.json b/devices/EmulatedCamera/hwl/configs/camera_front.json index e693c17..7da89da 100644 --- a/devices/EmulatedCamera/hwl/configs/camera_front.json +++ b/devices/EmulatedCamera/hwl/configs/camera_front.json @@ -296,7 +296,8 @@ "524292", "655360", "3", - "65576" + "65576", + "917520" ], "android.request.maxNumInputStreams": [ "0" |